repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
apache/flink-ml
37,586
flink-ml-core/src/main/java/org/apache/flink/ml/common/datastream/DataStreamUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.ml.common.datastream; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.functions.AggregateFunction; import org.apache.flink.api.common.functions.CoGroupFunction; import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.functions.MapPartitionFunction; import org.apache.flink.api.common.functions.ReduceFunction; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.time.Time; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.base.IntSerializer; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.typeutils.TypeExtractor; import org.apache.flink.iteration.datacache.nonkeyed.ListStateWithCache; import 
org.apache.flink.iteration.datacache.nonkeyed.OperatorScopeManagedMemoryManager; import org.apache.flink.iteration.operator.OperatorStateUtils; import org.apache.flink.ml.common.datastream.sort.CoGroupOperator; import org.apache.flink.ml.common.window.CountTumblingWindows; import org.apache.flink.ml.common.window.EventTimeSessionWindows; import org.apache.flink.ml.common.window.EventTimeTumblingWindows; import org.apache.flink.ml.common.window.GlobalWindows; import org.apache.flink.ml.common.window.ProcessingTimeSessionWindows; import org.apache.flink.ml.common.window.ProcessingTimeTumblingWindows; import org.apache.flink.ml.common.window.Windows; import org.apache.flink.runtime.jobgraph.OperatorID; import org.apache.flink.runtime.state.StateInitializationContext; import org.apache.flink.runtime.state.StateSnapshotContext; import org.apache.flink.runtime.state.VoidNamespace; import org.apache.flink.runtime.state.VoidNamespaceSerializer; import org.apache.flink.streaming.api.datastream.AllWindowedStream; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction; import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction; import org.apache.flink.streaming.api.operators.AbstractStreamOperator; import org.apache.flink.streaming.api.operators.AbstractUdfStreamOperator; import org.apache.flink.streaming.api.operators.BoundedOneInput; import org.apache.flink.streaming.api.operators.InternalTimer; import org.apache.flink.streaming.api.operators.InternalTimerService; import org.apache.flink.streaming.api.operators.OneInputStreamOperator; import org.apache.flink.streaming.api.operators.TimestampedCollector; import org.apache.flink.streaming.api.operators.Triggerable; import 
org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows; import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows; import org.apache.flink.streaming.api.windowing.assigners.WindowAssigner; import org.apache.flink.streaming.api.windowing.windows.GlobalWindow; import org.apache.flink.streaming.api.windowing.windows.TimeWindow; import org.apache.flink.streaming.api.windowing.windows.Window; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.runtime.tasks.StreamTask; import org.apache.flink.util.Collector; import org.apache.commons.collections.IteratorUtils; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Random; import static org.apache.flink.iteration.utils.DataStreamUtils.setManagedMemoryWeight; /** Provides utility functions for {@link DataStream}. */ @Internal public class DataStreamUtils { /** * Applies allReduceSum on the input data stream. The input data stream is supposed to contain * up to one double array in each partition. The result data stream has the same parallelism as * the input, where each partition contains one double array that sums all of the double arrays * in the input data stream. * * <p>Note that we throw exception when one of the following two cases happen: * <li>There exists one partition that contains more than one double array. * <li>The length of the double array is not consistent among all partitions. * * @param input The input data stream. * @return The result data stream. */ public static DataStream<double[]> allReduceSum(DataStream<double[]> input) { return AllReduceImpl.allReduceSum(input); } /** * Applies a {@link MapPartitionFunction} on a bounded data stream. * * @param input The input data stream. * @param func The user defined mapPartition function. * @param <IN> The class type of the input. 
* @param <OUT> The class type of output. * @return The result data stream. */ public static <IN, OUT> DataStream<OUT> mapPartition( DataStream<IN> input, MapPartitionFunction<IN, OUT> func) { TypeInformation<OUT> outType = TypeExtractor.getMapPartitionReturnTypes(func, input.getType(), null, true); return mapPartition(input, func, outType); } /** * Applies a {@link MapPartitionFunction} on a bounded data stream. * * @param input The input data stream. * @param func The user defined mapPartition function. * @param outType The type information of the output. * @param <IN> The class type of the input. * @param <OUT> The class type of output. * @return The result data stream. */ public static <IN, OUT> DataStream<OUT> mapPartition( DataStream<IN> input, MapPartitionFunction<IN, OUT> func, TypeInformation<OUT> outType) { func = input.getExecutionEnvironment().clean(func); return input.transform("mapPartition", outType, new MapPartitionOperator<>(func)) .setParallelism(input.getParallelism()); } /** * Applies a {@link ReduceFunction} on a bounded data stream. The output stream contains at most * one stream record and its parallelism is one. * * @param input The input data stream. * @param func The user defined reduce function. * @param <T> The class type of the input. * @return The result data stream. */ public static <T> DataStream<T> reduce(DataStream<T> input, ReduceFunction<T> func) { return reduce(input, func, input.getType()); } /** * Applies a {@link ReduceFunction} on a bounded data stream. The output stream contains at most * one stream record and its parallelism is one. * * @param input The input data stream. * @param func The user defined reduce function. * @param outType The type information of the output. * @param <T> The class type of the input. * @return The result data stream. 
*/ public static <T> DataStream<T> reduce( DataStream<T> input, ReduceFunction<T> func, TypeInformation<T> outType) { func = input.getExecutionEnvironment().clean(func); DataStream<T> partialReducedStream = input.transform("reduce", outType, new ReduceOperator<>(func)) .setParallelism(input.getParallelism()); if (partialReducedStream.getParallelism() == 1) { return partialReducedStream; } else { return partialReducedStream .transform("reduce", outType, new ReduceOperator<>(func)) .setParallelism(1); } } /** * Applies a {@link ReduceFunction} on a bounded keyed data stream. The output stream contains * one stream record for each key. * * @param input The input keyed data stream. * @param func The user defined reduce function. * @param <T> The class type of input. * @param <K> The key type of input. * @return The result data stream. */ public static <T, K> DataStream<T> reduce(KeyedStream<T, K> input, ReduceFunction<T> func) { return reduce(input, func, input.getType()); } /** * Applies a {@link ReduceFunction} on a bounded keyed data stream. The output stream contains * one stream record for each key. * * @param input The input keyed data stream. * @param func The user defined reduce function. * @param outType The type information of the output. * @param <T> The class type of input. * @param <K> The key type of input. * @return The result data stream. */ public static <T, K> DataStream<T> reduce( KeyedStream<T, K> input, ReduceFunction<T> func, TypeInformation<T> outType) { func = input.getExecutionEnvironment().clean(func); return input.transform( "Keyed Reduce", outType, new KeyedReduceOperator<>( func, outType.createSerializer(input.getExecutionConfig()))) .setParallelism(input.getParallelism()); } /** * Aggregates the elements in each partition of the input bounded stream, and then merges the * partial results of all partitions. The output stream contains the aggregated result and its * parallelism is one. 
* * <p>Note: If the parallelism of the input stream is N, this method would invoke {@link * AggregateFunction#createAccumulator()} N times and {@link AggregateFunction#merge(Object, * Object)} N - 1 times. Thus the initial accumulator should be neutral (e.g. empty list for * list concatenation or `0` for summation), otherwise the aggregation result would be affected * by the parallelism of the input stream. * * @param input The input data stream. * @param func The user defined aggregate function. * @param <IN> The class type of the input. * @param <ACC> The class type of the accumulated values. * @param <OUT> The class type of the output values. * @return The result data stream. */ public static <IN, ACC, OUT> DataStream<OUT> aggregate( DataStream<IN> input, AggregateFunction<IN, ACC, OUT> func) { TypeInformation<ACC> accType = TypeExtractor.getAggregateFunctionAccumulatorType( func, input.getType(), null, true); TypeInformation<OUT> outType = TypeExtractor.getAggregateFunctionReturnType(func, input.getType(), null, true); return aggregate(input, func, accType, outType); } /** * Aggregates the elements in each partition of the input bounded stream, and then merges the * partial results of all partitions. The output stream contains the aggregated result and its * parallelism is one. * * <p>Note: If the parallelism of the input stream is N, this method would invoke {@link * AggregateFunction#createAccumulator()} N times and {@link AggregateFunction#merge(Object, * Object)} N - 1 times. Thus the initial accumulator should be neutral (e.g. empty list for * list concatenation or `0` for summation), otherwise the aggregation result would be affected * by the parallelism of the input stream. * * @param input The input data stream. * @param func The user defined aggregate function. * @param accType The type of the accumulated values. * @param outType The types of the output. * @param <IN> The class type of the input. * @param <ACC> The class type of the accumulated values. 
* @param <OUT> The class type of the output values. * @return The result data stream. */ public static <IN, ACC, OUT> DataStream<OUT> aggregate( DataStream<IN> input, AggregateFunction<IN, ACC, OUT> func, TypeInformation<ACC> accType, TypeInformation<OUT> outType) { func = input.getExecutionEnvironment().clean(func); DataStream<ACC> partialAggregatedStream = input.transform( "partialAggregate", accType, new PartialAggregateOperator<>(func, accType)); DataStream<OUT> aggregatedStream = partialAggregatedStream.transform( "aggregate", outType, new AggregateOperator<>(func, accType)); aggregatedStream.getTransformation().setParallelism(1); return aggregatedStream; } /** * Performs an approximate uniform sampling over the elements in a bounded data stream. The * difference of probabilities of two data points been sampled is bounded by O(numSamples * p * * p / (M * M)), where p is the parallelism of the input stream, M is the total number of data * points that the input stream contains. * * <p>This method takes samples without replacement. If the number of elements in the stream is * smaller than expected number of samples, all elements will be included in the sample. * * @param input The input data stream. * @param numSamples The number of elements to be sampled. * @param randomSeed The seed to randomly pick elements as sample. * @return A data stream containing a list of the sampled elements. */ public static <T> DataStream<T> sample(DataStream<T> input, int numSamples, long randomSeed) { int inputParallelism = input.getParallelism(); // The maximum difference of number of data points in each partition after calling // `rebalance` is `inputParallelism`. As a result, extra `inputParallelism` data points are // sampled for each partition in the first round. 
int firstRoundNumSamples = Math.min((numSamples / inputParallelism) + inputParallelism, numSamples); return input.rebalance() .transform( "firstRoundSampling", input.getType(), new SamplingOperator<>(firstRoundNumSamples, randomSeed)) .setParallelism(inputParallelism) .transform( "secondRoundSampling", input.getType(), new SamplingOperator<>(numSamples, randomSeed)) .setParallelism(1) .map(x -> x, input.getType()) .setParallelism(inputParallelism); } /** * Creates windows from data in the non key grouped input stream and applies the given window * function to each window. * * @param input The input data stream to be windowed and processed. * @param windows The windowing strategy that defines how input data would be sliced into * batches. * @param function The user defined process function. * @return The data stream that is the result of applying the window function to each window. */ public static <IN, OUT, W extends Window> SingleOutputStreamOperator<OUT> windowAllAndProcess( DataStream<IN> input, Windows windows, ProcessAllWindowFunction<IN, OUT, W> function) { function = input.getExecutionEnvironment().clean(function); AllWindowedStream<IN, W> allWindowedStream = getAllWindowedStream(input, windows); return allWindowedStream.process(function); } /** * Creates windows from data in the non key grouped input stream and applies the given window * function to each window. * * @param input The input data stream to be windowed and processed. * @param windows The windowing strategy that defines how input data would be sliced into * batches. * @param function The user defined process function. * @param outType The type information of the output. * @return The data stream that is the result of applying the window function to each window. 
*/ public static <IN, OUT, W extends Window> SingleOutputStreamOperator<OUT> windowAllAndProcess( DataStream<IN> input, Windows windows, ProcessAllWindowFunction<IN, OUT, W> function, TypeInformation<OUT> outType) { function = input.getExecutionEnvironment().clean(function); AllWindowedStream<IN, W> allWindowedStream = getAllWindowedStream(input, windows); return allWindowedStream.process(function, outType); } /** * A CoGroup transformation combines the elements of two {@link DataStream DataStreams} into one * DataStream. It groups each DataStream individually on a key and gives groups of both * DataStreams with equal keys together into a {@link * org.apache.flink.api.common.functions.CoGroupFunction}. If a DataStream has a group with no * matching key in the other DataStream, the CoGroupFunction is called with an empty group for * the non-existing group. * * <p>The CoGroupFunction can iterate over the elements of both groups and return any number of * elements including none. * * <p>NOTE: This method assumes both inputs are bounded. * * @param input1 The first data stream. * @param input2 The second data stream. * @param keySelector1 The KeySelector to be used for extracting the first input's key for * partitioning. * @param keySelector2 The KeySelector to be used for extracting the second input's key for * partitioning. * @param outTypeInformation The type information describing the output type. * @param func The user-defined co-group function. * @param <IN1> The class type of the first input. * @param <IN2> The class type of the second input. * @param <KEY> The class type of the key. * @param <OUT> The class type of the output values. * @return The result data stream. 
*/ public static <IN1, IN2, KEY extends Serializable, OUT> DataStream<OUT> coGroup( DataStream<IN1> input1, DataStream<IN2> input2, KeySelector<IN1, KEY> keySelector1, KeySelector<IN2, KEY> keySelector2, TypeInformation<OUT> outTypeInformation, CoGroupFunction<IN1, IN2, OUT> func) { func = input1.getExecutionEnvironment().clean(func); DataStream<OUT> result = input1.connect(input2) .keyBy(keySelector1, keySelector2) .transform( "CoGroupOperator", outTypeInformation, new CoGroupOperator<>(func)) .setParallelism(Math.max(input1.getParallelism(), input2.getParallelism())); setManagedMemoryWeight(result, 100); return result; } @SuppressWarnings({"rawtypes", "unchecked"}) private static <IN, W extends Window> AllWindowedStream<IN, W> getAllWindowedStream( DataStream<IN> input, Windows windows) { if (windows instanceof CountTumblingWindows) { long countWindowSize = ((CountTumblingWindows) windows).getSize(); return (AllWindowedStream<IN, W>) input.countWindowAll(countWindowSize); } else { return input.windowAll((WindowAssigner) getDataStreamTimeWindowAssigner(windows)); } } private static WindowAssigner<Object, TimeWindow> getDataStreamTimeWindowAssigner( Windows windows) { if (windows instanceof GlobalWindows) { return EndOfStreamWindows.get(); } else if (windows instanceof EventTimeTumblingWindows) { return TumblingEventTimeWindows.of( getStreamWindowTime(((EventTimeTumblingWindows) windows).getSize())); } else if (windows instanceof ProcessingTimeTumblingWindows) { return TumblingProcessingTimeWindows.of( getStreamWindowTime(((ProcessingTimeTumblingWindows) windows).getSize())); } else if (windows instanceof EventTimeSessionWindows) { return org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows .withGap(getStreamWindowTime(((EventTimeSessionWindows) windows).getGap())); } else if (windows instanceof ProcessingTimeSessionWindows) { return org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows .withGap( 
getStreamWindowTime(((ProcessingTimeSessionWindows) windows).getGap())); } else { throw new UnsupportedOperationException( String.format( "Unsupported Windows subclass: %s", windows.getClass().getName())); } } private static org.apache.flink.streaming.api.windowing.time.Time getStreamWindowTime( Time time) { return org.apache.flink.streaming.api.windowing.time.Time.of( time.getSize(), time.getUnit()); } /** * A stream operator to apply {@link MapPartitionFunction} on each partition of the input * bounded data stream. */ private static class MapPartitionOperator<IN, OUT> extends AbstractUdfStreamOperator<OUT, MapPartitionFunction<IN, OUT>> implements OneInputStreamOperator<IN, OUT>, BoundedOneInput { private ListStateWithCache<IN> valuesState; public MapPartitionOperator(MapPartitionFunction<IN, OUT> mapPartitionFunc) { super(mapPartitionFunc); } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); final StreamTask<?, ?> containingTask = getContainingTask(); final OperatorID operatorID = config.getOperatorID(); final OperatorScopeManagedMemoryManager manager = OperatorScopeManagedMemoryManager.getOrCreate(containingTask, operatorID); final String stateKey = "values-state"; manager.register(stateKey, 1.); valuesState = new ListStateWithCache<>( getOperatorConfig().getTypeSerializerIn(0, getClass().getClassLoader()), stateKey, context, this); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); valuesState.snapshotState(context); } @Override public void processElement(StreamRecord<IN> input) throws Exception { valuesState.add(input.getValue()); } @Override public void endInput() throws Exception { userFunction.mapPartition(valuesState.get(), new TimestampedCollector<>(output)); valuesState.clear(); } } /** A stream operator to apply {@link ReduceFunction} on the input bounded data stream. 
*/ private static class ReduceOperator<T> extends AbstractUdfStreamOperator<T, ReduceFunction<T>> implements OneInputStreamOperator<T, T>, BoundedOneInput { /** The temp result of the reduce function. */ private T result; private ListState<T> state; public ReduceOperator(ReduceFunction<T> userFunction) { super(userFunction); } @Override public void endInput() { if (result != null) { output.collect(new StreamRecord<>(result)); } } @Override public void processElement(StreamRecord<T> streamRecord) throws Exception { if (result == null) { result = streamRecord.getValue(); } else { result = userFunction.reduce(streamRecord.getValue(), result); } } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); state = context.getOperatorStateStore() .getListState( new ListStateDescriptor<>( "state", getOperatorConfig() .getTypeSerializerIn( 0, getClass().getClassLoader()))); result = OperatorStateUtils.getUniqueElement(state, "state").orElse(null); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); state.clear(); if (result != null) { state.add(result); } } } /** * A stream operator to apply {@link ReduceFunction} on the input bounded keyed data stream. * * <p>Note: this class is a copy of {@link * org.apache.flink.streaming.api.operators.BatchGroupedReduceOperator} in case of unexpected * changes of its implementation. 
*/ private static class KeyedReduceOperator<IN, KEY> extends AbstractUdfStreamOperator<IN, ReduceFunction<IN>> implements OneInputStreamOperator<IN, IN>, Triggerable<KEY, VoidNamespace> { private static final long serialVersionUID = 1L; private static final String STATE_NAME = "_op_state"; private transient ValueState<IN> values; private final TypeSerializer<IN> serializer; private InternalTimerService<VoidNamespace> timerService; public KeyedReduceOperator(ReduceFunction<IN> reducer, TypeSerializer<IN> serializer) { super(reducer); this.serializer = serializer; } @Override public void open() throws Exception { super.open(); ValueStateDescriptor<IN> stateId = new ValueStateDescriptor<>(STATE_NAME, serializer); values = getPartitionedState(stateId); timerService = getInternalTimerService("end-key-timers", new VoidNamespaceSerializer(), this); } @Override public void processElement(StreamRecord<IN> element) throws Exception { IN value = element.getValue(); IN currentValue = values.value(); if (currentValue == null) { // Registers a timer for emitting the result at the end when this is the // first input for this key. timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, Long.MAX_VALUE); } else { // Otherwise, reduces things. value = userFunction.reduce(currentValue, value); } values.update(value); } @Override public void onEventTime(InternalTimer<KEY, VoidNamespace> timer) throws Exception { IN currentValue = values.value(); if (currentValue != null) { output.collect(new StreamRecord<>(currentValue, Long.MAX_VALUE)); } } @Override public void onProcessingTime(InternalTimer<KEY, VoidNamespace> timer) throws Exception {} } /** * A stream operator to apply {@link AggregateFunction#add(IN, ACC)} on each partition of the * input bounded data stream. 
*/ private static class PartialAggregateOperator<IN, ACC, OUT> extends AbstractUdfStreamOperator<ACC, AggregateFunction<IN, ACC, OUT>> implements OneInputStreamOperator<IN, ACC>, BoundedOneInput { /** Type information of the accumulated result. */ private final TypeInformation<ACC> accType; /** The accumulated result of the aggregate function in one partition. */ private ACC acc; /** State of acc. */ private ListState<ACC> accState; public PartialAggregateOperator( AggregateFunction<IN, ACC, OUT> userFunction, TypeInformation<ACC> accType) { super(userFunction); this.accType = accType; } @Override public void endInput() { output.collect(new StreamRecord<>(acc)); } @Override public void processElement(StreamRecord<IN> streamRecord) throws Exception { acc = userFunction.add(streamRecord.getValue(), acc); } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); accState = context.getOperatorStateStore() .getListState(new ListStateDescriptor<>("accState", accType)); acc = OperatorStateUtils.getUniqueElement(accState, "accState") .orElse(userFunction.createAccumulator()); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); accState.clear(); accState.add(acc); } } /** * A stream operator to apply {@link AggregateFunction#merge(ACC, ACC)} and {@link * AggregateFunction#getResult(ACC)} on the input bounded data stream. */ private static class AggregateOperator<IN, ACC, OUT> extends AbstractUdfStreamOperator<OUT, AggregateFunction<IN, ACC, OUT>> implements OneInputStreamOperator<ACC, OUT>, BoundedOneInput { /** Type information of the accumulated result. */ private final TypeInformation<ACC> accType; /** The accumulated result of the aggregate function in the final partition. */ private ACC acc; /** State of acc. 
*/ private ListState<ACC> accState; public AggregateOperator( AggregateFunction<IN, ACC, OUT> userFunction, TypeInformation<ACC> accType) { super(userFunction); this.accType = accType; } @Override public void endInput() { output.collect(new StreamRecord<>(userFunction.getResult(acc))); } @Override public void processElement(StreamRecord<ACC> streamRecord) throws Exception { if (acc == null) { acc = streamRecord.getValue(); } else { acc = userFunction.merge(streamRecord.getValue(), acc); } } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); accState = context.getOperatorStateStore() .getListState(new ListStateDescriptor<>("accState", accType)); acc = OperatorStateUtils.getUniqueElement(accState, "accState").orElse(null); } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); accState.clear(); if (acc != null) { accState.add(acc); } } } /** * Splits the input data into global batches of batchSize. After splitting, each global batch is * further split into local batches for downstream operators with each worker has one batch. */ public static <T> DataStream<T[]> generateBatchData( DataStream<T> inputData, final int downStreamParallelism, int batchSize) { return inputData .countWindowAll(batchSize) .apply(new GlobalBatchCreator<>()) .flatMap(new GlobalBatchSplitter<>(downStreamParallelism)) .partitionCustom((chunkId, numPartitions) -> chunkId, x -> x.f0) .map( new MapFunction<Tuple2<Integer, T[]>, T[]>() { @Override public T[] map(Tuple2<Integer, T[]> integerTuple2) throws Exception { return integerTuple2.f1; } }); } /** Splits the input data into global batches. 
*/ private static class GlobalBatchCreator<T> implements AllWindowFunction<T, T[], GlobalWindow> { @Override public void apply(GlobalWindow timeWindow, Iterable<T> iterable, Collector<T[]> collector) { List<T> points = IteratorUtils.toList(iterable.iterator()); collector.collect(points.toArray((T[]) new Object[0])); } } /** * An operator that splits a global batch into evenly-sized local batches, and distributes them * to downstream operator. */ private static class GlobalBatchSplitter<T> implements FlatMapFunction<T[], Tuple2<Integer, T[]>> { private final int downStreamParallelism; public GlobalBatchSplitter(int downStreamParallelism) { this.downStreamParallelism = downStreamParallelism; } @Override public void flatMap(T[] values, Collector<Tuple2<Integer, T[]>> collector) { int div = values.length / downStreamParallelism; int mod = values.length % downStreamParallelism; int offset = 0; int i = 0; int size = div + 1; for (; i < mod; i++) { collector.collect(Tuple2.of(i, Arrays.copyOfRange(values, offset, offset + size))); offset += size; } size = div; for (; i < downStreamParallelism; i++) { collector.collect(Tuple2.of(i, Arrays.copyOfRange(values, offset, offset + size))); offset += size; } } } /* * A stream operator that takes a randomly sampled subset of elements in a bounded data stream. 
*/ private static class SamplingOperator<T> extends AbstractStreamOperator<T> implements OneInputStreamOperator<T, T>, BoundedOneInput { private final int numSamples; private final Random random; private ListState<T> samplesState; private List<T> samples; private ListState<Integer> countState; private int count; SamplingOperator(int numSamples, long randomSeed) { this.numSamples = numSamples; this.random = new Random(randomSeed); } @Override public void initializeState(StateInitializationContext context) throws Exception { super.initializeState(context); ListStateDescriptor<T> samplesDescriptor = new ListStateDescriptor<>( "samplesState", getOperatorConfig() .getTypeSerializerIn(0, getClass().getClassLoader())); samplesState = context.getOperatorStateStore().getListState(samplesDescriptor); samples = new ArrayList<>(numSamples); samplesState.get().forEach(samples::add); ListStateDescriptor<Integer> countDescriptor = new ListStateDescriptor<>("countState", IntSerializer.INSTANCE); countState = context.getOperatorStateStore().getListState(countDescriptor); Iterator<Integer> countIterator = countState.get().iterator(); if (countIterator.hasNext()) { count = countIterator.next(); } else { count = 0; } } @Override public void snapshotState(StateSnapshotContext context) throws Exception { super.snapshotState(context); samplesState.update(samples); countState.update(Collections.singletonList(count)); } @Override public void processElement(StreamRecord<T> streamRecord) throws Exception { T value = streamRecord.getValue(); count++; if (samples.size() < numSamples) { samples.add(value); } else { int index = random.nextInt(count); if (index < numSamples) { samples.set(index, value); } } } @Override public void endInput() throws Exception { for (T sample : samples) { output.collect(new StreamRecord<>(sample)); } } } }
apache/iceberg
37,024
kafka-connect/kafka-connect/src/test/java/org/apache/iceberg/connect/data/TestRecordConverter.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iceberg.connect.data; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import com.fasterxml.jackson.databind.ObjectMapper; import java.math.BigDecimal; import java.nio.ByteBuffer; import java.time.Duration; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.time.temporal.Temporal; import java.util.Base64; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.UUID; import java.util.function.Function; import org.apache.iceberg.FileFormat; import org.apache.iceberg.Table; import org.apache.iceberg.TableProperties; import org.apache.iceberg.connect.IcebergSinkConfig; import org.apache.iceberg.connect.data.SchemaUpdate.AddColumn; import org.apache.iceberg.connect.data.SchemaUpdate.UpdateType; import org.apache.iceberg.data.GenericRecord; import org.apache.iceberg.data.Record; import org.apache.iceberg.mapping.MappedField; import org.apache.iceberg.mapping.NameMapping; import 
org.apache.iceberg.mapping.NameMappingParser; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.Types.BinaryType; import org.apache.iceberg.types.Types.BooleanType; import org.apache.iceberg.types.Types.DateType; import org.apache.iceberg.types.Types.DecimalType; import org.apache.iceberg.types.Types.DoubleType; import org.apache.iceberg.types.Types.FixedType; import org.apache.iceberg.types.Types.FloatType; import org.apache.iceberg.types.Types.IntegerType; import org.apache.iceberg.types.Types.ListType; import org.apache.iceberg.types.Types.LongType; import org.apache.iceberg.types.Types.MapType; import org.apache.iceberg.types.Types.NestedField; import org.apache.iceberg.types.Types.StringType; import org.apache.iceberg.types.Types.StructType; import org.apache.iceberg.types.Types.TimeType; import org.apache.iceberg.types.Types.TimestampType; import org.apache.iceberg.types.Types.UUIDType; import org.apache.iceberg.util.UUIDUtil; import org.apache.kafka.connect.data.Decimal; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.SchemaBuilder; import org.apache.kafka.connect.data.Struct; import org.apache.kafka.connect.data.Time; import org.apache.kafka.connect.data.Timestamp; import org.apache.kafka.connect.json.JsonConverter; import org.apache.kafka.connect.json.JsonConverterConfig; import org.apache.kafka.connect.storage.ConverterConfig; import org.apache.kafka.connect.storage.ConverterType; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; public class 
TestRecordConverter { private static final ObjectMapper MAPPER = new ObjectMapper(); private static final org.apache.iceberg.Schema SCHEMA = new org.apache.iceberg.Schema( NestedField.required(20, "i", IntegerType.get()), NestedField.required(21, "l", LongType.get()), NestedField.required(22, "d", DateType.get()), NestedField.required(23, "t", TimeType.get()), NestedField.required(24, "ts", TimestampType.withoutZone()), NestedField.required(25, "tsz", TimestampType.withZone()), NestedField.required(26, "fl", FloatType.get()), NestedField.required(27, "do", DoubleType.get()), NestedField.required(28, "dec", DecimalType.of(9, 2)), NestedField.required(29, "s", StringType.get()), NestedField.required(30, "b", BooleanType.get()), NestedField.required(31, "u", UUIDType.get()), NestedField.required(32, "f", FixedType.ofLength(3)), NestedField.required(33, "bi", BinaryType.get()), NestedField.required(34, "li", ListType.ofRequired(35, StringType.get())), NestedField.required( 36, "ma", MapType.ofRequired(37, 38, StringType.get(), StringType.get())), NestedField.optional(39, "extra", StringType.get())); // we have 1 unmapped column so exclude that from the count private static final int MAPPED_CNT = SCHEMA.columns().size() - 1; private static final org.apache.iceberg.Schema NESTED_SCHEMA = new org.apache.iceberg.Schema( NestedField.required(1, "ii", IntegerType.get()), NestedField.required(2, "st", SCHEMA.asStruct())); private static final org.apache.iceberg.Schema SIMPLE_SCHEMA = new org.apache.iceberg.Schema( NestedField.required(1, "ii", IntegerType.get()), NestedField.required(2, "st", StringType.get())); private static final org.apache.iceberg.Schema ID_SCHEMA = new org.apache.iceberg.Schema(NestedField.required(1, "ii", IntegerType.get())); private static final org.apache.iceberg.Schema STRUCT_IN_LIST_SCHEMA = new org.apache.iceberg.Schema( NestedField.required(100, "stli", ListType.ofRequired(101, NESTED_SCHEMA.asStruct()))); private static final 
org.apache.iceberg.Schema STRUCT_IN_LIST_BASIC_SCHEMA = new org.apache.iceberg.Schema( NestedField.required(100, "stli", ListType.ofRequired(101, ID_SCHEMA.asStruct()))); private static final org.apache.iceberg.Schema STRUCT_IN_MAP_SCHEMA = new org.apache.iceberg.Schema( NestedField.required( 100, "stma", MapType.ofRequired(101, 102, StringType.get(), NESTED_SCHEMA.asStruct()))); private static final org.apache.iceberg.Schema STRUCT_IN_MAP_BASIC_SCHEMA = new org.apache.iceberg.Schema( NestedField.required( 100, "stma", MapType.ofRequired(101, 102, StringType.get(), ID_SCHEMA.asStruct()))); private static final Schema CONNECT_SCHEMA = SchemaBuilder.struct() .field("i", Schema.INT32_SCHEMA) .field("l", Schema.INT64_SCHEMA) .field("d", org.apache.kafka.connect.data.Date.SCHEMA) .field("t", Time.SCHEMA) .field("ts", Timestamp.SCHEMA) .field("tsz", Timestamp.SCHEMA) .field("fl", Schema.FLOAT32_SCHEMA) .field("do", Schema.FLOAT64_SCHEMA) .field("dec", Decimal.schema(2)) .field("s", Schema.STRING_SCHEMA) .field("b", Schema.BOOLEAN_SCHEMA) .field("u", Schema.STRING_SCHEMA) .field("f", Schema.BYTES_SCHEMA) .field("bi", Schema.BYTES_SCHEMA) .field("li", SchemaBuilder.array(Schema.STRING_SCHEMA)) .field("ma", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA)); private static final Schema CONNECT_NESTED_SCHEMA = SchemaBuilder.struct().field("ii", Schema.INT32_SCHEMA).field("st", CONNECT_SCHEMA); private static final Schema CONNECT_STRUCT_IN_LIST_SCHEMA = SchemaBuilder.struct().field("stli", SchemaBuilder.array(CONNECT_NESTED_SCHEMA)).build(); private static final Schema CONNECT_STRUCT_IN_MAP_SCHEMA = SchemaBuilder.struct() .field("stma", SchemaBuilder.map(Schema.STRING_SCHEMA, CONNECT_NESTED_SCHEMA)) .build(); private static final LocalDate DATE_VAL = LocalDate.parse("2023-05-18"); private static final LocalTime TIME_VAL = LocalTime.parse("07:14:21"); private static final LocalDateTime TS_VAL = LocalDateTime.parse("2023-05-18T07:14:21"); private static final 
OffsetDateTime TSZ_VAL = OffsetDateTime.parse("2023-05-18T07:14:21Z"); private static final BigDecimal DEC_VAL = new BigDecimal("12.34"); private static final String STR_VAL = "foobar"; private static final UUID UUID_VAL = UUID.randomUUID(); private static final ByteBuffer BYTES_VAL = ByteBuffer.wrap(new byte[] {1, 2, 3}); private static final List<String> LIST_VAL = ImmutableList.of("hello", "world"); private static final Map<String, String> MAP_VAL = ImmutableMap.of("one", "1", "two", "2"); private static final JsonConverter JSON_CONVERTER = new JsonConverter(); private IcebergSinkConfig config; @BeforeAll public static void beforeAll() { JSON_CONVERTER.configure( ImmutableMap.of( JsonConverterConfig.SCHEMAS_ENABLE_CONFIG, false, ConverterConfig.TYPE_CONFIG, ConverterType.VALUE.getName())); } @BeforeEach public void before() { this.config = mock(IcebergSinkConfig.class); when(config.jsonConverter()).thenReturn(JSON_CONVERTER); } @Test public void testMapConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = createMapData(); Record record = converter.convert(data); assertRecordValues(record); } @Test public void testUUIDConversionWithParquet() { Table table = mock(Table.class); when(table.schema()) .thenReturn(new org.apache.iceberg.Schema(NestedField.required(1, "uuid", UUIDType.get()))); when(config.writeProps()) .thenReturn( ImmutableMap.of( TableProperties.DEFAULT_FILE_FORMAT, FileFormat.PARQUET.name().toLowerCase(Locale.ROOT))); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = ImmutableMap.<String, Object>builder().put("uuid", UUID_VAL.toString()).build(); Record record = converter.convert(data); assertThat(record.getField("uuid")).isEqualTo(UUIDUtil.convert(UUID_VAL)); } @Test public void testNestedMapConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(NESTED_SCHEMA); 
RecordConverter converter = new RecordConverter(table, config); Map<String, Object> nestedData = createNestedMapData(); Record record = converter.convert(nestedData); assertNestedRecordValues(record); } @Test @SuppressWarnings("unchecked") public void testMapToString() throws Exception { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> nestedData = createNestedMapData(); Record record = converter.convert(nestedData); String str = (String) record.getField("st"); Map<String, Object> map = (Map<String, Object>) MAPPER.readValue(str, Map.class); assertThat(map).hasSize(MAPPED_CNT); } @Test public void testMapValueInListConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_LIST_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = createNestedMapData(); Record record = converter.convert(ImmutableMap.of("stli", ImmutableList.of(data, data))); List<?> fieldVal = (List<?>) record.getField("stli"); Record elementVal = (Record) fieldVal.get(0); assertNestedRecordValues(elementVal); } @Test public void testMapValueInMapConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_MAP_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = createNestedMapData(); Record record = converter.convert(ImmutableMap.of("stma", ImmutableMap.of("key1", data, "key2", data))); Map<?, ?> fieldVal = (Map<?, ?>) record.getField("stma"); Record mapVal = (Record) fieldVal.get("key1"); assertNestedRecordValues(mapVal); } @Test public void testStructConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct data = createStructData(); Record record = converter.convert(data); assertRecordValues(record); } @Test public void 
testNestedStructConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(NESTED_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct nestedData = createNestedStructData(); Record record = converter.convert(nestedData); assertNestedRecordValues(record); } @Test @SuppressWarnings("unchecked") public void testStructToString() throws Exception { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct nestedData = createNestedStructData(); Record record = converter.convert(nestedData); String str = (String) record.getField("st"); Map<String, Object> map = (Map<String, Object>) MAPPER.readValue(str, Map.class); assertThat(map).hasSize(MAPPED_CNT); } @Test public void testStructValueInListConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_LIST_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct data = createNestedStructData(); Struct struct = new Struct(CONNECT_STRUCT_IN_LIST_SCHEMA).put("stli", ImmutableList.of(data, data)); Record record = converter.convert(struct); List<?> fieldVal = (List<?>) record.getField("stli"); Record elementVal = (Record) fieldVal.get(0); assertNestedRecordValues(elementVal); } @Test public void testStructValueInMapConvert() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_MAP_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct data = createNestedStructData(); Struct struct = new Struct(CONNECT_STRUCT_IN_MAP_SCHEMA) .put("stma", ImmutableMap.of("key1", data, "key2", data)); Record record = converter.convert(struct); Map<?, ?> fieldVal = (Map<?, ?>) record.getField("stma"); Record mapVal = (Record) fieldVal.get("key1"); assertNestedRecordValues(mapVal); } @Test public void testNameMapping() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); NameMapping 
nameMapping = NameMapping.of(MappedField.of(1, ImmutableList.of("renamed_ii"))); when(table.properties()) .thenReturn( ImmutableMap.of( TableProperties.DEFAULT_NAME_MAPPING, NameMappingParser.toJson(nameMapping))); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = ImmutableMap.of("renamed_ii", 123); Record record = converter.convert(data); assertThat(record.getField("ii")).isEqualTo(123); } @ParameterizedTest @ValueSource(booleans = {false, true}) public void testCaseSensitivity(boolean caseInsensitive) { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); when(config.schemaCaseInsensitive()).thenReturn(caseInsensitive); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> mapData = ImmutableMap.of("II", 123); Record record1 = converter.convert(mapData); Struct structData = new Struct(SchemaBuilder.struct().field("II", Schema.INT32_SCHEMA).build()).put("II", 123); Record record2 = converter.convert(structData); if (caseInsensitive) { assertThat(record1.getField("ii")).isEqualTo(123); assertThat(record2.getField("ii")).isEqualTo(123); } else { assertThat(record1.getField("ii")).isEqualTo(null); assertThat(record2.getField("ii")).isEqualTo(null); } } @Test public void testIntConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); int expectedInt = 123; ImmutableList.of("123", 123.0f, 123.0d, 123L, expectedInt) .forEach( input -> { int val = converter.convertInt(input); assertThat(val).isEqualTo(expectedInt); }); } @Test public void testLongConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); long expectedLong = 123L; ImmutableList.of("123", 123.0f, 123.0d, 123, expectedLong) .forEach( input -> { long val = converter.convertLong(input); 
assertThat(val).isEqualTo(expectedLong); }); } @Test public void testFloatConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); float expectedFloat = 123f; ImmutableList.of("123", 123, 123L, 123d, expectedFloat) .forEach( input -> { float val = converter.convertFloat(input); assertThat(val).isEqualTo(expectedFloat); }); } @Test public void testDoubleConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); double expectedDouble = 123d; ImmutableList.of("123", 123, 123L, 123f, expectedDouble) .forEach( input -> { double val = converter.convertDouble(input); assertThat(val).isEqualTo(expectedDouble); }); } @Test public void testDecimalConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); BigDecimal expected = new BigDecimal("123.45"); ImmutableList.of("123.45", 123.45d, expected) .forEach( input -> { BigDecimal decimal = converter.convertDecimal(input, DecimalType.of(10, 2)); assertThat(decimal).isEqualTo(expected); }); BigDecimal expected2 = new BigDecimal(123); ImmutableList.of("123", 123, expected2) .forEach( input -> { BigDecimal decimal = converter.convertDecimal(input, DecimalType.of(10, 0)); assertThat(decimal).isEqualTo(expected2); }); } @Test public void testDateConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); LocalDate expected = LocalDate.of(2023, 11, 15); List<Object> inputList = ImmutableList.of( "2023-11-15", expected.toEpochDay(), expected, new Date(Duration.ofDays(expected.toEpochDay()).toMillis())); inputList.forEach( input -> { Temporal ts = converter.convertDateValue(input); assertThat(ts).isEqualTo(expected); }); } @Test public 
void testTimeConversion() { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); LocalTime expected = LocalTime.of(7, 51, 30, 888_000_000); List<Object> inputList = ImmutableList.of( "07:51:30.888", expected.toNanoOfDay() / 1000 / 1000, expected, new Date(expected.toNanoOfDay() / 1000 / 1000)); inputList.forEach( input -> { Temporal ts = converter.convertTimeValue(input); assertThat(ts).isEqualTo(expected); }); } @Test public void testTimestampWithZoneConversion() { OffsetDateTime expected = OffsetDateTime.parse("2023-05-18T11:22:33Z"); long expectedMillis = expected.toInstant().toEpochMilli(); assertTimestampConvert(expected, expectedMillis, TimestampType.withZone()); // zone should be respected expected = OffsetDateTime.parse("2023-05-18T03:22:33-08:00"); List<Object> additionalInput = ImmutableList.of( "2023-05-18T03:22:33-08", "2023-05-18 03:22:33-08", "2023-05-18T03:22:33-08:00", "2023-05-18 03:22:33-08:00", "2023-05-18T03:22:33-0800", "2023-05-18 03:22:33-0800"); assertTimestampConvert(expected, additionalInput, TimestampType.withZone()); } @Test public void testTimestampWithoutZoneConversion() { LocalDateTime expected = LocalDateTime.parse("2023-05-18T11:22:33"); long expectedMillis = expected.atZone(ZoneOffset.UTC).toInstant().toEpochMilli(); assertTimestampConvert(expected, expectedMillis, TimestampType.withoutZone()); // zone should be ignored List<Object> additionalInput = ImmutableList.of( "2023-05-18T11:22:33-08", "2023-05-18 11:22:33-08", "2023-05-18T11:22:33-08:00", "2023-05-18 11:22:33-08:00", "2023-05-18T11:22:33-0800", "2023-05-18 11:22:33-0800"); assertTimestampConvert(expected, additionalInput, TimestampType.withoutZone()); } private void assertTimestampConvert(Temporal expected, long expectedMillis, TimestampType type) { List<Object> inputList = Lists.newArrayList( "2023-05-18T11:22:33Z", "2023-05-18 11:22:33Z", "2023-05-18T11:22:33+00", "2023-05-18 
11:22:33+00", "2023-05-18T11:22:33+00:00", "2023-05-18 11:22:33+00:00", "2023-05-18T11:22:33+0000", "2023-05-18 11:22:33+0000", "2023-05-18T11:22:33", "2023-05-18 11:22:33", expectedMillis, new Date(expectedMillis), OffsetDateTime.ofInstant(Instant.ofEpochMilli(expectedMillis), ZoneOffset.UTC), LocalDateTime.ofInstant(Instant.ofEpochMilli(expectedMillis), ZoneOffset.UTC)); assertTimestampConvert(expected, inputList, type); } private void assertTimestampConvert( Temporal expected, List<Object> inputList, TimestampType type) { Table table = mock(Table.class); when(table.schema()).thenReturn(SIMPLE_SCHEMA); RecordConverter converter = new RecordConverter(table, config); inputList.forEach( input -> { Temporal ts = converter.convertTimestampValue(input, type); assertThat(ts).isEqualTo(expected); }); } @Test public void testMissingColumnDetectionMap() { Table table = mock(Table.class); when(table.schema()).thenReturn(ID_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> data = Maps.newHashMap(createMapData()); data.put("null", null); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(data, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(MAPPED_CNT); Map<String, AddColumn> newColMap = Maps.newHashMap(); addCols.forEach(addCol -> newColMap.put(addCol.name(), addCol)); assertTypesAddedFromMap(col -> newColMap.get(col).type()); // null values should be ignored assertThat(newColMap).doesNotContainKey("null"); } @Test public void testMissingColumnDetectionMapNested() { Table table = mock(Table.class); when(table.schema()).thenReturn(ID_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> nestedData = createNestedMapData(); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(nestedData, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(1); AddColumn 
addCol = addCols.iterator().next(); assertThat(addCol.name()).isEqualTo("st"); StructType addedType = addCol.type().asStructType(); assertThat(addedType.fields()).hasSize(MAPPED_CNT); assertTypesAddedFromMap(col -> addedType.field(col).type()); } @Test public void testMissingColumnDetectionMapListValue() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_LIST_BASIC_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Map<String, Object> nestedData = createNestedMapData(); Map<String, Object> map = ImmutableMap.of("stli", ImmutableList.of(nestedData, nestedData)); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(map, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(1); AddColumn addCol = addCols.iterator().next(); assertThat(addCol.parentName()).isEqualTo("stli.element"); assertThat(addCol.name()).isEqualTo("st"); StructType nestedElementType = addCol.type().asStructType(); assertThat(nestedElementType.fields()).hasSize(MAPPED_CNT); assertTypesAddedFromMap(col -> nestedElementType.field(col).type()); } private void assertTypesAddedFromMap(Function<String, Type> fn) { assertThat(fn.apply("i")).isInstanceOf(LongType.class); assertThat(fn.apply("l")).isInstanceOf(LongType.class); assertThat(fn.apply("d")).isInstanceOf(StringType.class); assertThat(fn.apply("t")).isInstanceOf(StringType.class); assertThat(fn.apply("ts")).isInstanceOf(StringType.class); assertThat(fn.apply("tsz")).isInstanceOf(StringType.class); assertThat(fn.apply("fl")).isInstanceOf(DoubleType.class); assertThat(fn.apply("do")).isInstanceOf(DoubleType.class); assertThat(fn.apply("dec")).isInstanceOf(StringType.class); assertThat(fn.apply("s")).isInstanceOf(StringType.class); assertThat(fn.apply("b")).isInstanceOf(BooleanType.class); assertThat(fn.apply("u")).isInstanceOf(StringType.class); assertThat(fn.apply("f")).isInstanceOf(StringType.class); 
assertThat(fn.apply("bi")).isInstanceOf(StringType.class); assertThat(fn.apply("li")).isInstanceOf(ListType.class); assertThat(fn.apply("ma")).isInstanceOf(StructType.class); } @Test public void testMissingColumnDetectionStruct() { Table table = mock(Table.class); when(table.schema()).thenReturn(ID_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct data = createStructData(); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(data, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(MAPPED_CNT); Map<String, AddColumn> newColMap = Maps.newHashMap(); addCols.forEach(addCol -> newColMap.put(addCol.name(), addCol)); assertTypesAddedFromStruct(col -> newColMap.get(col).type()); } @Test public void testMissingColumnDetectionStructNested() { Table table = mock(Table.class); when(table.schema()).thenReturn(ID_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct nestedData = createNestedStructData(); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(nestedData, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(1); AddColumn addCol = addCols.iterator().next(); assertThat(addCol.name()).isEqualTo("st"); StructType addedType = addCol.type().asStructType(); assertThat(addedType.fields()).hasSize(MAPPED_CNT); assertTypesAddedFromStruct(col -> addedType.field(col).type()); } @Test public void testMissingColumnDetectionStructListValue() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_LIST_BASIC_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct nestedData = createNestedStructData(); Struct struct = new Struct(CONNECT_STRUCT_IN_LIST_SCHEMA) .put("stli", ImmutableList.of(nestedData, nestedData)); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(struct, consumer); Collection<AddColumn> addCols = 
consumer.addColumns(); assertThat(addCols).hasSize(1); AddColumn addCol = addCols.iterator().next(); assertThat(addCol.parentName()).isEqualTo("stli.element"); assertThat(addCol.name()).isEqualTo("st"); StructType nestedElementType = addCol.type().asStructType(); assertThat(nestedElementType.fields()).hasSize(MAPPED_CNT); assertTypesAddedFromStruct(col -> nestedElementType.field(col).type()); } @Test public void testMissingColumnDetectionStructMapValue() { Table table = mock(Table.class); when(table.schema()).thenReturn(STRUCT_IN_MAP_BASIC_SCHEMA); RecordConverter converter = new RecordConverter(table, config); Struct nestedData = createNestedStructData(); Struct struct = new Struct(CONNECT_STRUCT_IN_MAP_SCHEMA) .put("stma", ImmutableMap.of("key1", nestedData, "key2", nestedData)); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(struct, consumer); Collection<AddColumn> addCols = consumer.addColumns(); assertThat(addCols).hasSize(1); AddColumn addCol = addCols.iterator().next(); assertThat(addCol.parentName()).isEqualTo("stma.value"); assertThat(addCol.name()).isEqualTo("st"); StructType nestedValueType = addCol.type().asStructType(); assertThat(nestedValueType.fields()).hasSize(MAPPED_CNT); assertTypesAddedFromStruct(col -> nestedValueType.field(col).type()); } private void assertTypesAddedFromStruct(Function<String, Type> fn) { assertThat(fn.apply("i")).isInstanceOf(IntegerType.class); assertThat(fn.apply("l")).isInstanceOf(LongType.class); assertThat(fn.apply("d")).isInstanceOf(DateType.class); assertThat(fn.apply("t")).isInstanceOf(TimeType.class); assertThat(fn.apply("ts")).isInstanceOf(TimestampType.class); assertThat(fn.apply("tsz")).isInstanceOf(TimestampType.class); assertThat(fn.apply("fl")).isInstanceOf(FloatType.class); assertThat(fn.apply("do")).isInstanceOf(DoubleType.class); assertThat(fn.apply("dec")).isInstanceOf(DecimalType.class); assertThat(fn.apply("s")).isInstanceOf(StringType.class); 
assertThat(fn.apply("b")).isInstanceOf(BooleanType.class); assertThat(fn.apply("u")).isInstanceOf(StringType.class); assertThat(fn.apply("f")).isInstanceOf(BinaryType.class); assertThat(fn.apply("bi")).isInstanceOf(BinaryType.class); assertThat(fn.apply("li")).isInstanceOf(ListType.class); assertThat(fn.apply("ma")).isInstanceOf(MapType.class); } @Test public void testEvolveTypeDetectionStruct() { org.apache.iceberg.Schema tableSchema = new org.apache.iceberg.Schema( NestedField.required(1, "ii", IntegerType.get()), NestedField.required(2, "ff", FloatType.get())); Table table = mock(Table.class); when(table.schema()).thenReturn(tableSchema); RecordConverter converter = new RecordConverter(table, config); Schema valueSchema = SchemaBuilder.struct().field("ii", Schema.INT64_SCHEMA).field("ff", Schema.FLOAT64_SCHEMA); Struct data = new Struct(valueSchema).put("ii", 11L).put("ff", 22d); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(data, consumer); Collection<UpdateType> updates = consumer.updateTypes(); assertThat(updates).hasSize(2); Map<String, UpdateType> updateMap = Maps.newHashMap(); updates.forEach(update -> updateMap.put(update.name(), update)); assertThat(updateMap.get("ii").type()).isInstanceOf(LongType.class); assertThat(updateMap.get("ff").type()).isInstanceOf(DoubleType.class); } @Test public void testEvolveTypeDetectionStructNested() { org.apache.iceberg.Schema structColSchema = new org.apache.iceberg.Schema( NestedField.required(1, "ii", IntegerType.get()), NestedField.required(2, "ff", FloatType.get())); org.apache.iceberg.Schema tableSchema = new org.apache.iceberg.Schema( NestedField.required(3, "i", IntegerType.get()), NestedField.required(4, "st", structColSchema.asStruct())); Table table = mock(Table.class); when(table.schema()).thenReturn(tableSchema); RecordConverter converter = new RecordConverter(table, config); Schema structSchema = SchemaBuilder.struct().field("ii", Schema.INT64_SCHEMA).field("ff", 
Schema.FLOAT64_SCHEMA); Schema schema = SchemaBuilder.struct().field("i", Schema.INT32_SCHEMA).field("st", structSchema); Struct structValue = new Struct(structSchema).put("ii", 11L).put("ff", 22d); Struct data = new Struct(schema).put("i", 1).put("st", structValue); SchemaUpdate.Consumer consumer = new SchemaUpdate.Consumer(); converter.convert(data, consumer); Collection<UpdateType> updates = consumer.updateTypes(); assertThat(updates).hasSize(2); Map<String, UpdateType> updateMap = Maps.newHashMap(); updates.forEach(update -> updateMap.put(update.name(), update)); assertThat(updateMap.get("st.ii").type()).isInstanceOf(LongType.class); assertThat(updateMap.get("st.ff").type()).isInstanceOf(DoubleType.class); } public static Map<String, Object> createMapData() { return ImmutableMap.<String, Object>builder() .put("i", 1) .put("l", 2L) .put("d", DATE_VAL.toString()) .put("t", TIME_VAL.toString()) .put("ts", TS_VAL.toString()) .put("tsz", TSZ_VAL.toString()) .put("fl", 1.1f) .put("do", 2.2d) .put("dec", DEC_VAL.toString()) .put("s", STR_VAL) .put("b", true) .put("u", UUID_VAL.toString()) .put("f", Base64.getEncoder().encodeToString(BYTES_VAL.array())) .put("bi", Base64.getEncoder().encodeToString(BYTES_VAL.array())) .put("li", LIST_VAL) .put("ma", MAP_VAL) .build(); } private Map<String, Object> createNestedMapData() { return ImmutableMap.<String, Object>builder().put("ii", 11).put("st", createMapData()).build(); } private Struct createStructData() { return new Struct(CONNECT_SCHEMA) .put("i", 1) .put("l", 2L) .put("d", new Date(DATE_VAL.toEpochDay() * 24 * 60 * 60 * 1000L)) .put("t", new Date(TIME_VAL.toNanoOfDay() / 1_000_000)) .put("ts", Date.from(TS_VAL.atZone(ZoneOffset.UTC).toInstant())) .put("tsz", Date.from(TSZ_VAL.toInstant())) .put("fl", 1.1f) .put("do", 2.2d) .put("dec", DEC_VAL) .put("s", STR_VAL) .put("b", true) .put("u", UUID_VAL.toString()) .put("f", BYTES_VAL) .put("bi", BYTES_VAL) .put("li", LIST_VAL) .put("ma", MAP_VAL); } private Struct 
createNestedStructData() { return new Struct(CONNECT_NESTED_SCHEMA).put("ii", 11).put("st", createStructData()); } private void assertRecordValues(Record record) { GenericRecord rec = (GenericRecord) record; assertThat(rec.getField("i")).isEqualTo(1); assertThat(rec.getField("l")).isEqualTo(2L); assertThat(rec.getField("d")).isEqualTo(DATE_VAL); assertThat(rec.getField("t")).isEqualTo(TIME_VAL); assertThat(rec.getField("ts")).isEqualTo(TS_VAL); assertThat(rec.getField("tsz")).isEqualTo(TSZ_VAL); assertThat(rec.getField("fl")).isEqualTo(1.1f); assertThat(rec.getField("do")).isEqualTo(2.2d); assertThat(rec.getField("dec")).isEqualTo(DEC_VAL); assertThat(rec.getField("s")).isEqualTo(STR_VAL); assertThat(rec.getField("b")).isEqualTo(true); assertThat(rec.getField("f")).isEqualTo(BYTES_VAL.array()); assertThat(rec.getField("bi")).isEqualTo(BYTES_VAL); assertThat(rec.getField("li")).isEqualTo(LIST_VAL); assertThat(rec.getField("ma")).isEqualTo(MAP_VAL); assertThat(rec.getField("u")).isEqualTo(UUID_VAL); } private void assertNestedRecordValues(Record record) { GenericRecord rec = (GenericRecord) record; assertThat(rec.getField("ii")).isEqualTo(11); assertRecordValues((GenericRecord) rec.getField("st")); } }
googleapis/google-cloud-java
37,258
java-networkconnectivity/proto-google-cloud-networkconnectivity-v1/src/main/java/com/google/cloud/networkconnectivity/v1/ServiceConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/networkconnectivity/v1/data_transfer.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.networkconnectivity.v1; /** * * * <pre> * Specifies eligibility information for the service. * </pre> * * Protobuf type {@code google.cloud.networkconnectivity.v1.ServiceConfig} */ public final class ServiceConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.networkconnectivity.v1.ServiceConfig) ServiceConfigOrBuilder { private static final long serialVersionUID = 0L; // Use ServiceConfig.newBuilder() to construct. 
private ServiceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ServiceConfig() { eligibilityCriteria_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ServiceConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkconnectivity.v1.DataTransferProto .internal_static_google_cloud_networkconnectivity_v1_ServiceConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkconnectivity.v1.DataTransferProto .internal_static_google_cloud_networkconnectivity_v1_ServiceConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkconnectivity.v1.ServiceConfig.class, com.google.cloud.networkconnectivity.v1.ServiceConfig.Builder.class); } /** * * * <pre> * The eligibility information for the service. * </pre> * * Protobuf enum {@code google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria} */ public enum EligibilityCriteria implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * The service is not eligible for Data Transfer Essentials configuration. * This is the default case. * </pre> * * <code>ELIGIBILITY_CRITERIA_UNSPECIFIED = 0;</code> */ ELIGIBILITY_CRITERIA_UNSPECIFIED(0), /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for Premium Tier. * </pre> * * <code>NETWORK_SERVICE_TIER_PREMIUM_ONLY = 1;</code> */ NETWORK_SERVICE_TIER_PREMIUM_ONLY(1), /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for Standard Tier. 
* </pre> * * <code>NETWORK_SERVICE_TIER_STANDARD_ONLY = 2;</code> */ NETWORK_SERVICE_TIER_STANDARD_ONLY(2), /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for the regional endpoint. * </pre> * * <code>REQUEST_ENDPOINT_REGIONAL_ENDPOINT_ONLY = 3;</code> */ REQUEST_ENDPOINT_REGIONAL_ENDPOINT_ONLY(3), UNRECOGNIZED(-1), ; /** * * * <pre> * The service is not eligible for Data Transfer Essentials configuration. * This is the default case. * </pre> * * <code>ELIGIBILITY_CRITERIA_UNSPECIFIED = 0;</code> */ public static final int ELIGIBILITY_CRITERIA_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for Premium Tier. * </pre> * * <code>NETWORK_SERVICE_TIER_PREMIUM_ONLY = 1;</code> */ public static final int NETWORK_SERVICE_TIER_PREMIUM_ONLY_VALUE = 1; /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for Standard Tier. * </pre> * * <code>NETWORK_SERVICE_TIER_STANDARD_ONLY = 2;</code> */ public static final int NETWORK_SERVICE_TIER_STANDARD_ONLY_VALUE = 2; /** * * * <pre> * The service is eligible for Data Transfer Essentials configuration only * for the regional endpoint. * </pre> * * <code>REQUEST_ENDPOINT_REGIONAL_ENDPOINT_ONLY = 3;</code> */ public static final int REQUEST_ENDPOINT_REGIONAL_ENDPOINT_ONLY_VALUE = 3; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static EligibilityCriteria valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static EligibilityCriteria forNumber(int value) { switch (value) { case 0: return ELIGIBILITY_CRITERIA_UNSPECIFIED; case 1: return NETWORK_SERVICE_TIER_PREMIUM_ONLY; case 2: return NETWORK_SERVICE_TIER_STANDARD_ONLY; case 3: return REQUEST_ENDPOINT_REGIONAL_ENDPOINT_ONLY; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<EligibilityCriteria> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<EligibilityCriteria> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<EligibilityCriteria>() { public EligibilityCriteria findValueByNumber(int number) { return EligibilityCriteria.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.networkconnectivity.v1.ServiceConfig.getDescriptor() .getEnumTypes() .get(0); } private static final EligibilityCriteria[] VALUES = values(); public static EligibilityCriteria valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private EligibilityCriteria(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria) } private int bitField0_; public static final int ELIGIBILITY_CRITERIA_FIELD_NUMBER = 1; private int eligibilityCriteria_ = 
0; /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for eligibilityCriteria. */ @java.lang.Override public int getEligibilityCriteriaValue() { return eligibilityCriteria_; } /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The eligibilityCriteria. */ @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria getEligibilityCriteria() { com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria result = com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria.forNumber( eligibilityCriteria_); return result == null ? com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria.UNRECOGNIZED : result; } public static final int SUPPORT_END_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp supportEndTime_; /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the supportEndTime field is set. */ @java.lang.Override public boolean hasSupportEndTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The supportEndTime. 
*/ @java.lang.Override public com.google.protobuf.Timestamp getSupportEndTime() { return supportEndTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : supportEndTime_; } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getSupportEndTimeOrBuilder() { return supportEndTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : supportEndTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (eligibilityCriteria_ != com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria .ELIGIBILITY_CRITERIA_UNSPECIFIED .getNumber()) { output.writeEnum(1, eligibilityCriteria_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getSupportEndTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (eligibilityCriteria_ != com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria .ELIGIBILITY_CRITERIA_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, eligibilityCriteria_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getSupportEndTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == 
this) { return true; } if (!(obj instanceof com.google.cloud.networkconnectivity.v1.ServiceConfig)) { return super.equals(obj); } com.google.cloud.networkconnectivity.v1.ServiceConfig other = (com.google.cloud.networkconnectivity.v1.ServiceConfig) obj; if (eligibilityCriteria_ != other.eligibilityCriteria_) return false; if (hasSupportEndTime() != other.hasSupportEndTime()) return false; if (hasSupportEndTime()) { if (!getSupportEndTime().equals(other.getSupportEndTime())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + ELIGIBILITY_CRITERIA_FIELD_NUMBER; hash = (53 * hash) + eligibilityCriteria_; if (hasSupportEndTime()) { hash = (37 * hash) + SUPPORT_END_TIME_FIELD_NUMBER; hash = (53 * hash) + getSupportEndTime().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.networkconnectivity.v1.ServiceConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Specifies eligibility information for the service. * </pre> * * Protobuf type {@code google.cloud.networkconnectivity.v1.ServiceConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.networkconnectivity.v1.ServiceConfig) com.google.cloud.networkconnectivity.v1.ServiceConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.networkconnectivity.v1.DataTransferProto .internal_static_google_cloud_networkconnectivity_v1_ServiceConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.networkconnectivity.v1.DataTransferProto .internal_static_google_cloud_networkconnectivity_v1_ServiceConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.networkconnectivity.v1.ServiceConfig.class, com.google.cloud.networkconnectivity.v1.ServiceConfig.Builder.class); } // Construct using com.google.cloud.networkconnectivity.v1.ServiceConfig.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSupportEndTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; eligibilityCriteria_ = 0; supportEndTime_ = null; if (supportEndTimeBuilder_ != null) { supportEndTimeBuilder_.dispose(); supportEndTimeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.networkconnectivity.v1.DataTransferProto .internal_static_google_cloud_networkconnectivity_v1_ServiceConfig_descriptor; } @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig getDefaultInstanceForType() { return com.google.cloud.networkconnectivity.v1.ServiceConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig build() { com.google.cloud.networkconnectivity.v1.ServiceConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig buildPartial() { com.google.cloud.networkconnectivity.v1.ServiceConfig result = new com.google.cloud.networkconnectivity.v1.ServiceConfig(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.networkconnectivity.v1.ServiceConfig result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.eligibilityCriteria_ = eligibilityCriteria_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.supportEndTime_ = supportEndTimeBuilder_ == null ? 
supportEndTime_ : supportEndTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.networkconnectivity.v1.ServiceConfig) { return mergeFrom((com.google.cloud.networkconnectivity.v1.ServiceConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.networkconnectivity.v1.ServiceConfig other) { if (other == com.google.cloud.networkconnectivity.v1.ServiceConfig.getDefaultInstance()) return this; if (other.eligibilityCriteria_ != 0) { setEligibilityCriteriaValue(other.getEligibilityCriteriaValue()); } if (other.hasSupportEndTime()) { mergeSupportEndTime(other.getSupportEndTime()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { eligibilityCriteria_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { input.readMessage(getSupportEndTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int eligibilityCriteria_ = 0; /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for eligibilityCriteria. */ @java.lang.Override public int getEligibilityCriteriaValue() { return eligibilityCriteria_; } /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for eligibilityCriteria to set. * @return This builder for chaining. */ public Builder setEligibilityCriteriaValue(int value) { eligibilityCriteria_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. The eligibility criteria for the service. 
* </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The eligibilityCriteria. */ @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria getEligibilityCriteria() { com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria result = com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria.forNumber( eligibilityCriteria_); return result == null ? com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria.UNRECOGNIZED : result; } /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The eligibilityCriteria to set. * @return This builder for chaining. */ public Builder setEligibilityCriteria( com.google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; eligibilityCriteria_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. The eligibility criteria for the service. * </pre> * * <code> * .google.cloud.networkconnectivity.v1.ServiceConfig.EligibilityCriteria eligibility_criteria = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearEligibilityCriteria() { bitField0_ = (bitField0_ & ~0x00000001); eligibilityCriteria_ = 0; onChanged(); return this; } private com.google.protobuf.Timestamp supportEndTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> supportEndTimeBuilder_; /** * * * <pre> * Output only. 
The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the supportEndTime field is set. */ public boolean hasSupportEndTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The supportEndTime. */ public com.google.protobuf.Timestamp getSupportEndTime() { if (supportEndTimeBuilder_ == null) { return supportEndTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : supportEndTime_; } else { return supportEndTimeBuilder_.getMessage(); } } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setSupportEndTime(com.google.protobuf.Timestamp value) { if (supportEndTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } supportEndTime_ = value; } else { supportEndTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. 
* </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setSupportEndTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (supportEndTimeBuilder_ == null) { supportEndTime_ = builderForValue.build(); } else { supportEndTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeSupportEndTime(com.google.protobuf.Timestamp value) { if (supportEndTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && supportEndTime_ != null && supportEndTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getSupportEndTimeBuilder().mergeFrom(value); } else { supportEndTime_ = value; } } else { supportEndTimeBuilder_.mergeFrom(value); } if (supportEndTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearSupportEndTime() { bitField0_ = (bitField0_ & ~0x00000002); supportEndTime_ = null; if (supportEndTimeBuilder_ != null) { supportEndTimeBuilder_.dispose(); supportEndTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. 
* </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.Timestamp.Builder getSupportEndTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getSupportEndTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.protobuf.TimestampOrBuilder getSupportEndTimeOrBuilder() { if (supportEndTimeBuilder_ != null) { return supportEndTimeBuilder_.getMessageOrBuilder(); } else { return supportEndTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : supportEndTime_; } } /** * * * <pre> * Output only. The end time for eligibility criteria support. If not * specified, no planned end time is set. * </pre> * * <code> * .google.protobuf.Timestamp support_end_time = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getSupportEndTimeFieldBuilder() { if (supportEndTimeBuilder_ == null) { supportEndTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getSupportEndTime(), getParentForChildren(), isClean()); supportEndTime_ = null; } return supportEndTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.networkconnectivity.v1.ServiceConfig) } // @@protoc_insertion_point(class_scope:google.cloud.networkconnectivity.v1.ServiceConfig) private static final com.google.cloud.networkconnectivity.v1.ServiceConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.networkconnectivity.v1.ServiceConfig(); } public static com.google.cloud.networkconnectivity.v1.ServiceConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ServiceConfig> PARSER = new com.google.protobuf.AbstractParser<ServiceConfig>() { @java.lang.Override public ServiceConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ServiceConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ServiceConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.networkconnectivity.v1.ServiceConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-tuweni
37,359
crypto/src/main/java/org/apache/tuweni/crypto/SECP256K1.java
/* * Copyright 2011 Google Inc. * Copyright 2014 Andreas Schildbach * Copyright 2014-2016 the libsecp256k1 contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tuweni.crypto; import static java.nio.file.StandardOpenOption.READ; import static org.apache.tuweni.crypto.Hash.keccak256; import static org.apache.tuweni.crypto.SECP256K1.Parameters.CURVE; import static org.apache.tuweni.crypto.SECP256K1.Parameters.PARAMETER_SPEC; import static org.apache.tuweni.io.file.Files.atomicReplace; import org.apache.tuweni.bytes.Bytes; import org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.bytes.MutableBytes; import org.apache.tuweni.units.bigints.UInt256; import java.io.IOException; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.CharBuffer; import java.nio.channels.FileChannel; import java.nio.file.Path; import java.security.InvalidAlgorithmParameterException; import java.security.KeyFactory; import java.security.KeyPairGenerator; import java.security.NoSuchAlgorithmException; import java.security.NoSuchProviderException; import java.security.SecureRandom; import java.security.spec.ECGenParameterSpec; import java.util.Arrays; import java.util.Objects; import javax.crypto.Cipher; import javax.security.auth.Destroyable; import org.bouncycastle.asn1.sec.SECNamedCurves; import org.bouncycastle.asn1.x9.X9ECParameters; import org.bouncycastle.asn1.x9.X9IntegerConverter; import org.bouncycastle.crypto.agreement.ECDHBasicAgreement; import 
org.bouncycastle.crypto.digests.SHA256Digest; import org.bouncycastle.crypto.params.ECDomainParameters; import org.bouncycastle.crypto.params.ECPrivateKeyParameters; import org.bouncycastle.crypto.params.ECPublicKeyParameters; import org.bouncycastle.crypto.signers.ECDSASigner; import org.bouncycastle.crypto.signers.HMacDSAKCalculator; import org.bouncycastle.jcajce.provider.asymmetric.ec.BCECPrivateKey; import org.bouncycastle.jcajce.provider.asymmetric.ec.BCECPublicKey; import org.bouncycastle.jcajce.provider.asymmetric.util.EC5Util; import org.bouncycastle.jcajce.provider.asymmetric.util.ECUtil; import org.bouncycastle.jce.provider.BouncyCastleProvider; import org.bouncycastle.jce.spec.ECParameterSpec; import org.bouncycastle.jce.spec.ECPrivateKeySpec; import org.bouncycastle.jce.spec.ECPublicKeySpec; import org.bouncycastle.math.ec.ECAlgorithms; import org.bouncycastle.math.ec.ECPoint; import org.bouncycastle.math.ec.FixedPointCombMultiplier; import org.bouncycastle.math.ec.custom.sec.SecP256K1Curve; import org.jetbrains.annotations.Nullable; /* * Adapted from the BitcoinJ ECKey (Apache 2 License) implementation: * https://github.com/bitcoinj/bitcoinj/blob/master/core/src/main/java/org/bitcoinj/core/ECKey.java * */ /** * An Elliptic Curve Digital Signature using parameters as used by Bitcoin, and defined in Standards for Efficient * Cryptography (SEC) (Certicom Research, http://www.secg.org/sec2-v2.pdf). * * <p> * This class depends upon the BouncyCastle library being available and added as a {@link java.security.Provider}. See * https://www.bouncycastle.org/wiki/display/JA1/Provider+Installation. * * <p> * BouncyCastle can be included using the gradle dependency 'org.bouncycastle:bcprov-jdk15on'. 
 */
public final class SECP256K1 {
  private SECP256K1() {}

  // JCA identifiers used to obtain the key pair generator from the BouncyCastle provider.
  private static final String ALGORITHM = "ECDSA";
  private static final String CURVE_NAME = "secp256k1";
  private static final String PROVIDER = "BC";

  // Lazily initialize parameters by using java initialization on demand
  public static final class Parameters {
    public static final ECDomainParameters CURVE;
    static final ECParameterSpec PARAMETER_SPEC;
    static final BigInteger CURVE_ORDER;
    static final BigInteger HALF_CURVE_ORDER;
    static final KeyPairGenerator KEY_PAIR_GENERATOR;
    static final X9IntegerConverter X_9_INTEGER_CONVERTER;

    static {
      // Fail fast with a clear message when BouncyCastle is missing from the classpath.
      try {
        Class.forName("org.bouncycastle.asn1.sec.SECNamedCurves");
      } catch (ClassNotFoundException e) {
        throw new IllegalStateException(
            "BouncyCastle is not available on the classpath, see https://www.bouncycastle.org/latest_releases.html");
      }
      X9ECParameters params = SECNamedCurves.getByName(CURVE_NAME);
      CURVE = new ECDomainParameters(params.getCurve(), params.getG(), params.getN(), params.getH());
      PARAMETER_SPEC = new ECParameterSpec(params.getCurve(), CURVE.getG(), CURVE.getN(), CURVE.getH());
      CURVE_ORDER = CURVE.getN();
      // Half the order is used to canonicalize the signature s-value (see signHashed).
      HALF_CURVE_ORDER = CURVE_ORDER.shiftRight(1);
      if (CURVE_ORDER.compareTo(SecP256K1Curve.q) >= 0) {
        throw new IllegalStateException("secp256k1.n should be smaller than secp256k1.q, but is not");
      }
      try {
        KEY_PAIR_GENERATOR = KeyPairGenerator.getInstance(ALGORITHM, PROVIDER);
      } catch (NoSuchProviderException e) {
        throw new IllegalStateException(
            "BouncyCastleProvider is not available, see https://www.bouncycastle.org/wiki/display/JA1/Provider+Installation",
            e);
      } catch (NoSuchAlgorithmException e) {
        throw new IllegalStateException("Algorithm should be available but was not", e);
      }
      ECGenParameterSpec ecGenParameterSpec = new ECGenParameterSpec(CURVE_NAME);
      try {
        KEY_PAIR_GENERATOR.initialize(ecGenParameterSpec, new SecureRandom());
      } catch (InvalidAlgorithmParameterException e) {
        throw new IllegalStateException("Algorithm parameter should be available but was not", e);
      }
      X_9_INTEGER_CONVERTER = new X9IntegerConverter();
    }
  }

  // Decompress a compressed public key (x co-ord and low-bit of y-coord).
  @Nullable
  private static ECPoint decompressKey(BigInteger xBN, boolean yBit) {
    byte[] compEnc = Parameters.X_9_INTEGER_CONVERTER
        .integerToBytes(xBN, 1 + Parameters.X_9_INTEGER_CONVERTER.getByteLength(Parameters.CURVE.getCurve()));
    // 0x02 / 0x03 are the SEC compressed-point prefixes for even / odd y respectively.
    compEnc[0] = (byte) (yBit ? 0x03 : 0x02);
    try {
      return Parameters.CURVE.getCurve().decodePoint(compEnc);
    } catch (IllegalArgumentException e) {
      // the compressed key was invalid
      return null;
    }
  }

  /**
   * Given the components of a signature and a selector value, recover and return the public key that generated the
   * signature according to the algorithm in SEC1v2 section 4.1.6.
   *
   * <p>
   * The recovery id is an index from 0 to 3 which indicates which of the 4 possible keys is the correct one. Because
   * the key recovery operation yields multiple potential keys, the correct key must either be stored alongside the
   * signature, or you must be willing to try each recovery id in turn until you find one that outputs the key you are
   * expecting.
   *
   * <p>
   * If this method returns null it means recovery was not possible and recovery id should be iterated.
   *
   * <p>
   * Given the above two points, a correct usage of this method is inside a for loop from 0 to 3, and if the output is
   * null OR a key that is not the one you expect, you try again with the next recovery id.
   *
   * @param v Which possible key to recover - can be null if either key can be attempted.
   * @param r The R component of the signature.
   * @param s The S component of the signature.
   * @param messageHash Hash of the data that was signed.
   * @return A ECKey containing only the public part, or {@code null} if recovery wasn't possible.
   */
  @Nullable
  private static BigInteger recoverFromSignature(int v, BigInteger r, BigInteger s, Bytes32 messageHash) {
    assert (v == 0 || v == 1);
    assert (r.signum() >= 0);
    assert (s.signum() >= 0);
    assert (messageHash != null);

    // Compressed keys require you to know an extra bit of data about the y-coord as there are two possibilities.
    // So it's encoded in the recovery id (v).
    ECPoint R = decompressKey(r, (v & 1) == 1);
    // 1.4. If nR != point at infinity, then do another iteration of Step 1 (callers responsibility).
    if (R == null || !R.multiply(Parameters.CURVE_ORDER).isInfinity()) {
      return null;
    }

    // 1.5. Compute e from M using Steps 2 and 3 of ECDSA signature verification.
    BigInteger e = messageHash.toUnsignedBigInteger();
    // 1.6. For k from 1 to 2 do the following. (loop is outside this function via iterating v)
    //   1.6.1. Compute a candidate public key as:
    //               Q = mi(r) * (sR - eG)
    //
    // Where mi(x) is the modular multiplicative inverse. We transform this into the following:
    //               Q = (mi(r) * s ** R) + (mi(r) * -e ** G)
    // Where -e is the modular additive inverse of e, that is z such that z + e = 0 (mod n).
    // In the above equation ** is point multiplication and + is point addition (the EC group
    // operator).
    //
    // We can find the additive inverse by subtracting e from zero then taking the mod. For example the additive
    // inverse of 3 modulo 11 is 8 because 3 + 8 mod 11 = 0, and -3 mod 11 = 8.
    BigInteger eInv = BigInteger.ZERO.subtract(e).mod(Parameters.CURVE_ORDER);
    BigInteger rInv = r.modInverse(Parameters.CURVE_ORDER);
    BigInteger srInv = rInv.multiply(s).mod(Parameters.CURVE_ORDER);
    BigInteger eInvrInv = rInv.multiply(eInv).mod(Parameters.CURVE_ORDER);
    ECPoint q = ECAlgorithms.sumOfTwoMultiplies(Parameters.CURVE.getG(), eInvrInv, R, srInv);
    if (q.isInfinity()) {
      return null;
    }

    byte[] qBytes = q.getEncoded(false);
    // We remove the prefix
    return new BigInteger(1, Arrays.copyOfRange(qBytes, 1, qBytes.length));
  }

  /**
   * Encrypts bytes using a public key.
* @param publicKey the public key for encryption * @param payload the payload to encrypt * @return the encrypted data */ public static Bytes encrypt(SECP256K1.PublicKey publicKey, Bytes payload) { try { ECPoint ecPoint = publicKey.asEcPoint(); ECPublicKeySpec keySpec = new ECPublicKeySpec(ecPoint, PARAMETER_SPEC); KeyFactory keyFactory = KeyFactory.getInstance("EC"); java.security.PublicKey bcKey = keyFactory.generatePublic(keySpec); Cipher iesCipher = Cipher.getInstance("ECIES", "BC"); iesCipher.init(Cipher.ENCRYPT_MODE, bcKey); byte[] output = iesCipher.doFinal(payload.toArrayUnsafe()); return Bytes.wrap(output); } catch(Exception e) { throw new EncryptionException(e); } } public static Bytes decrypt(SECP256K1.SecretKey secretKey, Bytes encrypted) { try { ECPrivateKeySpec keySpec = new ECPrivateKeySpec(secretKey.bytes().toUnsignedBigInteger(), PARAMETER_SPEC); KeyFactory keyFactory = KeyFactory.getInstance("EC"); java.security.PrivateKey bcKey = keyFactory.generatePrivate(keySpec); Cipher iesCipher = Cipher.getInstance("ECIES", "BC"); iesCipher.init(Cipher.DECRYPT_MODE, bcKey); byte[] output = iesCipher.doFinal(encrypted.toArrayUnsafe()); return Bytes.wrap(output); } catch (Exception e) { throw new DecryptionException(e); } } /** * Generates an ECDSA signature. * * @param data The data to sign. * @param keyPair The keypair to sign using. * @return The signature. */ public static Signature sign(byte[] data, KeyPair keyPair) { return signHashed(keccak256(data), keyPair); } /** * Generates an ECDSA signature. * * @param data The data to sign. * @param keyPair The keypair to sign using. * @return The signature. */ public static Signature sign(Bytes data, KeyPair keyPair) { return signHashed(keccak256(data), keyPair); } /** * Generates an ECDSA signature. * * @param hash The keccak256 hash of the data to sign. * @param keyPair The keypair to sign using. * @return The signature. 
   */
  public static Signature signHashed(byte[] hash, KeyPair keyPair) {
    return signHashed(Bytes32.wrap(hash), keyPair);
  }

  /**
   * Generates an ECDSA signature.
   *
   * @param hash The keccak256 hash of the data to sign.
   * @param keyPair The keypair to sign using.
   * @return The signature.
   */
  public static Signature signHashed(Bytes32 hash, KeyPair keyPair) {
    // Deterministic ECDSA: the nonce k is derived via HMAC-SHA256 rather than drawn randomly.
    ECDSASigner signer = new ECDSASigner(new HMacDSAKCalculator(new SHA256Digest()));
    ECPrivateKeyParameters privKey =
        new ECPrivateKeyParameters(keyPair.secretKey().bytes().toUnsignedBigInteger(), Parameters.CURVE);
    signer.init(true, privKey);
    BigInteger[] components = signer.generateSignature(hash.toArrayUnsafe());
    BigInteger r = components[0];
    BigInteger s = components[1];

    // Automatically adjust the S component to be less than or equal to half the curve
    // order, if necessary. This is required because for every signature (r,s) the signature
    // (r, -s (mod N)) is a valid signature of the same message. However, we dislike the
    // ability to modify the bits of a Bitcoin transaction after it's been signed, as that
    // violates various assumed invariants. Thus in future only one of those forms will be
    // considered legal and the other will be banned.
    if (s.compareTo(Parameters.HALF_CURVE_ORDER) > 0) {
      // The order of the curve is the number of valid points that exist on that curve.
      // If S is in the upper half of the number of valid points, then bring it back to
      // the lower half. Otherwise, imagine that:
      //    N = 10
      //    s = 8, so (-8 % 10 == 2) thus both (r, 8) and (r, 2) are valid solutions.
      //    10 - 8 == 2, giving us always the latter solution, which is canonical.
      s = Parameters.CURVE_ORDER.subtract(s);
    }

    // Now we have to work backwards to figure out the recovery id needed to recover the signature.
    // On this curve, there are only two possible values for the recovery id.
    int recId = -1;
    BigInteger publicKeyBI = keyPair.publicKey().bytes().toUnsignedBigInteger();
    for (int i = 0; i < 2; i++) {
      BigInteger k = recoverFromSignature(i, r, s, hash);
      if (k != null && k.equals(publicKeyBI)) {
        recId = i;
        break;
      }
    }
    if (recId == -1) {
      // this should never happen
      throw new RuntimeException("Unexpected error - could not construct a recoverable key.");
    }

    byte v = (byte) recId;
    return new Signature(v, r, s);
  }

  /**
   * Verifies the given ECDSA signature against the message bytes using the public key bytes.
   *
   * @param data The data to verify.
   * @param signature The signature.
   * @param publicKey The public key.
   * @return True if the verification is successful.
   */
  public static boolean verify(byte[] data, Signature signature, PublicKey publicKey) {
    return verifyHashed(keccak256(data), signature, publicKey);
  }

  /**
   * Verifies the given ECDSA signature against the message bytes using the public key bytes.
   *
   * @param data The data to verify.
   * @param signature The signature.
   * @param publicKey The public key.
   * @return True if the verification is successful.
   */
  public static boolean verify(Bytes data, Signature signature, PublicKey publicKey) {
    return verifyHashed(keccak256(data), signature, publicKey);
  }

  /**
   * Verifies the given ECDSA signature against the message bytes using the public key bytes.
   *
   * @param hash The keccak256 hash of the data to verify.
   * @param signature The signature.
   * @param publicKey The public key.
   * @return True if the verification is successful.
   */
  public static boolean verifyHashed(Bytes32 hash, Signature signature, PublicKey publicKey) {
    return verifyHashed(hash.toArrayUnsafe(), signature, publicKey);
  }

  /**
   * Verifies the given ECDSA signature against the message bytes using the public key bytes.
   *
   * @param hash The keccak256 hash of the data to verify.
   * @param signature The signature.
   * @param publicKey The public key.
   * @return True if the verification is successful.
   */
  public static boolean verifyHashed(byte[] hash, Signature signature, PublicKey publicKey) {
    ECDSASigner signer = new ECDSASigner();
    // Rebuild the SEC uncompressed point encoding by prefixing the 64-byte key with 0x04.
    Bytes toDecode = Bytes.wrap(Bytes.of((byte) 4), publicKey.bytes());
    ECPublicKeyParameters params =
        new ECPublicKeyParameters(Parameters.CURVE.getCurve().decodePoint(toDecode.toArray()), Parameters.CURVE);
    signer.init(false, params);
    try {
      return signer.verifySignature(hash, signature.r, signature.s);
    } catch (NullPointerException e) {
      // Bouncy Castle contains a bug that can cause NPEs given specially crafted signatures. Those signatures
      // are inherently invalid/attack sigs so we just fail them here rather than crash the thread.
      return false;
    }
  }

  /**
   * Calculates an ECDH key agreement between the private and the public key of another party, formatted as a 32 bytes
   * array.
   *
   * @param privKey the private key
   * @param theirPubKey the public key
   * @return shared secret as 32 bytes
   */
  public static Bytes32 calculateKeyAgreement(SecretKey privKey, PublicKey theirPubKey) {
    if (privKey == null) {
      throw new NullPointerException("missing private key");
    }
    if (theirPubKey == null) {
      throw new NullPointerException("missing remote public key");
    }
    ECPrivateKeyParameters privKeyP =
        new ECPrivateKeyParameters(privKey.bytes().toUnsignedBigInteger(), Parameters.CURVE);
    ECPublicKeyParameters pubKeyP = new ECPublicKeyParameters(theirPubKey.asEcPoint(), Parameters.CURVE);
    ECDHBasicAgreement agreement = new ECDHBasicAgreement();
    agreement.init(privKeyP);
    return UInt256.valueOf(agreement.calculateAgreement(pubKeyP));
  }

  /**
   * Derives an ECDH shared secret by multiplying the destination public key point by the source private scalar.
   *
   * @param srcPrivKey the source private key bytes
   * @param destPubKey the destination public key, in the 64-byte unprefixed form accepted by
   *        {@link PublicKey#fromBytes(Bytes)}
   * @return the shared point, in compressed encoding
   */
  public static Bytes deriveECDHKeyAgreement(Bytes srcPrivKey, Bytes destPubKey) {
    ECPoint pudDestPoint = SECP256K1.PublicKey.fromBytes(destPubKey).asEcPoint();
    ECPoint mult = pudDestPoint.multiply(srcPrivKey.toUnsignedBigInteger());
    return Bytes.wrap(mult.getEncoded(true));
  }

  /**
   * A SECP256K1 private key.
*/ public static class SecretKey implements Destroyable { private Bytes32 keyBytes; @Override protected void finalize() { destroy(); } @Override public void destroy() { if (keyBytes != null) { byte[] b = keyBytes.toArrayUnsafe(); keyBytes = null; Arrays.fill(b, (byte) 0); } } /** * Create the private key from a {@link BigInteger}. * * @param key The integer describing the key. * @return The private key. * @throws IllegalArgumentException If the integer would overflow 32 bytes. */ public static SecretKey fromInteger(BigInteger key) { if (key == null) { throw new NullPointerException("key cannot be null"); } byte[] bytes = key.toByteArray(); int offset = 0; while (bytes[offset] == 0) { ++offset; } if ((bytes.length - offset) > Bytes32.SIZE) { throw new IllegalArgumentException("key integer is too large"); } return fromBytes(Bytes32.leftPad(Bytes.wrap(bytes, offset, bytes.length - offset))); } /** * Create the private key from bytes. * * @param bytes The key bytes. * @return The private key. */ public static SecretKey fromBytes(Bytes32 bytes) { return new SecretKey(bytes.copy()); } /** * Load a private key from a file. * * @param file The file to read the key from. * @return The private key. * @throws IOException On a filesystem error. * @throws InvalidSEC256K1SecretKeyStoreException If the file does not contain a valid key. 
*/ public static SecretKey load(Path file) throws IOException, InvalidSEC256K1SecretKeyStoreException { // use buffers for all secret key data transfer, so they can be overwritten on completion ByteBuffer byteBuffer = ByteBuffer.allocate(65); CharBuffer charBuffer = CharBuffer.allocate(64); try { FileChannel channel = FileChannel.open(file, READ); while (byteBuffer.hasRemaining() && channel.read(byteBuffer) > 0) { // no body } channel.close(); if (byteBuffer.remaining() > 1) { throw new InvalidSEC256K1SecretKeyStoreException(); } byteBuffer.flip(); for (int i = 0; i < 64; ++i) { charBuffer.put((char) byteBuffer.get()); } if (byteBuffer.limit() == 65 && byteBuffer.get(64) != '\n' && byteBuffer.get(64) != '\r') { throw new InvalidSEC256K1SecretKeyStoreException(); } charBuffer.flip(); return SecretKey.fromBytes(Bytes32.fromHexString(charBuffer)); } catch (IllegalArgumentException ex) { throw new InvalidSEC256K1SecretKeyStoreException(); } finally { Arrays.fill(byteBuffer.array(), (byte) 0); Arrays.fill(charBuffer.array(), (char) 0); } } private SecretKey(Bytes32 bytes) { if (bytes == null) { throw new NullPointerException("bytes cannot be null"); } this.keyBytes = bytes; } /** * Write the secret key to a file. * * @param file The file to write to. * @throws IOException On a filesystem error. 
     */
    public void store(Path file) throws IOException {
      if (keyBytes == null) {
        throw new NullPointerException("SecretKey has been destroyed");
      }
      // use buffers for all secret key data transfer, so they can be overwritten on completion
      byte[] bytes = new byte[64];
      CharBuffer hexChars = keyBytes.appendHexTo(CharBuffer.allocate(64));
      try {
        hexChars.flip();
        for (int i = 0; i < 64; ++i) {
          bytes[i] = (byte) hexChars.get();
        }
        // atomic write so a crash cannot leave a partially written key file
        atomicReplace(file, bytes);
      } finally {
        // wipe the intermediate buffers that held key material
        Arrays.fill(bytes, (byte) 0);
        Arrays.fill(hexChars.array(), (char) 0);
      }
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof SecretKey)) {
        return false;
      }
      if (keyBytes == null) {
        throw new NullPointerException("SecretKey has been destroyed");
      }
      SecretKey other = (SecretKey) obj;
      return this.keyBytes.equals(other.keyBytes);
    }

    @Override
    public int hashCode() {
      if (keyBytes == null) {
        throw new NullPointerException("SecretKey has been destroyed");
      }
      return keyBytes.hashCode();
    }

    /**
     * Provides the bytes of the key.
     * @return The bytes of the key.
     */
    public Bytes32 bytes() {
      if (keyBytes == null) {
        throw new NullPointerException("SecretKey has been destroyed");
      }
      return keyBytes;
    }

    /**
     * Provides the bytes of the key.
     * @return The bytes of the key.
     */
    public byte[] bytesArray() {
      if (keyBytes == null) {
        throw new NullPointerException("SecretKey has been destroyed");
      }
      return keyBytes.toArrayUnsafe();
    }
  }

  /**
   * A SECP256K1 public key.
   */
  public static class PublicKey {

    // Keys are held in the 64-byte uncompressed form, without the 0x04 prefix.
    private static final int BYTE_LENGTH = 64;

    private final Bytes keyBytes;

    /**
     * Create the public key from a secret key.
     *
     * @param secretKey The secret key.
     * @return The associated public key.
     */
    public static PublicKey fromSecretKey(SecretKey secretKey) {
      BigInteger privKey = secretKey.bytes().toUnsignedBigInteger();

      /*
       * TODO: FixedPointCombMultiplier currently doesn't support scalars longer than the group
       * order, but that could change in future versions.
       */
      if (privKey.bitLength() > Parameters.CURVE_ORDER.bitLength()) {
        privKey = privKey.mod(Parameters.CURVE_ORDER);
      }

      ECPoint point = new FixedPointCombMultiplier().multiply(Parameters.CURVE.getG(), privKey);
      // drop the constant 0x04 uncompressed-point prefix
      return PublicKey.fromBytes(Bytes.wrap(Arrays.copyOfRange(point.getEncoded(false), 1, 65)));
    }

    // Normalize a big-endian byte array to exactly 64 bytes.
    private static Bytes toBytes64(byte[] backing) {
      if (backing.length == BYTE_LENGTH) {
        return Bytes.wrap(backing);
      } else if (backing.length > BYTE_LENGTH) {
        // keep the least-significant 64 bytes (drops e.g. a BigInteger sign byte)
        return Bytes.wrap(backing, backing.length - BYTE_LENGTH, BYTE_LENGTH);
      } else {
        // left-pad shorter values with zeros
        MutableBytes res = MutableBytes.create(BYTE_LENGTH);
        Bytes.wrap(backing).copyTo(res, BYTE_LENGTH - backing.length);
        return res;
      }
    }

    /**
     * Create the public key from a secret key.
     *
     * @param privateKey The secret key.
     * @return The associated public key.
     */
    public static PublicKey fromInteger(BigInteger privateKey) {
      if (privateKey == null) {
        throw new NullPointerException("privateKey cannot be null");
      }
      return fromBytes(toBytes64(privateKey.toByteArray()));
    }

    /**
     * Create the public key from bytes.
     *
     * @param bytes The key bytes.
     * @return The public key.
     */
    public static PublicKey fromBytes(Bytes bytes) {
      return new PublicKey(bytes);
    }

    /**
     * Create the public key from a hex string.
     *
     * @param str The hexadecimal string to parse, which may or may not start with "0x".
     * @return The public key.
     */
    public static PublicKey fromHexString(CharSequence str) {
      return new PublicKey(Bytes.fromHexString(str));
    }

    /**
     * Recover a public key using a digital signature and the data it signs.
     *
     * @param data The signed data.
     * @param signature The digital signature.
     * @return The associated public key, or {@code null} if recovery wasn't possible.
     */
    @Nullable
    public static PublicKey recoverFromSignature(byte[] data, Signature signature) {
      return recoverFromHashAndSignature(keccak256(data), signature);
    }

    /**
     * Recover a public key using a digital signature and the data it signs.
     *
     * @param data The signed data.
     * @param signature The digital signature.
     * @return The associated public key, or {@code null} if recovery wasn't possible.
     */
    @Nullable
    public static PublicKey recoverFromSignature(Bytes data, Signature signature) {
      return recoverFromHashAndSignature(keccak256(data), signature);
    }

    /**
     * Recover a public key using a digital signature and a keccak256 hash of the data it signs.
     *
     * @param hash The keccak256 hash of the signed data.
     * @param signature The digital signature.
     * @return The associated public key, or {@code null} if recovery wasn't possible.
     */
    @Nullable
    public static PublicKey recoverFromHashAndSignature(byte[] hash, Signature signature) {
      return recoverFromHashAndSignature(Bytes32.wrap(hash), signature);
    }

    /**
     * Recover a public key using a digital signature and a keccak256 hash of the data it signs.
     *
     * @param hash The keccak256 hash of the signed data.
     * @param signature The digital signature.
     * @return The associated public key, or {@code null} if recovery wasn't possible.
     */
    @Nullable
    public static PublicKey recoverFromHashAndSignature(Bytes32 hash, Signature signature) {
      BigInteger publicKeyBI = SECP256K1.recoverFromSignature(signature.v(), signature.r(), signature.s(), hash);
      return (publicKeyBI != null) ? fromInteger(publicKeyBI) : null;
    }

    private PublicKey(Bytes bytes) {
      if (bytes == null) {
        throw new NullPointerException("bytes cannot be null");
      }
      if (bytes.size() != BYTE_LENGTH) {
        throw new IllegalArgumentException(
            String.format("Key must be %s bytes long, got %s", BYTE_LENGTH, bytes.size()));
      }
      this.keyBytes = bytes;
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof PublicKey)) {
        return false;
      }
      PublicKey that = (PublicKey) other;
      return this.keyBytes.equals(that.keyBytes);
    }

    @Override
    public int hashCode() {
      return keyBytes.hashCode();
    }

    /**
     * Provides the bytes of the key.
     * @return The bytes of the key.
     */
    public Bytes bytes() {
      return keyBytes;
    }

    /**
     * Provides the bytes of the key.
     *
     * @return The bytes of the key.
*/ public byte[] bytesArray() { return keyBytes.toArrayUnsafe(); } /** * Computes the public key as a point on the elliptic curve. * * @return the public key as a BouncyCastle elliptic curve point */ public ECPoint asEcPoint() { // 0x04 is the prefix for uncompressed keys. Bytes val = Bytes.concatenate(Bytes.of(0x04), keyBytes); return CURVE.getCurve().decodePoint(val.toArrayUnsafe()); } @Override public String toString() { return keyBytes.toString(); } /** * Provides this key represented as hexadecimal, starting with "0x". * @return This key represented as hexadecimal, starting with "0x". */ public String toHexString() { return keyBytes.toHexString(); } } /** * A SECP256K1 key pair. */ public static class KeyPair { private final SecretKey secretKey; private final PublicKey publicKey; /** * Create a keypair from a private and public key. * * @param secretKey The private key. * @param publicKey The public key. * @return The key pair. */ public static KeyPair create(SecretKey secretKey, PublicKey publicKey) { return new KeyPair(secretKey, publicKey); } /** * Create a keypair using only a private key. * * @param secretKey The private key. * @return The key pair. */ public static KeyPair fromSecretKey(SecretKey secretKey) { return new KeyPair(secretKey, PublicKey.fromSecretKey(secretKey)); } /** * Generate a new keypair. * * Entropy for the generation is drawn from {@link SecureRandom}. * * @return A new keypair. 
     */
    public static KeyPair random() {
      java.security.KeyPair rawKeyPair = Parameters.KEY_PAIR_GENERATOR.generateKeyPair();
      BCECPrivateKey privateKey = (BCECPrivateKey) rawKeyPair.getPrivate();
      BCECPublicKey publicKey = (BCECPublicKey) rawKeyPair.getPublic();

      BigInteger privateKeyValue = privateKey.getD();

      // Ethereum does not use encoded public keys like bitcoin - see
      // https://en.bitcoin.it/wiki/Elliptic_Curve_Digital_Signature_Algorithm for details
      // Additionally, as the first bit is a constant prefix (0x04) we ignore this value
      byte[] publicKeyBytes = publicKey.getQ().getEncoded(false);
      BigInteger publicKeyValue = new BigInteger(1, Arrays.copyOfRange(publicKeyBytes, 1, publicKeyBytes.length));

      return new KeyPair(SecretKey.fromInteger(privateKeyValue), PublicKey.fromInteger(publicKeyValue));
    }

    /**
     * Load a key pair from a path.
     *
     * @param file The file containing a private key.
     * @return The key pair.
     * @throws IOException On a filesystem error.
     * @throws InvalidSEC256K1SecretKeyStoreException If the file does not contain a valid key.
     */
    public static KeyPair load(Path file) throws IOException, InvalidSEC256K1SecretKeyStoreException {
      return fromSecretKey(SecretKey.load(file));
    }

    private KeyPair(SecretKey secretKey, PublicKey publicKey) {
      if (secretKey == null) {
        throw new NullPointerException("secretKey cannot be null");
      }
      if (publicKey == null) {
        throw new NullPointerException("publicKey cannot be null");
      }
      this.secretKey = secretKey;
      this.publicKey = publicKey;
    }

    @Override
    public int hashCode() {
      return Objects.hash(secretKey, publicKey);
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof KeyPair)) {
        return false;
      }
      KeyPair that = (KeyPair) other;
      return this.secretKey.equals(that.secretKey) && this.publicKey.equals(that.publicKey);
    }

    /**
     * Provides the secret key
     * @return The secret key.
     */
    public SecretKey secretKey() {
      return secretKey;
    }

    /**
     * Provides the public key
     * @return The public key.
     */
    public PublicKey publicKey() {
      return publicKey;
    }

    /**
     * Write the key pair to a file.
     *
     * @param file The file to write to.
     * @throws IOException On a filesystem error.
     */
    public void store(Path file) throws IOException {
      // only the secret key is persisted; the public key is derivable from it
      secretKey.store(file);
    }
  }

  /**
   * A SECP256K1 digital signature.
   */
  public static class Signature {

    /*
     * Parameter v is the recovery id to reconstruct the public key used to create the signature. It must be in
     * the range 0 to 3 and indicates which of the 4 possible keys is the correct one. Because the key recovery
     * operation yields multiple potential keys, the correct key must either be stored alongside the
     * signature, or you must be willing to try each recovery id in turn until you find one that outputs the key
     * you are expecting.
     */
    private final byte v;
    private final BigInteger r;
    private final BigInteger s;

    /**
     * Create a signature from bytes.
     *
     * @param bytes The signature bytes.
     * @return The signature.
     */
    public static Signature fromBytes(Bytes bytes) {
      if (bytes == null) {
        throw new NullPointerException("bytes cannot be null");
      }
      if (bytes.size() != 65) {
        throw new IllegalArgumentException(
            String.format("Signature must be 65 bytes, but got %s instead", bytes.size()));
      }
      // layout: r (32 bytes) || s (32 bytes) || v (1 byte)
      BigInteger r = bytes.slice(0, 32).toUnsignedBigInteger();
      BigInteger s = bytes.slice(32, 32).toUnsignedBigInteger();
      return new Signature(bytes.get(64), r, s);
    }

    /**
     * Create a signature from parameters.
     *
     * @param v The v-value (recovery id).
     * @param r The r-value.
     * @param s The s-value.
     * @return The signature.
     * @throws IllegalArgumentException If any argument has an invalid range.
*/ public static Signature create(byte v, BigInteger r, BigInteger s) { return new Signature(v, r, s); } Signature(byte v, BigInteger r, BigInteger s) { if (v != 0 && v != 1) { throw new IllegalArgumentException(String.format("Invalid v-value, should be 0 or 1, got %s", v)); } if (r == null) { throw new NullPointerException("r cannot be null"); } if (s == null) { throw new NullPointerException("s cannot be null"); } if (r.compareTo(BigInteger.ONE) < 0 || r.compareTo(Parameters.CURVE_ORDER) > 0) { throw new IllegalArgumentException(String.format("Invalid r-value, should be >= 1 and < %s, got %s", Parameters.CURVE_ORDER, r)); } if (s.compareTo(BigInteger.ONE) < 0 || s.compareTo(Parameters.CURVE_ORDER) > 0) { throw new IllegalArgumentException(String.format("Invalid s-value, should be >= 1 and < %s, got %s", Parameters.CURVE_ORDER, s)); } this.v = v; this.r = r; this.s = s; } /** * Provides the v-value of the signature. * @return The v-value (recovery id) of the signature. */ public byte v() { return v; } /** * Provides the r-value of the signature. * @return The r-value of the signature. */ public BigInteger r() { return r; } /** * Provides the s-value of the signature. * @return The s-value of the signature. */ public BigInteger s() { return s; } /** * Check if the signature is canonical. * * Every signature (r,s) has an equivalent signature (r, -s (mod N)) that is also valid for the same message. The * canonical signature is considered the signature with the s-value less than or equal to half the curve order. * * @return {@code true} if this is the canonical form of the signature, and {@code false} otherwise. */ public boolean isCanonical() { return s.compareTo(Parameters.HALF_CURVE_ORDER) <= 0; } @Override public boolean equals(Object other) { if (!(other instanceof Signature)) { return false; } Signature that = (Signature) other; return this.r.equals(that.r) && this.s.equals(that.s) && this.v == that.v; } /** * Provides the bytes of the signature. 
* * @return The bytes of the signature. */ public Bytes bytes() { MutableBytes signature = MutableBytes.create(65); UInt256.valueOf(r).copyTo(signature, 0); UInt256.valueOf(s).copyTo(signature, 32); signature.set(64, v); return signature; } @Override public int hashCode() { return Objects.hash(r, s, v); } @Override public String toString() { return "Signature{" + "r=" + r + ", s=" + s + ", v=" + v + '}'; } } }
googleapis/google-cloud-java
37,247
java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/ListDeploymentsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/telcoautomation/v1/telcoautomation.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.telcoautomation.v1; /** * * * <pre> * Response object for `ListDeployments`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.ListDeploymentsResponse} */ public final class ListDeploymentsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.ListDeploymentsResponse) ListDeploymentsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListDeploymentsResponse.newBuilder() to construct. 
private ListDeploymentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDeploymentsResponse() { deployments_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDeploymentsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.class, com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.Builder.class); } public static final int DEPLOYMENTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.telcoautomation.v1.Deployment> deployments_; /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.telcoautomation.v1.Deployment> getDeploymentsList() { return deployments_; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.telcoautomation.v1.DeploymentOrBuilder> getDeploymentsOrBuilderList() { return deployments_; } /** * * * <pre> * The list of requested deployments. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ @java.lang.Override public int getDeploymentsCount() { return deployments_.size(); } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ @java.lang.Override public com.google.cloud.telcoautomation.v1.Deployment getDeployments(int index) { return deployments_.get(index); } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ @java.lang.Override public com.google.cloud.telcoautomation.v1.DeploymentOrBuilder getDeploymentsOrBuilder( int index) { return deployments_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < deployments_.size(); i++) { output.writeMessage(1, deployments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < deployments_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, deployments_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.telcoautomation.v1.ListDeploymentsResponse)) { return super.equals(obj); } com.google.cloud.telcoautomation.v1.ListDeploymentsResponse other = (com.google.cloud.telcoautomation.v1.ListDeploymentsResponse) obj; if (!getDeploymentsList().equals(other.getDeploymentsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDeploymentsCount() > 0) { hash = (37 * hash) + DEPLOYMENTS_FIELD_NUMBER; hash = (53 * hash) + getDeploymentsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.telcoautomation.v1.ListDeploymentsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response object for `ListDeployments`. * </pre> * * Protobuf type {@code google.cloud.telcoautomation.v1.ListDeploymentsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.ListDeploymentsResponse) com.google.cloud.telcoautomation.v1.ListDeploymentsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.class, com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.Builder.class); } // Construct using com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (deploymentsBuilder_ == null) { deployments_ = java.util.Collections.emptyList(); } else { deployments_ = null; deploymentsBuilder_.clear(); } 
bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.telcoautomation.v1.TelcoautomationProto .internal_static_google_cloud_telcoautomation_v1_ListDeploymentsResponse_descriptor; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsResponse getDefaultInstanceForType() { return com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsResponse build() { com.google.cloud.telcoautomation.v1.ListDeploymentsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsResponse buildPartial() { com.google.cloud.telcoautomation.v1.ListDeploymentsResponse result = new com.google.cloud.telcoautomation.v1.ListDeploymentsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.telcoautomation.v1.ListDeploymentsResponse result) { if (deploymentsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { deployments_ = java.util.Collections.unmodifiableList(deployments_); bitField0_ = (bitField0_ & ~0x00000001); } result.deployments_ = deployments_; } else { result.deployments_ = deploymentsBuilder_.build(); } } private void buildPartial0(com.google.cloud.telcoautomation.v1.ListDeploymentsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return 
super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.telcoautomation.v1.ListDeploymentsResponse) { return mergeFrom((com.google.cloud.telcoautomation.v1.ListDeploymentsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.telcoautomation.v1.ListDeploymentsResponse other) { if (other == com.google.cloud.telcoautomation.v1.ListDeploymentsResponse.getDefaultInstance()) return this; if (deploymentsBuilder_ == null) { if (!other.deployments_.isEmpty()) { if (deployments_.isEmpty()) { deployments_ = other.deployments_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDeploymentsIsMutable(); deployments_.addAll(other.deployments_); } onChanged(); } } else { if (!other.deployments_.isEmpty()) { if (deploymentsBuilder_.isEmpty()) { deploymentsBuilder_.dispose(); deploymentsBuilder_ = null; deployments_ = other.deployments_; bitField0_ = (bitField0_ & ~0x00000001); deploymentsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getDeploymentsFieldBuilder() : null; } else { deploymentsBuilder_.addAllMessages(other.deployments_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.telcoautomation.v1.Deployment m = input.readMessage( com.google.cloud.telcoautomation.v1.Deployment.parser(), extensionRegistry); if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(m); } else { deploymentsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.telcoautomation.v1.Deployment> deployments_ = java.util.Collections.emptyList(); private void ensureDeploymentsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { deployments_ = new java.util.ArrayList<com.google.cloud.telcoautomation.v1.Deployment>(deployments_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Deployment, 
com.google.cloud.telcoautomation.v1.Deployment.Builder, com.google.cloud.telcoautomation.v1.DeploymentOrBuilder> deploymentsBuilder_; /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public java.util.List<com.google.cloud.telcoautomation.v1.Deployment> getDeploymentsList() { if (deploymentsBuilder_ == null) { return java.util.Collections.unmodifiableList(deployments_); } else { return deploymentsBuilder_.getMessageList(); } } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public int getDeploymentsCount() { if (deploymentsBuilder_ == null) { return deployments_.size(); } else { return deploymentsBuilder_.getCount(); } } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public com.google.cloud.telcoautomation.v1.Deployment getDeployments(int index) { if (deploymentsBuilder_ == null) { return deployments_.get(index); } else { return deploymentsBuilder_.getMessage(index); } } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder setDeployments(int index, com.google.cloud.telcoautomation.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.set(index, value); onChanged(); } else { deploymentsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of requested deployments. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder setDeployments( int index, com.google.cloud.telcoautomation.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.set(index, builderForValue.build()); onChanged(); } else { deploymentsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder addDeployments(com.google.cloud.telcoautomation.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.add(value); onChanged(); } else { deploymentsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder addDeployments(int index, com.google.cloud.telcoautomation.v1.Deployment value) { if (deploymentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDeploymentsIsMutable(); deployments_.add(index, value); onChanged(); } else { deploymentsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder addDeployments( com.google.cloud.telcoautomation.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(builderForValue.build()); onChanged(); } else { deploymentsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of requested deployments. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder addDeployments( int index, com.google.cloud.telcoautomation.v1.Deployment.Builder builderForValue) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.add(index, builderForValue.build()); onChanged(); } else { deploymentsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder addAllDeployments( java.lang.Iterable<? extends com.google.cloud.telcoautomation.v1.Deployment> values) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, deployments_); onChanged(); } else { deploymentsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder clearDeployments() { if (deploymentsBuilder_ == null) { deployments_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { deploymentsBuilder_.clear(); } return this; } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public Builder removeDeployments(int index) { if (deploymentsBuilder_ == null) { ensureDeploymentsIsMutable(); deployments_.remove(index); onChanged(); } else { deploymentsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of requested deployments. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public com.google.cloud.telcoautomation.v1.Deployment.Builder getDeploymentsBuilder(int index) { return getDeploymentsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public com.google.cloud.telcoautomation.v1.DeploymentOrBuilder getDeploymentsOrBuilder( int index) { if (deploymentsBuilder_ == null) { return deployments_.get(index); } else { return deploymentsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public java.util.List<? extends com.google.cloud.telcoautomation.v1.DeploymentOrBuilder> getDeploymentsOrBuilderList() { if (deploymentsBuilder_ != null) { return deploymentsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(deployments_); } } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public com.google.cloud.telcoautomation.v1.Deployment.Builder addDeploymentsBuilder() { return getDeploymentsFieldBuilder() .addBuilder(com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()); } /** * * * <pre> * The list of requested deployments. * </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public com.google.cloud.telcoautomation.v1.Deployment.Builder addDeploymentsBuilder(int index) { return getDeploymentsFieldBuilder() .addBuilder(index, com.google.cloud.telcoautomation.v1.Deployment.getDefaultInstance()); } /** * * * <pre> * The list of requested deployments. 
* </pre> * * <code>repeated .google.cloud.telcoautomation.v1.Deployment deployments = 1;</code> */ public java.util.List<com.google.cloud.telcoautomation.v1.Deployment.Builder> getDeploymentsBuilderList() { return getDeploymentsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Deployment, com.google.cloud.telcoautomation.v1.Deployment.Builder, com.google.cloud.telcoautomation.v1.DeploymentOrBuilder> getDeploymentsFieldBuilder() { if (deploymentsBuilder_ == null) { deploymentsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.telcoautomation.v1.Deployment, com.google.cloud.telcoautomation.v1.Deployment.Builder, com.google.cloud.telcoautomation.v1.DeploymentOrBuilder>( deployments_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); deployments_ = null; } return deploymentsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token that can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.ListDeploymentsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.ListDeploymentsResponse) private static final com.google.cloud.telcoautomation.v1.ListDeploymentsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.ListDeploymentsResponse(); } public static com.google.cloud.telcoautomation.v1.ListDeploymentsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDeploymentsResponse> PARSER = new com.google.protobuf.AbstractParser<ListDeploymentsResponse>() { @java.lang.Override public ListDeploymentsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListDeploymentsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDeploymentsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.telcoautomation.v1.ListDeploymentsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/qpid-broker-j
36,928
broker-core/src/main/java/org/apache/qpid/server/filter/selector/SelectorParser.java
/* SelectorParser.java */
/* Generated By:JavaCC: Do not edit this line. SelectorParser.java */
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.qpid.server.filter.selector;

import java.io.StringReader;
import java.util.ArrayList;

import org.apache.qpid.server.filter.ArithmeticExpression;
import org.apache.qpid.server.filter.BooleanExpression;
import org.apache.qpid.server.filter.ComparisonExpression;
import org.apache.qpid.server.filter.ConstantExpression;
import org.apache.qpid.server.filter.Expression;
import org.apache.qpid.server.filter.LogicExpression;
import org.apache.qpid.server.filter.PropertyExpression;
import org.apache.qpid.server.filter.PropertyExpressionFactory;
import org.apache.qpid.server.filter.UnaryExpression;

/**
 * JMS Selector Parser generated by JavaCC
 *
 * Do not edit this .java file directly - it is autogenerated from SelectorParser.jj
 */
// NOTE(review): the comments added below are review annotations only; the code tokens are
// untouched. Regenerating this file from SelectorParser.jj will discard them — fold any
// comment worth keeping back into the .jj grammar instead.
public class SelectorParser<E> implements SelectorParserConstants {

    // Factory used by variable() to turn an identifier token into a PropertyExpression.
    // Callers are expected to set this via setPropertyExpressionFactory() before parse();
    // it is dereferenced without a null check in variable().
    private PropertyExpressionFactory<E> _factory;

    // Convenience constructor: initialises the parser over an empty reader; a real input
    // is supplied later through parse(String), which calls ReInit.
    public SelectorParser() { this(new StringReader("")); }

    public void setPropertyExpressionFactory(PropertyExpressionFactory<E> factory) { _factory = factory; }

    // Entry point: parses a JMS selector string into a BooleanExpression, reinitialising
    // the parser state so one SelectorParser instance can be reused across calls.
    public BooleanExpression<E> parse(String sql) throws ParseException { this.ReInit(new StringReader(sql)); return this.JmsSelector(); }

    // Coerces a parsed expression to a boolean one: BooleanExpressions pass through,
    // bare property references get a boolean cast, anything else is a parse error.
    private BooleanExpression<E> asBooleanExpression(Expression<E> value) throws ParseException { if (value instanceof BooleanExpression) { return (BooleanExpression<E>) value; } if (value instanceof PropertyExpression) { return UnaryExpression.createBooleanCast( (Expression<E>) value ); } throw new ParseException("Expression will not result in a boolean value: " + value); }

// ----------------------------------------------------------------------------
// Grammar
// ----------------------------------------------------------------------------
// The methods below are the generated grammar productions. Precedence is encoded by the
// call chain: JmsSelector -> or -> and -> equality -> comparison -> add -> mult -> unary
// -> primary. The "{if ("" != null) return ...;}" pattern and trailing
// "throw new Error(...)" are standard JavaCC boilerplate (the throw is unreachable).
// Numeric token kinds (27, 28, 29, ...) refer to constants in SelectorParserConstants.
  final public BooleanExpression<E> JmsSelector() throws ParseException {Expression<E> left=null; left = orExpression(); jj_consume_token(0); {if ("" != null) return asBooleanExpression(left);} throw new Error("Missing return statement in function"); }

  // orExpression := andExpression ( OR andExpression )*
  final public Expression<E> orExpression() throws ParseException {Expression<E> left; Expression<E> right; left = andExpression(); label_1: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case OR:{ ; break; } default: break label_1; } jj_consume_token(OR); right = andExpression(); left = LogicExpression.createOR(asBooleanExpression(left), asBooleanExpression(right)); } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // andExpression := equalityExpression ( AND equalityExpression )*
  final public Expression<E> andExpression() throws ParseException {Expression<E> left; Expression<E> right; left = equalityExpression(); label_2: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case AND:{ ; break; } default: break label_2; } jj_consume_token(AND); right = equalityExpression(); left = LogicExpression.createAND(asBooleanExpression(left), asBooleanExpression(right)); } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // equalityExpression := comparisonExpression ( "=" | "<>" | IS NULL | IS NOT NULL ... )*
  // Tokens 27/28 are the equality operators (see SelectorParserConstants).
  final public Expression<E> equalityExpression() throws ParseException {Expression<E> left; Expression<E> right; left = comparisonExpression(); label_3: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case IS: case 27: case 28:{ ; break; } default: break label_3; } if (jj_2_1(2)) { jj_consume_token(27); right = comparisonExpression(); left = ComparisonExpression.createEqual(left, right); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 28:{ jj_consume_token(28); right = comparisonExpression(); left = ComparisonExpression.createNotEqual(left, right); break; } default: if (jj_2_2(2)) { jj_consume_token(IS); jj_consume_token(NULL); left = ComparisonExpression.createIsNull(left); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case IS:{ jj_consume_token(IS); jj_consume_token(NOT); jj_consume_token(NULL); left = ComparisonExpression.createIsNotNull(left); break; } default: jj_consume_token(-1); throw new ParseException(); } } } } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // comparisonExpression := addExpression followed by any number of relational suffixes:
  // > >= < <= (tokens 29-32), [NOT] LIKE ... [ESCAPE ...], [NOT] BETWEEN x AND y,
  // [NOT] IN ( string-list ). Tokens 33/34/35 are "(" "," ")".
  // Note: the local 'not' is declared but unused — JavaCC emits some dead locals.
  final public Expression<E> comparisonExpression() throws ParseException {Expression<E> left; Expression<E> right; Expression<E> low; Expression<E> high; String t, u; boolean not; ArrayList<String> list; left = addExpression(); label_4: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case NOT: case BETWEEN: case LIKE: case IN: case 29: case 30: case 31: case 32:{ ; break; } default: break label_4; } if (jj_2_3(2)) { jj_consume_token(29); right = addExpression(); left = ComparisonExpression.createGreaterThan(left, right); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 30:{ jj_consume_token(30); right = addExpression(); left = ComparisonExpression.createGreaterThanEqual(left, right); break; } case 31:{ jj_consume_token(31); right = addExpression(); left = ComparisonExpression.createLessThan(left, right); break; } case 32:{ jj_consume_token(32); right = addExpression(); left = ComparisonExpression.createLessThanEqual(left, right); break; } case LIKE:{ u=null; jj_consume_token(LIKE); t = stringLiteral(); switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ESCAPE:{ jj_consume_token(ESCAPE); u = stringLiteral(); break; } default: ; } left = ComparisonExpression.createLike(left, t, u); break; } default: if (jj_2_4(2)) { u=null; jj_consume_token(NOT); jj_consume_token(LIKE); t = stringLiteral(); switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ESCAPE:{ jj_consume_token(ESCAPE); u = stringLiteral(); break; } default: ; } left = ComparisonExpression.createNotLike(left, t, u); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case BETWEEN:{ jj_consume_token(BETWEEN); low = addExpression(); jj_consume_token(AND); high = addExpression(); left = ComparisonExpression.createBetween(left, low, high); break; } default: if (jj_2_5(2)) { jj_consume_token(NOT); jj_consume_token(BETWEEN); low = addExpression(); jj_consume_token(AND); high = addExpression(); left = ComparisonExpression.createNotBetween(left, low, high); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case IN:{ jj_consume_token(IN); jj_consume_token(33); t = stringLiteral(); list = new ArrayList<>(); list.add( t ); label_5: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 34:{ ; break; } default: break label_5; } jj_consume_token(34); t = stringLiteral(); list.add( t ); } jj_consume_token(35); left = ComparisonExpression.createInFilter(left, list, false ); break; } default: if (jj_2_6(2)) { jj_consume_token(NOT); jj_consume_token(IN); jj_consume_token(33); t = stringLiteral(); list = new ArrayList<>(); list.add( t ); label_6: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 34:{ ; break; } default: break label_6; } jj_consume_token(34); t = stringLiteral(); list.add( t ); } jj_consume_token(35); left = ComparisonExpression.createNotInFilter(left, list, false); } else { jj_consume_token(-1); throw new ParseException(); } } } } } } } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // addExpression := multExpr ( ("+"|"-") multExpr )*   (tokens 36/37 are + and -)
  final public Expression<E> addExpression() throws ParseException {Expression<E> left; Expression<E> right; left = multExpr(); label_7: while (true) { if (jj_2_7(2147483647)) { ; } else { break label_7; } switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 36:{ jj_consume_token(36); right = multExpr(); left = ArithmeticExpression.createPlus(left, right); break; } case 37:{ jj_consume_token(37); right = multExpr(); left = ArithmeticExpression.createMinus(left, right); break; } default: jj_consume_token(-1); throw new ParseException(); } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // multExpr := unaryExpr ( ("*"|"/"|"%") unaryExpr )*   (tokens 38/39/40)
  final public Expression<E> multExpr() throws ParseException {Expression<E> left; Expression<E> right; left = unaryExpr(); label_8: while (true) { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 38: case 39: case 40:{ ; break; } default: break label_8; } if (jj_2_8(2)) { jj_consume_token(38); right = unaryExpr(); left = ArithmeticExpression.createMultiply(left, right); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 39:{ jj_consume_token(39); right = unaryExpr(); left = ArithmeticExpression.createDivide(left, right); break; } case 40:{ jj_consume_token(40); right = unaryExpr(); left = ArithmeticExpression.createMod(left, right); break; } default: jj_consume_token(-1); throw new ParseException(); } } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // unaryExpr := "+" unaryExpr (no-op) | "-" unaryExpr (negate) | NOT expr | primaryExpr.
  // The local 's' is declared but unused (generated dead local).
  final public Expression<E> unaryExpr() throws ParseException {String s=null; Expression<E> left=null; if (jj_2_9(2147483647)) { jj_consume_token(36); left = unaryExpr(); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case 37:{ jj_consume_token(37); left = unaryExpr(); left = UnaryExpression.createNegate(left); break; } default: if (jj_2_10(2147483647)) { jj_consume_token(NOT); left = equalityExpression(); left = UnaryExpression.createNOT( asBooleanExpression(left) ); } else if (jj_2_11(2147483647)) { jj_consume_token(NOT); left = unaryExpr(); left = UnaryExpression.createNOT( asBooleanExpression(left) ); } else { switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case TRUE: case FALSE: case NULL: case DECIMAL_LITERAL: case HEX_LITERAL: case OCTAL_LITERAL: case FLOATING_POINT_LITERAL: case STRING_LITERAL: case ID: case QUOTED_ID: case 33:{ left = primaryExpr(); break; } default: jj_consume_token(-1); throw new ParseException(); } } } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // primaryExpr := literal | variable | "(" orExpression ")"
  final public Expression<E> primaryExpr() throws ParseException {Expression<E> left=null; switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case TRUE: case FALSE: case NULL: case DECIMAL_LITERAL: case HEX_LITERAL: case OCTAL_LITERAL: case FLOATING_POINT_LITERAL: case STRING_LITERAL:{ left = literal(); break; } case ID: case QUOTED_ID:{ left = variable(); break; } default: if (jj_2_12(2)) { jj_consume_token(33); left = orExpression(); jj_consume_token(35); } else { jj_consume_token(-1); throw new ParseException(); } } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // literal := string | decimal | hex | octal | float | TRUE | FALSE | NULL,
  // each mapped onto the matching ConstantExpression factory/constant.
  final public ConstantExpression<E> literal() throws ParseException {Token t; String s; ConstantExpression<E> left=null; switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case STRING_LITERAL:{ s = stringLiteral(); left = new ConstantExpression<>(s); break; } case DECIMAL_LITERAL:{ t = jj_consume_token(DECIMAL_LITERAL); left = ConstantExpression.createFromDecimal(t.image); break; } case HEX_LITERAL:{ t = jj_consume_token(HEX_LITERAL); left = ConstantExpression.createFromHex(t.image); break; } case OCTAL_LITERAL:{ t = jj_consume_token(OCTAL_LITERAL); left = ConstantExpression.createFromOctal(t.image); break; } case FLOATING_POINT_LITERAL:{ t = jj_consume_token(FLOATING_POINT_LITERAL); left = ConstantExpression.createFloat(t.image); break; } case TRUE:{ jj_consume_token(TRUE); left = ConstantExpression.TRUE; break; } case FALSE:{ jj_consume_token(FALSE); left = ConstantExpression.FALSE; break; } case NULL:{ jj_consume_token(NULL); left = ConstantExpression.NULL; break; } default: jj_consume_token(-1); throw new ParseException(); } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

  // stringLiteral: strips the surrounding quotes from the token image and collapses a
  // doubled single-quote ('') into one quote by skipping the repeated character.
  // The local 'first' is declared but never used (generated dead local).
  final public String stringLiteral() throws ParseException {Token t; StringBuffer rc = new StringBuffer(); boolean first=true; t = jj_consume_token(STRING_LITERAL); // Decode the string value.
String image = t.image; for( int i=1; i < image.length()-1; i++ ) { char c = image.charAt(i); if( c == (char) 0x27 )//single quote
{ i++; } rc.append(c); } {if ("" != null) return rc.toString();} throw new Error("Missing return statement in function"); }

  // variable: plain ID goes straight to the factory; QUOTED_ID is unquoted first
  // (doubled '"' collapsed to one, analogous to stringLiteral above).
  final public PropertyExpression<E> variable() throws ParseException {Token t; StringBuffer rc = new StringBuffer(); PropertyExpression<E> left=null; switch ((jj_ntk==-1)?jj_ntk_f():jj_ntk) { case ID:{ t = jj_consume_token(ID); left = _factory.createPropertyExpression(t.image); break; } case QUOTED_ID:{ t = jj_consume_token(QUOTED_ID); // Decode the string value.
String image = t.image; for( int i=1; i < image.length()-1; i++ ) { char c = image.charAt(i); if( c == '"' ) { i++; } rc.append(c); } {if ("" != null) return _factory.createPropertyExpression(rc.toString());} break; } default: jj_consume_token(-1); throw new ParseException(); } {if ("" != null) return left;} throw new Error("Missing return statement in function"); }

// ----------------------------------------------------------------------------
// JavaCC lookahead machinery (generated; order-dependent — do not hand-edit).
// jj_2_N(xla): runs speculative lookahead routine jj_3_N over at most xla tokens,
// restoring nothing on failure; a LookaheadSuccess error short-circuits to "matched".
// jj_3_N / jj_3R_* return true on a lookahead MISMATCH, false on success.
// ----------------------------------------------------------------------------
  private boolean jj_2_1(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_1()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_2(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_2()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_3(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_3()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_4(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_4()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_5(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_5()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_6(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_6()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_7(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_7()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_8(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_8()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_9(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_9()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_10(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_10()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_11(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_11()); } catch(LookaheadSuccess ls) { return true; } }
  private boolean jj_2_12(int xla) { jj_la = xla; jj_lastpos = jj_scanpos = token; try { return (!jj_3_12()); } catch(LookaheadSuccess ls) { return true; } }

  // The numeric suffixes in the jj_3R_* names encode the grammar-file line/column of the
  // production they mirror plus an internal id; they are generated, not meaningful here.
  private boolean jj_3R_literal_468_5_38() { Token xsp; xsp = jj_scanpos; if (jj_3R_literal_469_9_47()) { jj_scanpos = xsp; if (jj_3R_literal_476_9_48()) { jj_scanpos = xsp; if (jj_3R_literal_483_9_49()) { jj_scanpos = xsp; if (jj_3R_literal_490_9_50()) { jj_scanpos = xsp; if (jj_3R_literal_497_9_51()) { jj_scanpos = xsp; if (jj_3R_literal_504_9_52()) { jj_scanpos = xsp; if (jj_3R_literal_511_9_53()) { jj_scanpos = xsp; if (jj_3R_literal_518_9_54()) return true; } } } } } } } return false; }
  private boolean jj_3R_comparisonExpression_277_5_9() { if (jj_3R_addExpression_365_5_10()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_comparisonExpression_279_9_26()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3R_variable_560_5_39() { Token xsp; xsp = jj_scanpos; if (jj_3R_variable_561_9_55()) { jj_scanpos = xsp; if (jj_3R_variable_566_9_56()) return true; } return false; }
  private boolean jj_3R_addExpression_365_5_10() { if (jj_3R_multExpr_391_5_11()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_addExpression_367_13_31()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3_12() { if (jj_scan_token(33)) return true; if (jj_3R_orExpression_203_5_14()) return true; if (jj_scan_token(35)) return true; return false; }
  private boolean jj_3R_primaryExpr_452_9_30() { if (jj_3R_variable_560_5_39()) return true; return false; }
  private boolean jj_3R_primaryExpr_450_9_29() { if (jj_3R_literal_468_5_38()) return true; return false; }
  private boolean jj_3R_equalityExpression_256_9_28() { if (jj_scan_token(IS)) return true; if (jj_scan_token(NOT)) return true; if (jj_scan_token(NULL)) return true; return false; }
  private boolean jj_3_11() { if (jj_scan_token(NOT)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_primaryExpr_449_5_25() { Token xsp; xsp = jj_scanpos; if (jj_3R_primaryExpr_450_9_29()) { jj_scanpos = xsp; if (jj_3R_primaryExpr_452_9_30()) { jj_scanpos = xsp; if (jj_3_12()) return true; } } return false; }
  private boolean jj_3R_comparisonExpression_345_11_46() { if (jj_scan_token(34)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; return false; }
  private boolean jj_3R_comparisonExpression_311_44_44() { if (jj_scan_token(ESCAPE)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; return false; }
  private boolean jj_3_2() { if (jj_scan_token(IS)) return true; if (jj_scan_token(NULL)) return true; return false; }
  private boolean jj_3R_unaryExpr_437_13_20() { if (jj_3R_primaryExpr_449_5_25()) return true; return false; }
  private boolean jj_3R_stringLiteral_537_5_42() { if (jj_scan_token(STRING_LITERAL)) return true; return false; }
  private boolean jj_3_10() { if (jj_scan_token(NOT)) return true; if (jj_3R_equalityExpression_239_5_13()) return true; return false; }
  private boolean jj_3R_equalityExpression_246_9_27() { if (jj_scan_token(28)) return true; if (jj_3R_comparisonExpression_277_5_9()) return true; return false; }
  private boolean jj_3R_unaryExpr_432_13_19() { if (jj_scan_token(NOT)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3_6() { if (jj_scan_token(NOT)) return true; if (jj_scan_token(IN)) return true; if (jj_scan_token(33)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_comparisonExpression_345_11_46()) { jj_scanpos = xsp; break; } } if (jj_scan_token(35)) return true; return false; }
  private boolean jj_3_9() { if (jj_scan_token(36)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3_1() { if (jj_scan_token(27)) return true; if (jj_3R_comparisonExpression_277_5_9()) return true; return false; }
  private boolean jj_3R_equalityExpression_241_9_21() { Token xsp; xsp = jj_scanpos; if (jj_3_1()) { jj_scanpos = xsp; if (jj_3R_equalityExpression_246_9_27()) { jj_scanpos = xsp; if (jj_3_2()) { jj_scanpos = xsp; if (jj_3R_equalityExpression_256_9_28()) return true; } } } return false; }
  private boolean jj_3R_comparisonExpression_331_11_45() { if (jj_scan_token(34)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; return false; }
  private boolean jj_3R_unaryExpr_427_9_18() { if (jj_scan_token(NOT)) return true; if (jj_3R_equalityExpression_239_5_13()) return true; return false; }
  private boolean jj_3R_comparisonExpression_302_38_43() { if (jj_scan_token(ESCAPE)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; return false; }
  private boolean jj_3R_unaryExpr_422_13_17() { if (jj_scan_token(37)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_equalityExpression_239_5_13() { if (jj_3R_comparisonExpression_277_5_9()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_equalityExpression_241_9_21()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3R_unaryExpr_420_13_16() { if (jj_scan_token(36)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_literal_518_9_54() { if (jj_scan_token(NULL)) return true; return false; }
  private boolean jj_3R_comparisonExpression_326_9_37() { if (jj_scan_token(IN)) return true; if (jj_scan_token(33)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_comparisonExpression_331_11_45()) { jj_scanpos = xsp; break; } } if (jj_scan_token(35)) return true; return false; }
  private boolean jj_3R_unaryExpr_419_9_12() { Token xsp; xsp = jj_scanpos; if (jj_3R_unaryExpr_420_13_16()) { jj_scanpos = xsp; if (jj_3R_unaryExpr_422_13_17()) { jj_scanpos = xsp; if (jj_3R_unaryExpr_427_9_18()) { jj_scanpos = xsp; if (jj_3R_unaryExpr_432_13_19()) { jj_scanpos = xsp; if (jj_3R_unaryExpr_437_13_20()) return true; } } } } return false; }
  private boolean jj_3R_literal_511_9_53() { if (jj_scan_token(FALSE)) return true; return false; }
  private boolean jj_3_5() { if (jj_scan_token(NOT)) return true; if (jj_scan_token(BETWEEN)) return true; if (jj_3R_addExpression_365_5_10()) return true; if (jj_scan_token(AND)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_andExpression_223_9_58() { if (jj_scan_token(AND)) return true; if (jj_3R_equalityExpression_239_5_13()) return true; return false; }
  private boolean jj_3R_comparisonExpression_316_9_36() { if (jj_scan_token(BETWEEN)) return true; if (jj_3R_addExpression_365_5_10()) return true; if (jj_scan_token(AND)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_literal_504_9_52() { if (jj_scan_token(TRUE)) return true; return false; }
  private boolean jj_3R_andExpression_221_5_22() { if (jj_3R_equalityExpression_239_5_13()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_andExpression_223_9_58()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3R_multExpr_403_9_24() { if (jj_scan_token(40)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_literal_497_9_51() { if (jj_scan_token(FLOATING_POINT_LITERAL)) return true; return false; }
  private boolean jj_3_4() { if (jj_scan_token(NOT)) return true; if (jj_scan_token(LIKE)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; Token xsp; xsp = jj_scanpos; if (jj_3R_comparisonExpression_311_44_44()) jj_scanpos = xsp; return false; }
  private boolean jj_3R_multExpr_398_9_23() { if (jj_scan_token(39)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_orExpression_205_9_57() { if (jj_scan_token(OR)) return true; if (jj_3R_andExpression_221_5_22()) return true; return false; }
  private boolean jj_3R_literal_490_9_50() { if (jj_scan_token(OCTAL_LITERAL)) return true; return false; }
  private boolean jj_3R_comparisonExpression_299_9_35() { if (jj_scan_token(LIKE)) return true; if (jj_3R_stringLiteral_537_5_42()) return true; Token xsp; xsp = jj_scanpos; if (jj_3R_comparisonExpression_302_38_43()) jj_scanpos = xsp; return false; }
  private boolean jj_3_8() { if (jj_scan_token(38)) return true; if (jj_3R_unaryExpr_419_9_12()) return true; return false; }
  private boolean jj_3R_multExpr_393_9_15() { Token xsp; xsp = jj_scanpos; if (jj_3_8()) { jj_scanpos = xsp; if (jj_3R_multExpr_398_9_23()) { jj_scanpos = xsp; if (jj_3R_multExpr_403_9_24()) return true; } } return false; }
  private boolean jj_3R_comparisonExpression_294_9_34() { if (jj_scan_token(32)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_orExpression_203_5_14() { if (jj_3R_andExpression_221_5_22()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_orExpression_205_9_57()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3R_literal_483_9_49() { if (jj_scan_token(HEX_LITERAL)) return true; return false; }
  private boolean jj_3R_multExpr_391_5_11() { if (jj_3R_unaryExpr_419_9_12()) return true; Token xsp; while (true) { xsp = jj_scanpos; if (jj_3R_multExpr_393_9_15()) { jj_scanpos = xsp; break; } } return false; }
  private boolean jj_3R_comparisonExpression_289_9_33() { if (jj_scan_token(31)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_addExpression_374_17_41() { if (jj_scan_token(37)) return true; if (jj_3R_multExpr_391_5_11()) return true; return false; }
  private boolean jj_3_7() { Token xsp; xsp = jj_scanpos; if (jj_scan_token(36)) { jj_scanpos = xsp; if (jj_scan_token(37)) return true; } if (jj_3R_multExpr_391_5_11()) return true; return false; }
  private boolean jj_3R_literal_476_9_48() { if (jj_scan_token(DECIMAL_LITERAL)) return true; return false; }
  private boolean jj_3R_comparisonExpression_284_9_32() { if (jj_scan_token(30)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_addExpression_369_17_40() { if (jj_scan_token(36)) return true; if (jj_3R_multExpr_391_5_11()) return true; return false; }
  private boolean jj_3R_variable_566_9_56() { if (jj_scan_token(QUOTED_ID)) return true; return false; }
  private boolean jj_3R_literal_469_9_47() { if (jj_3R_stringLiteral_537_5_42()) return true; return false; }
  private boolean jj_3_3() { if (jj_scan_token(29)) return true; if (jj_3R_addExpression_365_5_10()) return true; return false; }
  private boolean jj_3R_comparisonExpression_279_9_26() { Token xsp; xsp = jj_scanpos; if (jj_3_3()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_284_9_32()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_289_9_33()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_294_9_34()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_299_9_35()) { jj_scanpos = xsp; if (jj_3_4()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_316_9_36()) { jj_scanpos = xsp; if (jj_3_5()) { jj_scanpos = xsp; if (jj_3R_comparisonExpression_326_9_37()) { jj_scanpos = xsp; if (jj_3_6()) return true; } } } } } } } } } return false; }
  private boolean jj_3R_variable_561_9_55() { if (jj_scan_token(ID)) return true; return false; }
  private boolean jj_3R_addExpression_367_13_31() { Token xsp; xsp = jj_scanpos; if (jj_3R_addExpression_369_17_40()) { jj_scanpos = xsp; if (jj_3R_addExpression_374_17_41()) return true; } return false; }

// ----------------------------------------------------------------------------
// Parser plumbing: token source, current/lookahead token state, constructors,
// ReInit variants, token consumption, and error reporting (all JavaCC-generated).
// ----------------------------------------------------------------------------
  /** Generated Token Manager. */
  public SelectorParserTokenManager token_source;
  SimpleCharStream jj_input_stream;
  /** Current token. */
  public Token token;
  /** Next token. */
  public Token jj_nt;
  // jj_ntk caches the kind of the next token; -1 means "not yet fetched" (see jj_ntk_f).
  private int jj_ntk;
  // jj_scanpos/jj_lastpos/jj_la: cursor, limit and remaining-lookahead for jj_scan_token.
  private Token jj_scanpos, jj_lastpos;
  private int jj_la;

  /** Constructor with InputStream. */
  public SelectorParser(java.io.InputStream stream) { this(stream, null); }
  /** Constructor with InputStream and supplied encoding */
  public SelectorParser(java.io.InputStream stream, String encoding) { try { jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } token_source = new SelectorParserTokenManager(jj_input_stream); token = new Token(); jj_ntk = -1; }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream) { ReInit(stream, null); }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream, String encoding) { try { jj_input_stream.ReInit(stream, encoding, 1, 1); } catch(java.io.UnsupportedEncodingException e) { throw new RuntimeException(e); } token_source.ReInit(jj_input_stream); token = new Token(); jj_ntk = -1; }
  /** Constructor. */
  public SelectorParser(java.io.Reader stream) { jj_input_stream = new SimpleCharStream(stream, 1, 1); token_source = new SelectorParserTokenManager(jj_input_stream); token = new Token(); jj_ntk = -1; }
  /** Reinitialise. */
  public void ReInit(java.io.Reader stream) { if (jj_input_stream == null) { jj_input_stream = new SimpleCharStream(stream, 1, 1); } else { jj_input_stream.ReInit(stream, 1, 1); } if (token_source == null) { token_source = new SelectorParserTokenManager(jj_input_stream); } token_source.ReInit(jj_input_stream); token = new Token(); jj_ntk = -1; }
  /** Constructor with generated Token Manager. */
  public SelectorParser(SelectorParserTokenManager tm) { token_source = tm; token = new Token(); jj_ntk = -1; }
  /** Reinitialise. */
  public void ReInit(SelectorParserTokenManager tm) { token_source = tm; token = new Token(); jj_ntk = -1; }

  // Advances over the next token if it has the expected kind; otherwise rewinds to the
  // previous token and raises a ParseException via generateParseException().
  private Token jj_consume_token(int kind) throws ParseException { Token oldToken; if ((oldToken = token).next != null) token = token.next; else token = token.next = token_source.getNextToken(); jj_ntk = -1; if (token.kind == kind) { return token; } token = oldToken; throw generateParseException(); }

  // Lookahead-success control-flow exception; fillInStackTrace is overridden so throwing
  // it is cheap (no stack capture). A single shared instance (jj_ls) is thrown.
  @SuppressWarnings("serial") static private final class LookaheadSuccess extends Error { @Override public Throwable fillInStackTrace() { return this; } }
  static private final LookaheadSuccess jj_ls = new LookaheadSuccess();

  // Speculatively consumes one token during lookahead; returns true on kind mismatch,
  // throws jj_ls when the lookahead budget (jj_la) is exhausted at the scan frontier.
  private boolean jj_scan_token(int kind) { if (jj_scanpos == jj_lastpos) { jj_la--; if (jj_scanpos.next == null) { jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken(); } else { jj_lastpos = jj_scanpos = jj_scanpos.next; } } else { jj_scanpos = jj_scanpos.next; } if (jj_scanpos.kind != kind) return true; if (jj_la == 0 && jj_scanpos == jj_lastpos) throw jj_ls; return false; }

  /** Get the next Token. */
  final public Token getNextToken() { if (token.next != null) token = token.next; else token = token.next = token_source.getNextToken(); jj_ntk = -1; return token; }
  /** Get the specific Token. */
  final public Token getToken(int index) { Token t = token; for (int i = 0; i < index; i++) { if (t.next != null) t = t.next; else t = t.next = token_source.getNextToken(); } return t; }

  // Fetches (and caches in jj_ntk) the kind of the token after the current one.
  private int jj_ntk_f() { if ((jj_nt=token.next) == null) return (jj_ntk = (token.next=token_source.getNextToken()).kind); else return (jj_ntk = jj_nt.kind); }

  /** Generate ParseException. */
  public ParseException generateParseException() { Token errortok = token.next; int line = errortok.beginLine, column = errortok.beginColumn; String mess = (errortok.kind == 0) ? tokenImage[0] : errortok.image; return new ParseException("Parse error at line " + line + ", column " + column + ". Encountered: " + mess); }

  private boolean trace_enabled;
  /** Trace enabled. */
  final public boolean trace_enabled() { return trace_enabled; }
  /** Enable tracing. */
  final public void enable_tracing() { }
  /** Disable tracing. */
  final public void disable_tracing() { }

}
google/guice
36,988
core/test/com/google/inject/spi/ProviderMethodsTest.java
/* * Copyright (C) 2007 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.inject.spi; import static com.google.common.truth.Truth.assertThat; import static com.google.inject.Asserts.assertContains; import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.junit.Assume.assumeTrue; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.inject.AbstractModule; import com.google.inject.Binder; import com.google.inject.Binding; import com.google.inject.BindingAnnotation; import com.google.inject.CreationException; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import com.google.inject.Provider; import com.google.inject.Provides; import com.google.inject.ProvisionException; import com.google.inject.Singleton; import com.google.inject.Stage; import com.google.inject.TypeLiteral; import com.google.inject.internal.Errors; import 
com.google.inject.internal.InternalFlags; import com.google.inject.internal.InternalFlags.CustomClassLoadingOption; import com.google.inject.internal.ProviderMethod; import com.google.inject.internal.ProviderMethodsModule; import com.google.inject.name.Named; import com.google.inject.name.Names; import com.google.inject.util.Providers; import com.google.inject.util.Types; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.logging.Handler; import java.util.logging.LogRecord; import java.util.logging.Logger; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** @author crazybob@google.com (Bob Lee) */ @SuppressWarnings("ProvidesMethodOutsideOfModule") @RunWith(JUnit4.class) public class ProviderMethodsTest implements Module { @SuppressWarnings("unchecked") @Test public void testProviderMethods() { Injector injector = Guice.createInjector(this); Bob bob = injector.getInstance(Bob.class); assertEquals("A Bob", bob.getName()); Bob clone = injector.getInstance(Bob.class); assertEquals("A Bob", clone.getName()); assertNotSame(bob, clone); assertSame(bob.getDaughter(), clone.getDaughter()); Key<Bob> soleBobKey = Key.get(Bob.class, Sole.class); assertSame(injector.getInstance(soleBobKey), injector.getInstance(soleBobKey)); } @Override public void configure(Binder binder) {} interface Bob { String getName(); Dagny getDaughter(); } interface Dagny { int getAge(); } @Provides Bob provideBob(final Dagny dagny) { return new Bob() { @Override public String getName() { return "A Bob"; } @Override public Dagny getDaughter() { return dagny; } }; } @Provides @Singleton @Sole Bob provideSoleBob(final Dagny dagny) { return new Bob() { 
@Override public String getName() { return "Only Bob"; } @Override public Dagny getDaughter() { return dagny; } }; } @Provides @Singleton Dagny provideDagny() { return new Dagny() { @Override public int getAge() { return 1; } }; } @Retention(RUNTIME) @Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD}) @BindingAnnotation @interface Sole {} @Test public void testCircularDependency() { Injector injector = Guice.createInjector( new AbstractModule() { @Provides Foo newFoo(final Bar bar) { return new Foo() { @Override public Bar getBar() { return bar; } @Override public int getI() { return 5; } }; } @Provides Bar newBar(final Foo foo) { return new Bar() { @Override public Foo getFoo() { return foo; } @Override public int getI() { return 10; } }; } }); Foo foo = injector.getInstance(Foo.class); assertEquals(5, foo.getI()); assertEquals(10, foo.getBar().getI()); assertEquals(5, foo.getBar().getFoo().getI()); } public interface Foo { Bar getBar(); int getI(); } public interface Bar { Foo getFoo(); int getI(); } @Test public void testMultipleBindingAnnotations() { try { Guice.createInjector( new AbstractModule() { @Provides @Named("A") @Blue public String provideString() { return "a"; } }); fail(); } catch (CreationException expected) { assertContains( expected.getMessage(), "more than one annotation annotated with @BindingAnnotation:", "Named", "Blue", "at ProviderMethodsTest$5.provideString(ProviderMethodsTest.java:"); } } @Retention(RUNTIME) @BindingAnnotation @interface Blue {} @Test public void testGenericProviderMethods() { Injector injector = Guice.createInjector(new ProvideTs<String>("A", "B") {}, new ProvideTs<Integer>(1, 2) {}); assertEquals("A", injector.getInstance(Key.get(String.class, Names.named("First")))); assertEquals("B", injector.getInstance(Key.get(String.class, Names.named("Second")))); assertEquals( ImmutableSet.of("A", "B"), injector.getInstance(Key.get(Types.setOf(String.class)))); assertEquals(1, 
injector.getInstance(Key.get(Integer.class, Names.named("First"))).intValue()); assertEquals(2, injector.getInstance(Key.get(Integer.class, Names.named("Second"))).intValue()); assertEquals(ImmutableSet.of(1, 2), injector.getInstance(Key.get(Types.setOf(Integer.class)))); } abstract class ProvideTs<T> extends AbstractModule { final T first; final T second; protected ProvideTs(T first, T second) { this.first = first; this.second = second; } @Named("First") @Provides T provideFirst() { return first; } @Named("Second") @Provides T provideSecond() { return second; } @Provides Set<T> provideBoth(@Named("First") T first, @Named("Second") T second) { return ImmutableSet.of(first, second); } } @Test public void testAutomaticProviderMethods() { Injector injector = Guice.createInjector( (Module) new AbstractModule() { private int next = 1; @Provides @Named("count") public Integer provideCount() { return next++; } }); assertEquals(1, injector.getInstance(Key.get(Integer.class, Names.named("count"))).intValue()); assertEquals(2, injector.getInstance(Key.get(Integer.class, Names.named("count"))).intValue()); assertEquals(3, injector.getInstance(Key.get(Integer.class, Names.named("count"))).intValue()); } /** * If the user installs provider methods for the module manually, that shouldn't cause a double * binding of the provider methods' types. 
*/ @Test public void testAutomaticProviderMethodsDoNotCauseDoubleBinding() { Module installsSelf = new AbstractModule() { @Override protected void configure() { install(this); bind(Integer.class).toInstance(5); } @Provides public String provideString(Integer count) { return "A" + count; } }; Injector injector = Guice.createInjector(installsSelf); assertEquals("A5", injector.getInstance(String.class)); } @Test public void testWildcardProviderMethods() { final List<String> strings = ImmutableList.of("A", "B", "C"); final List<Number> numbers = ImmutableList.<Number>of(1, 2, 3); Injector injector = Guice.createInjector( new AbstractModule() { @Override protected void configure() { @SuppressWarnings("unchecked") Key<List<? super Integer>> listOfSupertypesOfInteger = (Key<List<? super Integer>>) Key.get(Types.listOf(Types.supertypeOf(Integer.class))); bind(listOfSupertypesOfInteger).toInstance(numbers); } @Provides public List<? extends CharSequence> provideCharSequences() { return strings; } @Provides public Class<?> provideType() { return Float.class; } }); assertSame(strings, injector.getInstance(HasWildcardInjection.class).charSequences); assertSame(numbers, injector.getInstance(HasWildcardInjection.class).numbers); assertSame(Float.class, injector.getInstance(HasWildcardInjection.class).type); } static class HasWildcardInjection { @Inject List<? extends CharSequence> charSequences; @Inject List<? 
super Integer> numbers; @Inject Class<?> type; } @Test public void testProviderMethodDependenciesAreExposed() throws Exception { Module module = new AbstractModule() { @Override protected void configure() { bind(Integer.class).toInstance(50); bindConstant().annotatedWith(Names.named("units")).to("Kg"); } @Provides @Named("weight") String provideWeight(Integer count, @Named("units") String units) { return count + units; } }; Injector injector = Guice.createInjector(module); ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) injector.getBinding(Key.get(String.class, Names.named("weight"))); Method method = module.getClass().getDeclaredMethod("provideWeight", Integer.class, String.class); InjectionPoint point = new InjectionPoint(TypeLiteral.get(module.getClass()), method, false); assertEquals( ImmutableSet.<Dependency<?>>of( new Dependency<Integer>(point, Key.get(Integer.class), false, 0), new Dependency<String>(point, Key.get(String.class, Names.named("units")), false, 1)), binding.getDependencies()); } @Test public void testNonModuleProviderMethods() { final Object methodsObject = new Object() { @Provides @Named("foo") String provideFoo() { return "foo-value"; } }; Module module = new AbstractModule() { @Override protected void configure() { install(ProviderMethodsModule.forObject(methodsObject)); } }; Injector injector = Guice.createInjector(module); Key<String> key = Key.get(String.class, Names.named("foo")); assertEquals("foo-value", injector.getInstance(key)); // Test the provider method object itself. 
This makes sure getInstance works, since GIN uses it List<Element> elements = Elements.getElements(module); assertEquals(1, elements.size()); Element element = elements.get(0); assertTrue( element + " instanceof ProviderInstanceBinding", element instanceof ProviderInstanceBinding); ProviderInstanceBinding<?> binding = (ProviderInstanceBinding<?>) element; jakarta.inject.Provider<?> provider = binding.getUserSuppliedProvider(); assertTrue(provider instanceof ProviderMethod); assertEquals(methodsObject, ((ProviderMethod) provider).getInstance()); assertSame(provider, binding.getProviderInstance()); } @Test public void testVoidProviderMethods() { try { Guice.createInjector( new AbstractModule() { @Provides void provideFoo() {} }); fail(); } catch (CreationException expected) { assertContains( expected.getMessage(), "Provider methods must return a value. Do not return void.", "at ProviderMethodsTest$14.provideFoo(ProviderMethodsTest.java:"); } } @Test public void testInjectsJustOneLogger() { AtomicReference<Logger> loggerRef = new AtomicReference<>(); Injector injector = Guice.createInjector(new FooModule(loggerRef)); assertNull(loggerRef.get()); injector.getInstance(Integer.class); Logger lastLogger = loggerRef.getAndSet(null); assertNotNull(lastLogger); injector.getInstance(Integer.class); assertSame(lastLogger, loggerRef.get()); assertEquals(FooModule.class.getName(), lastLogger.getName()); } private static class FooModule extends AbstractModule { private final AtomicReference<Logger> loggerRef; public FooModule(AtomicReference<Logger> loggerRef) { this.loggerRef = loggerRef; } @SuppressWarnings("unused") @Provides Integer foo(Logger logger) { loggerRef.set(logger); return 42; } } @Test public void testSpi() throws Exception { Module m1 = new AbstractModule() { @Provides @Named("foo") String provideFoo(Integer dep) { return "foo"; } }; Module m2 = new AbstractModule() { @Provides Integer provideInt(@Named("foo") String dep) { return 42; } }; Injector injector = 
Guice.createInjector(m1, m2); Binding<String> stringBinding = injector.getBinding(Key.get(String.class, Names.named("foo"))); ProvidesMethodBinding<String> stringMethod = stringBinding.acceptTargetVisitor(new BindingCapturer<String>()); assertEquals(m1, stringMethod.getEnclosingInstance()); assertEquals( m1.getClass().getDeclaredMethod("provideFoo", Integer.class), stringMethod.getMethod()); assertEquals( ((HasDependencies) stringBinding).getDependencies(), stringMethod.getDependencies()); assertEquals(Key.get(String.class, Names.named("foo")), stringMethod.getKey()); Binding<Integer> intBinding = injector.getBinding(Integer.class); ProvidesMethodBinding<Integer> intMethod = intBinding.acceptTargetVisitor(new BindingCapturer<Integer>()); assertEquals(m2, intMethod.getEnclosingInstance()); assertEquals( m2.getClass().getDeclaredMethod("provideInt", String.class), intMethod.getMethod()); assertEquals(((HasDependencies) intBinding).getDependencies(), intMethod.getDependencies()); assertEquals(Key.get(Integer.class), intMethod.getKey()); } private static class BindingCapturer<T> extends DefaultBindingTargetVisitor<T, ProvidesMethodBinding<T>> implements ProvidesMethodTargetVisitor<T, ProvidesMethodBinding<T>> { @Override @SuppressWarnings("unchecked") public ProvidesMethodBinding<T> visit( ProvidesMethodBinding<? extends T> providesMethodBinding) { return (ProvidesMethodBinding<T>) providesMethodBinding; } @Override protected ProvidesMethodBinding<T> visitOther(Binding<? 
extends T> binding) { throw new IllegalStateException("unexpected visit of: " + binding); } } @Test public void testProvidesMethodVisibility() { Injector injector = Guice.createInjector(new VisibilityModule()); assertEquals(42, injector.getInstance(Integer.class).intValue()); assertEquals(42L, injector.getInstance(Long.class).longValue()); assertEquals(42D, injector.getInstance(Double.class).doubleValue(), 0.0); assertEquals(42F, injector.getInstance(Float.class).floatValue(), 0.0f); } private static class VisibilityModule extends AbstractModule { @SuppressWarnings("unused") @Provides Integer foo() { return 42; } @SuppressWarnings("unused") @Provides private Long bar() { return 42L; } @SuppressWarnings("unused") @Provides protected Double baz() { return 42D; } @SuppressWarnings("unused") @Provides public Float quux() { return 42F; } } @Test public void testProvidesMethodInheritenceHierarchy() { try { Guice.createInjector(new Sub1Module(), new Sub2Module()); fail("Expected injector creation failure"); } catch (CreationException expected) { // both of our super class bindings cause errors assertContains( expected.getMessage(), "Long was bound multiple times.", "Integer was bound multiple times."); } } @Test public void testProvidesMethodsDefinedInSuperClass() { Injector injector = Guice.createInjector(new Sub1Module()); assertEquals(42, injector.getInstance(Integer.class).intValue()); assertEquals(42L, injector.getInstance(Long.class).longValue()); assertEquals(42D, injector.getInstance(Double.class).doubleValue(), 0.0); } private static class BaseModule extends AbstractModule { @Provides Integer foo() { return 42; } @Provides Long bar() { return 42L; } } private static class Sub1Module extends BaseModule { @Provides Double baz() { return 42D; } } private static class Sub2Module extends BaseModule { @Provides Float quux() { return 42F; } } @Test public void testShareFastClass() { if (InternalFlags.getUseMethodHandlesOption()) { // This test is not relevant for method 
handles. return; } // FastClass is only used when bytecode generation is enabled and this test relies on package // access which CHILD loading doesn't have. assumeTrue( InternalFlags.isBytecodeGenEnabled() && InternalFlags.getCustomClassLoadingOption() != CustomClassLoadingOption.CHILD); CallerInspecterModule module = new CallerInspecterModule(); Guice.createInjector(Stage.PRODUCTION, module); assertEquals(module.fooCallerClass, module.barCallerClass); assertTrue(module.fooCallerClass.contains("$$FastClassByGuice$$")); } private static class CallerInspecterModule extends AbstractModule { // start them off as unequal String barCallerClass = "not_set_bar"; String fooCallerClass = "not_set_foo"; @Provides @Singleton Integer foo() { this.fooCallerClass = new Exception().getStackTrace()[1].getClassName(); return 42; } @Provides @Singleton Long bar() { this.barCallerClass = new Exception().getStackTrace()[1].getClassName(); return 42L; } } @Test public void testShareFastClassWithSuperClass() { if (InternalFlags.getUseMethodHandlesOption()) { return; } // FastClass is only used when bytecode generation is enabled and this test relies on package // access which CHILD loading doesn't have. 
assumeTrue( InternalFlags.isBytecodeGenEnabled() && InternalFlags.getCustomClassLoadingOption() != CustomClassLoadingOption.CHILD); CallerInspecterSubClassModule module = new CallerInspecterSubClassModule(); Guice.createInjector(Stage.PRODUCTION, module); assertEquals( "Expected provider methods in the same class to share fastclass classes", module.fooCallerClass, module.barCallerClass); assertFalse( "Did not expect provider methods in the subclasses to share fastclass classes " + "with their parent classes", module.bazCallerClass.equals(module.barCallerClass)); } private static class CallerInspecterSubClassModule extends CallerInspecterModule { String bazCallerClass; @Override protected void configure() {} @Provides @Singleton Double baz() { this.bazCallerClass = new Exception().getStackTrace()[1].getClassName(); return 42D; } } /*end[AOP]*/ static class SuperClassModule extends AbstractModule { @Provides Number providerMethod() { return 1D; } @Provides @Named("rawlist") @SuppressWarnings("rawtypes") // Testing rawtypes. 
List rawProvider(@Named("list") List<String> f) { return f; } @Provides @Named("unrawlist") @SuppressWarnings({"unchecked", "rawtypes"}) // Testing rawtypes List<String> rawParameterProvider(@Named("rawlist") List f) { return f; } @Provides @Named("list") List<String> annotatedGenericProviderMethod() { return new ArrayList<String>(); } @Provides @Named("collection") Collection<String> annotatedGenericParameterProviderMethod(@Named("list") List<String> foo) { return foo; } @Provides private String privateProviderMethod() { return "hello"; } } @Test public void testOverrideProviderMethod_overrideHasProvides() { class SubClassModule extends SuperClassModule { @Override @Provides Number providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", "overridden by: ProviderMethodsTest$1SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_overrideHasProvides_withNewAnnotation() { class SubClassModule extends SuperClassModule { @Override @Provides @Named("foo") Number providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", "overridden by: ProviderMethodsTest$2SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_overrideDoesntHaveProvides() { class SubClassModule extends SuperClassModule { @Override Number providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", 
"overridden by: ProviderMethodsTest$3SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_overrideDoesntHaveProvides_withNewAnnotation() { class SubClassModule extends SuperClassModule { @Override @Named("foo") Number providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", "overridden by: ProviderMethodsTest$4SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_covariantOverrideDoesntHaveProvides() { class SubClassModule extends SuperClassModule { @Override Double providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", "overridden by: ProviderMethodsTest$5SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_covariantOverrideHasProvides() { class SubClassModule extends SuperClassModule { @Override @Provides Double providerMethod() { return 2D; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.providerMethod()", "overridden by: ProviderMethodsTest$6SubClassModule.providerMethod()"); } } @Test public void testOverrideProviderMethod_fakeOverridePrivateMethod() { class SubClassModule extends SuperClassModule { // not actually an override, just looks like it String privateProviderMethod() { return "sub"; } } assertEquals("hello", Guice.createInjector(new SubClassModule()).getInstance(String.class)); } @Test public void 
testOverrideProviderMethod_subclassRawTypes_returnType() { class SubClassModule extends SuperClassModule { @Override @SuppressWarnings({"unchecked", "rawtypes"}) // Testing rawtypes. List annotatedGenericProviderMethod() { return super.annotatedGenericProviderMethod(); } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.annotatedGenericProviderMethod()", "overridden by: ProviderMethodsTest$8SubClassModule.annotatedGenericProviderMethod()"); } } @Test public void testOverrideProviderMethod_subclassRawTypes_parameterType() { class SubClassModule extends SuperClassModule { @SuppressWarnings({"unchecked", "rawtypes"}) // Testing rawtypes @Override Collection<String> annotatedGenericParameterProviderMethod(List foo) { return super.annotatedGenericParameterProviderMethod(foo); } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: " + "ProviderMethodsTest$SuperClassModule" + ".annotatedGenericParameterProviderMethod()", "overridden by: " + "ProviderMethodsTest$9SubClassModule" + ".annotatedGenericParameterProviderMethod()"); } } @Test public void testOverrideProviderMethod_superclassRawTypes_returnType() { class SubClassModule extends SuperClassModule { // remove the rawtype from the override @Override List<String> rawProvider(List<String> f) { return f; } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$SuperClassModule.rawProvider()", "overridden by: ProviderMethodsTest$10SubClassModule.rawProvider()"); } } abstract static class GenericSuperModule<T> extends AbstractModule { @Provides String 
provide(T thing) { return thing.toString(); } } // This is a tricky case where signatures don't match, but it is an override (facilitated via a // bridge method) @Test public void testOverrideProviderMethod_erasureBasedOverrides() { class SubClassModule extends GenericSuperModule<Integer> { @Override String provide(Integer thing) { return thing.toString(); } @Override protected void configure() { bind(Integer.class).toInstance(3); } } try { Guice.createInjector(new SubClassModule()); fail(); } catch (CreationException e) { assertContains( e.getMessage(), "Overriding @Provides methods is not allowed.", "@Provides method: ProviderMethodsTest$GenericSuperModule.provide()", "overridden by: ProviderMethodsTest$11SubClassModule.provide()"); } } static class RestrictedSuper extends AbstractModule { @Provides public String provideFoo() { return "foo"; } } public static class ExposedSub extends RestrictedSuper {} @Test public void testOverrideProviderMethod_increasedVisibility() { // ensure we don't detect the synthetic provideFoo method in ExposedSub as an override (it is, // but since it is synthetic it would be annoying to throw an error on it). assertEquals("foo", Guice.createInjector(new ExposedSub()).getInstance(String.class)); } interface ProviderInterface<T> { T getT(); } static class ModuleImpl extends AbstractModule implements ProviderInterface<String> { @Override @Provides public String getT() { return "string"; } @Provides public Object getObject() { return new Object(); } /* javac will synthesize a bridge method for getT with the types erased, equivalent to: * @Provides public Object getT() { ... 
} */ } @Test public void testIgnoreSyntheticBridgeMethods() { Guice.createInjector(new ModuleImpl()); } @Test public void testScopedProviderMethodThrowsException() { Injector injector = Guice.createInjector( new AbstractModule() { @Provides @Singleton int provideInt() { throw new RuntimeException("boom"); } }); Provider<Integer> intProvider = injector.getProvider(Integer.class); try { intProvider.get(); fail(); } catch (ProvisionException pe) { // by default assertContains asserts that the last item doesn't repeat... which is the main // thing we are testing for assertContains(pe.getMessage(), "RuntimeException: boom", "provideInt"); } } @Test public void testNullability() throws Exception { Module module = new AbstractModule() { @Override protected void configure() { bind(String.class).toProvider(Providers.<String>of(null)); } @SuppressWarnings("unused") @Provides Integer fail(String foo) { return 1; } @SuppressWarnings("unused") @Provides Long succeed(@Nullable String foo) { return 2L; } }; Injector injector = Guice.createInjector(module); InjectionPoint fooPoint = InjectionPoint.forMethod( module.getClass().getDeclaredMethod("fail", String.class), TypeLiteral.get(module.getClass())); Dependency<?> fooDependency = Iterables.getOnlyElement(fooPoint.getDependencies()); runNullableTest(injector, fooDependency, module); injector.getInstance(Long.class); } @Test public void testModuleBindings() throws Exception { Module module = new AbstractModule() { @Provides Integer fail() { return 1; } }; // sanity check that the injector works Injector injector = Guice.createInjector(module); assertEquals(1, injector.getInstance(Integer.class).intValue()); ProviderInstanceBinding<Integer> injectorBinding = (ProviderInstanceBinding<Integer>) injector.getBinding(Integer.class); assertEquals(1, injectorBinding.getUserSuppliedProvider().get().intValue()); ProviderInstanceBinding<?> moduleBinding = (ProviderInstanceBinding<?>) Iterables.getOnlyElement(Elements.getElements(module)); 
try { moduleBinding.getUserSuppliedProvider().get(); fail(); } catch (IllegalStateException ise) { assertThat(ise) .hasMessageThat() .isEqualTo( "This Provider cannot be used until the Injector has been created and this binding" + " has been initialized."); } } static final class DeduplicateModule extends AbstractModule { @Provides String provideString() { return ""; } } @Test public void testDeduplicateProviderMethodsBindings_sameInstance() { Module module = new DeduplicateModule(); Guice.createInjector(Stage.PRODUCTION, module, module); } @Test public void testDeduplicateProviderMethodsBindings_differentInstances() { try { Guice.createInjector(Stage.PRODUCTION, new DeduplicateModule(), new DeduplicateModule()); fail(); } catch (CreationException expected) { assertContains(expected.getMessage(), "String was bound multiple times."); } } static final class DeduplicateStaticModule extends AbstractModule { @Provides static String provideString() { return ""; } } @Test public void testDeduplicateProviderMethodsBindings_sameInstance_staticMethod() { Module module = new DeduplicateStaticModule(); Guice.createInjector(Stage.PRODUCTION, module, module); } @Test public void testDeduplicateProviderMethodsBindings_differentInstances_staticMethod() { Guice.createInjector( Stage.PRODUCTION, new DeduplicateStaticModule(), new DeduplicateStaticModule()); } private void runNullableTest(Injector injector, Dependency<?> dependency, Module module) { switch (InternalFlags.getNullableProvidesOption()) { case ERROR: validateNullableFails(injector, module); break; case IGNORE: validateNullableIgnored(injector); break; case WARN: validateNullableWarns(injector, dependency); break; } } private void validateNullableFails(Injector injector, Module module) { try { injector.getInstance(Integer.class); fail(); } catch (ProvisionException expected) { String moduleName = module.getClass().getName().replace("com.google.inject.spi.", ""); assertContains( expected.getMessage(), "null returned by 
binding at " + moduleName + ".configure(", "but the 1st parameter foo of " + moduleName + ".fail(", "is not @Nullable", "for 1st parameter", "while locating Integer"); assertEquals(1, expected.getErrorMessages().size()); } } private void validateNullableIgnored(Injector injector) { injector.getInstance(Integer.class); // no exception } private void validateNullableWarns(Injector injector, Dependency<?> dependency) { final List<LogRecord> logRecords = Lists.newArrayList(); final Handler fakeHandler = new Handler() { @Override public void publish(LogRecord logRecord) { logRecords.add(logRecord); } @Override public void flush() {} @Override public void close() throws SecurityException {} }; Logger.getLogger(Guice.class.getName()).addHandler(fakeHandler); try { injector.getInstance(Integer.class); // no exception, but assert it does log. LogRecord record = Iterables.getOnlyElement(logRecords); assertEquals( "Guice injected null into {0} (a {1}), please mark it @Nullable." + " Use -Dguice_check_nullable_provides_params=ERROR to turn this into an" + " error.", record.getMessage()); assertEquals(Errors.convert(dependency.getKey()), record.getParameters()[1]); } finally { Logger.getLogger(Guice.class.getName()).removeHandler(fakeHandler); } } @Retention(RetentionPolicy.RUNTIME) @interface Nullable {} }
apache/hbase
37,494
hbase-server/src/main/java/org/apache/hadoop/hbase/master/replication/ReplicationPeerManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.master.replication; import com.google.errorprone.annotations.RestrictedApi; import java.io.IOException; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ClusterMetrics; import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.ReplicationPeerNotFoundException; import org.apache.hadoop.hbase.ServerName; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import 
org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.ConnectionRegistryFactory;
import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
import org.apache.hadoop.hbase.conf.ConfigurationObserver;
import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
import org.apache.hadoop.hbase.procedure2.ProcedureExecutor;
import org.apache.hadoop.hbase.replication.BaseReplicationEndpoint;
import org.apache.hadoop.hbase.replication.HBaseReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
import org.apache.hadoop.hbase.replication.ReplicationException;
import org.apache.hadoop.hbase.replication.ReplicationGroupOffset;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfigBuilder;
import org.apache.hadoop.hbase.replication.ReplicationPeerDescription;
import org.apache.hadoop.hbase.replication.ReplicationPeerStorage;
import org.apache.hadoop.hbase.replication.ReplicationQueueData;
import org.apache.hadoop.hbase.replication.ReplicationQueueId;
import org.apache.hadoop.hbase.replication.ReplicationQueueStorage;
import org.apache.hadoop.hbase.replication.ReplicationStorageFactory;
import org.apache.hadoop.hbase.replication.ReplicationUtils;
import org.apache.hadoop.hbase.replication.SyncReplicationState;
import org.apache.hadoop.hbase.replication.ZKReplicationQueueStorageForMigration;
import org.apache.hadoop.hbase.replication.ZKReplicationQueueStorageForMigration.MigrationIterator;
import org.apache.hadoop.hbase.replication.ZKReplicationQueueStorageForMigration.ZkLastPushedSeqId;
import org.apache.hadoop.hbase.replication.ZKReplicationQueueStorageForMigration.ZkReplicationQueueData;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
import org.apache.hadoop.hbase.zookeeper.ZKConfig;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableMap;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;

/**
 * Manages and performs all replication admin operations.
 * <p>
 * Used to add/remove a replication peer.
 * <p>
 * Implements {@link ConfigurationObserver} mainly for recreating {@link ReplicationPeerStorage},
 * which supports migrating across different replication peer storages without restarting master.
 */
@InterfaceAudience.Private
public class ReplicationPeerManager implements ConfigurationObserver {

  private static final Logger LOG = LoggerFactory.getLogger(ReplicationPeerManager.class);

  // volatile: may be swapped wholesale by onConfigurationChange
  private volatile ReplicationPeerStorage peerStorage;

  private final ReplicationQueueStorage queueStorage;

  // in-memory cache of peer descriptions, keyed by peer id
  private final ConcurrentMap<String, ReplicationPeerDescription> peers;

  // the legal sync replication state transitions; anything not listed here is rejected
  private final ImmutableMap<SyncReplicationState, EnumSet<SyncReplicationState>> allowedTransition =
    Maps.immutableEnumMap(ImmutableMap.of(SyncReplicationState.ACTIVE,
      EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE, SyncReplicationState.STANDBY),
      SyncReplicationState.STANDBY, EnumSet.of(SyncReplicationState.DOWNGRADE_ACTIVE),
      SyncReplicationState.DOWNGRADE_ACTIVE,
      EnumSet.of(SyncReplicationState.STANDBY, SyncReplicationState.ACTIVE)));

  private final String clusterId;

  private volatile Configuration conf;

  // kept for dynamically recreating the ReplicationPeerStorage
  private final FileSystem fs;
  private final ZKWatcher zk;

  /** Hook that lazily creates the replication queue storage table on first use. */
  @FunctionalInterface
  interface ReplicationQueueStorageInitializer {

    void initialize() throws IOException;
  }

  private final ReplicationQueueStorageInitializer queueStorageInitializer;

  // we will mock this class in UT so leave the constructor as package private and not mark the
  // class as final, since mockito can not mock a final class
  ReplicationPeerManager(FileSystem fs, ZKWatcher zk, ReplicationPeerStorage peerStorage,
    ReplicationQueueStorage queueStorage, ConcurrentMap<String, ReplicationPeerDescription> peers,
    Configuration conf, String clusterId,
    ReplicationQueueStorageInitializer queueStorageInitializer) {
    this.fs = fs;
    this.zk = zk;
    this.peerStorage = peerStorage;
    this.queueStorage = queueStorage;
    this.peers = peers;
    this.conf = conf;
    this.clusterId = clusterId;
    this.queueStorageInitializer = queueStorageInitializer;
  }

  /** Fails if any replication queue (wal or hfile-refs) still exists for the given peer id. */
  private void checkQueuesDeleted(String peerId)
    throws ReplicationException, DoNotRetryIOException {
    List<ReplicationQueueId> queueIds = queueStorage.listAllQueueIds(peerId);
    if (!queueIds.isEmpty()) {
      throw new DoNotRetryIOException("There are still " + queueIds.size()
        + " undeleted queue(s) for peerId: " + peerId + ", first is " + queueIds.get(0));
    }
    if (queueStorage.getAllPeersFromHFileRefsQueue().contains(peerId)) {
      throw new DoNotRetryIOException("Undeleted queue for peer " + peerId + " in hfile-refs");
    }
  }

  private void initializeQueueStorage() throws IOException {
    queueStorageInitializer.initialize();
  }

  /** Validates a peer id and config before an add-peer procedure is allowed to proceed. */
  void preAddPeer(String peerId, ReplicationPeerConfig peerConfig)
    throws ReplicationException, IOException {
    if (peerId.contains("-")) {
      throw new DoNotRetryIOException("Found invalid peer name: " + peerId);
    }
    checkPeerConfig(peerConfig);
    if (peerConfig.isSyncReplication()) {
      checkSyncReplicationPeerConfigConflict(peerConfig);
    }
    if (peers.containsKey(peerId)) {
      throw new DoNotRetryIOException("Replication peer " + peerId + " already exists");
    }

    // lazy create table
    initializeQueueStorage();
    // make sure that there is no queues with the same peer id. This may happen when we create a
    // peer with the same id with an old deleted peer. If the replication queues for the old peer
    // have not been cleaned up yet then we should not create the new peer, otherwise the old wal
    // file may also be replicated.
    checkQueuesDeleted(peerId);
  }

  private ReplicationPeerDescription checkPeerExists(String peerId) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = peers.get(peerId);
    if (desc == null) {
      throw new ReplicationPeerNotFoundException(peerId);
    }
    return desc;
  }

  /** A sync replication peer may only be removed while in DOWNGRADE_ACTIVE state. */
  private void checkPeerInDAStateIfSyncReplication(String peerId) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = peers.get(peerId);
    if (
      desc != null && desc.getPeerConfig().isSyncReplication()
        && !SyncReplicationState.DOWNGRADE_ACTIVE.equals(desc.getSyncReplicationState())
    ) {
      throw new DoNotRetryIOException(
        "Couldn't remove synchronous replication peer with state=" + desc.getSyncReplicationState()
          + ", Transit the synchronous replication state to be DOWNGRADE_ACTIVE firstly.");
    }
  }

  ReplicationPeerConfig preRemovePeer(String peerId) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = checkPeerExists(peerId);
    checkPeerInDAStateIfSyncReplication(peerId);
    return desc.getPeerConfig();
  }

  void preEnablePeer(String peerId) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = checkPeerExists(peerId);
    if (desc.isEnabled()) {
      throw new DoNotRetryIOException("Replication peer " + peerId + " has already been enabled");
    }
  }

  void preDisablePeer(String peerId) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = checkPeerExists(peerId);
    if (!desc.isEnabled()) {
      throw new DoNotRetryIOException("Replication peer " + peerId + " has already been disabled");
    }
  }

  /**
   * Return the old peer description. Can never be null.
   */
  ReplicationPeerDescription preUpdatePeerConfig(String peerId, ReplicationPeerConfig peerConfig)
    throws DoNotRetryIOException {
    checkPeerConfig(peerConfig);
    ReplicationPeerDescription desc = checkPeerExists(peerId);
    ReplicationPeerConfig oldPeerConfig = desc.getPeerConfig();
    // cluster key, endpoint class and remote wal dir are immutable for an existing peer
    if (!isStringEquals(peerConfig.getClusterKey(), oldPeerConfig.getClusterKey())) {
      throw new DoNotRetryIOException(
        "Changing the cluster key on an existing peer is not allowed. Existing key '"
          + oldPeerConfig.getClusterKey() + "' for peer " + peerId + " does not match new key '"
          + peerConfig.getClusterKey() + "'");
    }
    if (
      !isStringEquals(peerConfig.getReplicationEndpointImpl(),
        oldPeerConfig.getReplicationEndpointImpl())
    ) {
      throw new DoNotRetryIOException("Changing the replication endpoint implementation class "
        + "on an existing peer is not allowed. Existing class '"
        + oldPeerConfig.getReplicationEndpointImpl() + "' for peer " + peerId
        + " does not match new class '" + peerConfig.getReplicationEndpointImpl() + "'");
    }
    if (!isStringEquals(peerConfig.getRemoteWALDir(), oldPeerConfig.getRemoteWALDir())) {
      throw new DoNotRetryIOException(
        "Changing the remote wal dir on an existing peer is not allowed. Existing remote wal "
          + "dir '" + oldPeerConfig.getRemoteWALDir() + "' for peer " + peerId
          + " does not match new remote wal dir '" + peerConfig.getRemoteWALDir() + "'");
    }
    if (oldPeerConfig.isSyncReplication()) {
      if (!ReplicationUtils.isNamespacesAndTableCFsEqual(oldPeerConfig, peerConfig)) {
        throw new DoNotRetryIOException(
          "Changing the replicated namespace/table config on a synchronous replication "
            + "peer(peerId: " + peerId + ") is not allowed.");
      }
    }
    return desc;
  }

  /** Returns the old description of the peer */
  ReplicationPeerDescription preTransitPeerSyncReplicationState(String peerId,
    SyncReplicationState state) throws DoNotRetryIOException {
    ReplicationPeerDescription desc = checkPeerExists(peerId);
    SyncReplicationState fromState = desc.getSyncReplicationState();
    EnumSet<SyncReplicationState> allowedToStates = allowedTransition.get(fromState);
    if (allowedToStates == null || !allowedToStates.contains(state)) {
      throw new DoNotRetryIOException("Can not transit current cluster state from " + fromState
        + " to " + state + " for peer id=" + peerId);
    }
    return desc;
  }

  public void addPeer(String peerId, ReplicationPeerConfig peerConfig, boolean enabled)
    throws ReplicationException {
    if (peers.containsKey(peerId)) {
      // this should be a retry, just return
      return;
    }
    peerConfig = ReplicationPeerConfigUtil.updateReplicationBasePeerConfigs(conf, peerConfig);
    ReplicationPeerConfig copiedPeerConfig = ReplicationPeerConfig.newBuilder(peerConfig).build();
    SyncReplicationState syncReplicationState = copiedPeerConfig.isSyncReplication()
      ? SyncReplicationState.DOWNGRADE_ACTIVE
      : SyncReplicationState.NONE;
    peerStorage.addPeer(peerId, copiedPeerConfig, enabled, syncReplicationState);
    peers.put(peerId,
      new ReplicationPeerDescription(peerId, enabled, copiedPeerConfig, syncReplicationState));
  }

  public void removePeer(String peerId) throws ReplicationException {
    if (!peers.containsKey(peerId)) {
      // this should be a retry, just return
      return;
    }
    peerStorage.removePeer(peerId);
    peers.remove(peerId);
  }

  /** Persists and caches the enabled/disabled flag for a peer. */
  private void setPeerState(String peerId, boolean enabled) throws ReplicationException {
    ReplicationPeerDescription desc = peers.get(peerId);
    if (desc.isEnabled() == enabled) {
      // this should be a retry, just return
      return;
    }
    peerStorage.setPeerState(peerId, enabled);
    peers.put(peerId, new ReplicationPeerDescription(peerId, enabled, desc.getPeerConfig(),
      desc.getSyncReplicationState()));
  }

  public boolean getPeerState(String peerId) throws ReplicationException {
    ReplicationPeerDescription desc = peers.get(peerId);
    if (desc != null) {
      return desc.isEnabled();
    } else {
      throw new ReplicationException("Replication Peer of " + peerId + " does not exist.");
    }
  }

  public void enablePeer(String peerId) throws ReplicationException {
    setPeerState(peerId, true);
  }

  public void disablePeer(String peerId) throws ReplicationException {
    setPeerState(peerId, false);
  }

  public void updatePeerConfig(String peerId, ReplicationPeerConfig peerConfig)
    throws ReplicationException {
    // the checking rules are too complicated here so we give up checking whether this is a retry.
    ReplicationPeerDescription desc = peers.get(peerId);
    ReplicationPeerConfig oldPeerConfig = desc.getPeerConfig();
    ReplicationPeerConfigBuilder newPeerConfigBuilder =
      ReplicationPeerConfig.newBuilder(peerConfig);
    // we need to use the new conf to overwrite the old one.
    newPeerConfigBuilder.putAllConfiguration(oldPeerConfig.getConfiguration());
    newPeerConfigBuilder.putAllConfiguration(peerConfig.getConfiguration());
    ReplicationPeerConfig newPeerConfig = newPeerConfigBuilder.build();
    peerStorage.updatePeerConfig(peerId, newPeerConfig);
    peers.put(peerId, new ReplicationPeerDescription(peerId, desc.isEnabled(), newPeerConfig,
      desc.getSyncReplicationState()));
  }

  /** Lists all peers, or only those whose id matches {@code pattern} when it is non-null. */
  public List<ReplicationPeerDescription> listPeers(Pattern pattern) {
    if (pattern == null) {
      return new ArrayList<>(peers.values());
    }
    return peers.values().stream().filter(r -> pattern.matcher(r.getPeerId()).matches())
      .collect(Collectors.toList());
  }

  public Optional<ReplicationPeerConfig> getPeerConfig(String peerId) {
    ReplicationPeerDescription desc = peers.get(peerId);
    return desc != null ? Optional.of(desc.getPeerConfig()) : Optional.empty();
  }

  void removeAllLastPushedSeqIds(String peerId) throws ReplicationException {
    queueStorage.removeLastSequenceIds(peerId);
  }

  public void setPeerNewSyncReplicationState(String peerId, SyncReplicationState state)
    throws ReplicationException {
    peerStorage.setPeerNewSyncReplicationState(peerId, state);
  }

  public void transitPeerSyncReplicationState(String peerId, SyncReplicationState newState)
    throws ReplicationException {
    if (peerStorage.getPeerNewSyncReplicationState(peerId) != SyncReplicationState.NONE) {
      // Only transit if this is not a retry
      peerStorage.transitPeerSyncReplicationState(peerId);
    }
    ReplicationPeerDescription desc = peers.get(peerId);
    if (desc.getSyncReplicationState() != newState) {
      // Only recreate the desc if this is not a retry
      peers.put(peerId, new ReplicationPeerDescription(peerId, desc.isEnabled(),
        desc.getPeerConfig(), newState));
    }
  }

  public void removeAllQueues(String peerId) throws ReplicationException {
    // Here we need two passes to address the problem of claimQueue. Maybe a claimQueue is still
    // on-going when the refresh peer config procedure is done, if a RS which has already been
    // scanned claims the queue of a RS which has not been scanned yet, we will miss that queue in
    // the scan here, and if the RS who has claimed the queue crashed before creating recovered
    // source, then the queue will leave there until the another RS detects the crash and helps
    // removing the queue.
    // A two pass scan can solve the problem. Anyway, the queue will not disappear during the
    // claiming, it will either under the old RS or under the new RS, and a queue can only be
    // claimed once after the refresh peer procedure done(as the next claim queue will just delete
    // it), so we can make sure that a two pass scan will finally find the queue and remove it,
    // unless it has already been removed by others.
    queueStorage.removeAllQueues(peerId);
    queueStorage.removeAllQueues(peerId);
  }

  public void removeAllQueuesAndHFileRefs(String peerId) throws ReplicationException {
    removeAllQueues(peerId);
    queueStorage.removePeerFromHFileRefs(peerId);
  }

  /**
   * Validates the cluster key (connection URI or ZK quorum string) and rejects a peer that would
   * replicate back to this very cluster.
   */
  private void checkClusterKey(String clusterKey, ReplicationEndpoint endpoint)
    throws DoNotRetryIOException {
    if (endpoint != null && !(endpoint instanceof HBaseReplicationEndpoint)) {
      return;
    }
    // Endpoints implementing HBaseReplicationEndpoint need to check cluster key
    URI connectionUri = ConnectionRegistryFactory.tryParseAsConnectionURI(clusterKey);
    try {
      if (connectionUri != null) {
        ConnectionRegistryFactory.validate(connectionUri);
      } else {
        ZKConfig.validateClusterKey(clusterKey);
      }
    } catch (IOException e) {
      throw new DoNotRetryIOException("Invalid cluster key: " + clusterKey, e);
    }
    if (endpoint != null && endpoint.canReplicateToSameCluster()) {
      return;
    }
    // make sure we do not replicate to same cluster
    String peerClusterId;
    try {
      if (connectionUri != null) {
        // fetch cluster id through standard admin API
        try (Connection conn = ConnectionFactory.createConnection(connectionUri, conf);
          Admin admin = conn.getAdmin()) {
          peerClusterId =
            admin.getClusterMetrics(EnumSet.of(ClusterMetrics.Option.CLUSTER_ID)).getClusterId();
        }
      } else {
        // Create the peer cluster config for get peer cluster id
        Configuration peerConf = HBaseConfiguration.createClusterConf(conf, clusterKey);
        try (ZKWatcher zkWatcher = new ZKWatcher(peerConf, this + "check-peer-cluster-id", null)) {
          peerClusterId = ZKClusterId.readClusterIdZNode(zkWatcher);
        }
      }
    } catch (IOException | KeeperException e) {
      // we just want to check whether we will replicate to the same cluster, so if we get an error
      // while getting the cluster id of the peer cluster, it means we are not connecting to
      // ourselves, as we are still alive. So here we just log the error and continue
      LOG.warn("Can't get peerClusterId for clusterKey=" + clusterKey, e);
      return;
    }
    // In rare case, zookeeper setting may be messed up. That leads to the incorrect
    // peerClusterId value, which is the same as the source clusterId
    if (clusterId.equals(peerClusterId)) {
      throw new DoNotRetryIOException("Invalid cluster key: " + clusterKey
        + ", should not replicate to itself for HBaseInterClusterReplicationEndpoint");
    }
  }

  /** Full semantic validation of a peer config: endpoint class, cluster key, table/ns filters. */
  private void checkPeerConfig(ReplicationPeerConfig peerConfig) throws DoNotRetryIOException {
    String replicationEndpointImpl = peerConfig.getReplicationEndpointImpl();
    ReplicationEndpoint endpoint = null;
    if (!StringUtils.isBlank(replicationEndpointImpl)) {
      try {
        // try creating a instance
        endpoint = Class.forName(replicationEndpointImpl).asSubclass(ReplicationEndpoint.class)
          .getDeclaredConstructor().newInstance();
      } catch (Throwable e) {
        throw new DoNotRetryIOException(
          "Can not instantiate configured replication endpoint class=" + replicationEndpointImpl,
          e);
      }
    }
    checkClusterKey(peerConfig.getClusterKey(), endpoint);

    if (peerConfig.replicateAllUserTables()) {
      // If replicate_all flag is true, it means all user tables will be replicated to peer
      // cluster. Then allow config exclude namespaces or exclude table-cfs which can't be
      // replicated to peer cluster.
      if (
        (peerConfig.getNamespaces() != null && !peerConfig.getNamespaces().isEmpty())
          || (peerConfig.getTableCFsMap() != null && !peerConfig.getTableCFsMap().isEmpty())
      ) {
        throw new DoNotRetryIOException("Need clean namespaces or table-cfs config firstly "
          + "when you want replicate all cluster");
      }
      checkNamespacesAndTableCfsConfigConflict(peerConfig.getExcludeNamespaces(),
        peerConfig.getExcludeTableCFsMap());
    } else {
      // If replicate_all flag is false, it means all user tables can't be replicated to peer
      // cluster. Then allow to config namespaces or table-cfs which will be replicated to peer
      // cluster.
      if (
        (peerConfig.getExcludeNamespaces() != null && !peerConfig.getExcludeNamespaces().isEmpty())
          || (peerConfig.getExcludeTableCFsMap() != null
            && !peerConfig.getExcludeTableCFsMap().isEmpty())
      ) {
        throw new DoNotRetryIOException(
          "Need clean exclude-namespaces or exclude-table-cfs config firstly"
            + " when replicate_all flag is false");
      }
      checkNamespacesAndTableCfsConfigConflict(peerConfig.getNamespaces(),
        peerConfig.getTableCFsMap());
    }

    if (peerConfig.isSyncReplication()) {
      checkPeerConfigForSyncReplication(peerConfig);
    }

    checkConfiguredWALEntryFilters(peerConfig);
  }

  private void checkPeerConfigForSyncReplication(ReplicationPeerConfig peerConfig)
    throws DoNotRetryIOException {
    // This is used to reduce the difficulty for implementing the sync replication state transition
    // as we need to reopen all the related regions.
    // TODO: Add namespace, replicat_all flag back
    if (peerConfig.replicateAllUserTables()) {
      throw new DoNotRetryIOException(
        "Only support replicated table config for sync replication peer");
    }
    if (peerConfig.getNamespaces() != null && !peerConfig.getNamespaces().isEmpty()) {
      throw new DoNotRetryIOException(
        "Only support replicated table config for sync replication peer");
    }
    if (peerConfig.getTableCFsMap() == null || peerConfig.getTableCFsMap().isEmpty()) {
      throw new DoNotRetryIOException("Need config replicated tables for sync replication peer");
    }
    for (List<String> cfs : peerConfig.getTableCFsMap().values()) {
      if (cfs != null && !cfs.isEmpty()) {
        throw new DoNotRetryIOException(
          "Only support replicated table config for sync replication peer");
      }
    }

    Path remoteWALDir = new Path(peerConfig.getRemoteWALDir());
    if (!remoteWALDir.isAbsolute()) {
      throw new DoNotRetryIOException(
        "The remote WAL directory " + peerConfig.getRemoteWALDir() + " is not absolute");
    }
    URI remoteWALDirUri = remoteWALDir.toUri();
    if (remoteWALDirUri.getScheme() == null || remoteWALDirUri.getAuthority() == null) {
      throw new DoNotRetryIOException("The remote WAL directory " + peerConfig.getRemoteWALDir()
        + " is not qualified, you must provide scheme and authority");
    }
  }

  /** A table may be part of at most one sync replication peer. */
  private void checkSyncReplicationPeerConfigConflict(ReplicationPeerConfig peerConfig)
    throws DoNotRetryIOException {
    for (TableName tableName : peerConfig.getTableCFsMap().keySet()) {
      for (Map.Entry<String, ReplicationPeerDescription> entry : peers.entrySet()) {
        ReplicationPeerConfig rpc = entry.getValue().getPeerConfig();
        if (rpc.isSyncReplication() && rpc.getTableCFsMap().containsKey(tableName)) {
          throw new DoNotRetryIOException(
            "Table " + tableName + " has been replicated by peer " + entry.getKey());
        }
      }
    }
  }

  /**
   * Set a namespace in the peer config means that all tables in this namespace will be replicated
   * to the peer cluster.
   * <ol>
   * <li>If peer config already has a namespace, then not allow set any table of this namespace to
   * the peer config.</li>
   * <li>If peer config already has a table, then not allow set this table's namespace to the peer
   * config.</li>
   * </ol>
   * <p>
   * Set a exclude namespace in the peer config means that all tables in this namespace can't be
   * replicated to the peer cluster.
   * <ol>
   * <li>If peer config already has a exclude namespace, then not allow set any exclude table of
   * this namespace to the peer config.</li>
   * <li>If peer config already has a exclude table, then not allow set this table's namespace as a
   * exclude namespace.</li>
   * </ol>
   */
  private void checkNamespacesAndTableCfsConfigConflict(Set<String> namespaces,
    Map<TableName, ? extends Collection<String>> tableCfs) throws DoNotRetryIOException {
    if (namespaces == null || namespaces.isEmpty()) {
      return;
    }
    if (tableCfs == null || tableCfs.isEmpty()) {
      return;
    }
    for (Map.Entry<TableName, ? extends Collection<String>> entry : tableCfs.entrySet()) {
      TableName table = entry.getKey();
      if (namespaces.contains(table.getNamespaceAsString())) {
        throw new DoNotRetryIOException("Table-cfs " + table + " is conflict with namespaces "
          + table.getNamespaceAsString() + " in peer config");
      }
    }
  }

  /** Ensures every WALEntryFilter class configured for the peer can actually be instantiated. */
  private void checkConfiguredWALEntryFilters(ReplicationPeerConfig peerConfig)
    throws DoNotRetryIOException {
    String filterCSV = peerConfig.getConfiguration()
      .get(BaseReplicationEndpoint.REPLICATION_WALENTRYFILTER_CONFIG_KEY);
    if (filterCSV != null && !filterCSV.isEmpty()) {
      String[] filters = filterCSV.split(",");
      for (String filter : filters) {
        try {
          Class.forName(filter).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
          throw new DoNotRetryIOException("Configured WALEntryFilter " + filter
            + " could not be created. Failing add/update peer operation.", e);
        }
      }
    }
  }

  public List<String> getSerialPeerIdsBelongsTo(TableName tableName) {
    return peers.values().stream().filter(p -> p.getPeerConfig().isSerial())
      .filter(p -> p.getPeerConfig().needToReplicate(tableName))
      .map(ReplicationPeerDescription::getPeerId).collect(Collectors.toList());
  }

  @RestrictedApi(explanation = "Should only be called in tests", link = "",
      allowedOnPath = ".*/src/test/.*")
  public ReplicationPeerStorage getPeerStorage() {
    return peerStorage;
  }

  public ReplicationQueueStorage getQueueStorage() {
    return queueStorage;
  }

  /**
   * Builds the table-based replication queue storage plus an initializer that lazily creates the
   * backing system table via a master procedure on first use.
   */
  private static Pair<ReplicationQueueStorage, ReplicationQueueStorageInitializer>
    createReplicationQueueStorage(MasterServices services) throws IOException {
    Configuration conf = services.getConfiguration();
    TableName replicationQueueTableName =
      TableName.valueOf(conf.get(ReplicationStorageFactory.REPLICATION_QUEUE_TABLE_NAME,
        ReplicationStorageFactory.REPLICATION_QUEUE_TABLE_NAME_DEFAULT.getNameAsString()));
    ReplicationQueueStorageInitializer initializer;
    if (services.getTableDescriptors().exists(replicationQueueTableName)) {
      // no need to create the table
      initializer = () -> {
      };
    } else {
      // lazy create the replication table.
      initializer = new ReplicationQueueStorageInitializer() {

        // double-checked-locking flag so the procedure is submitted at most once
        private volatile boolean created = false;

        @Override
        public void initialize() throws IOException {
          if (created) {
            return;
          }
          synchronized (this) {
            if (created) {
              return;
            }
            if (services.getTableDescriptors().exists(replicationQueueTableName)) {
              created = true;
              return;
            }
            long procId = services.createSystemTable(ReplicationStorageFactory
              .createReplicationQueueTableDescriptor(replicationQueueTableName));
            ProcedureExecutor<MasterProcedureEnv> procExec = services.getMasterProcedureExecutor();
            ProcedureSyncWait.waitFor(procExec.getEnvironment(), TimeUnit.MINUTES.toMillis(1),
              "Creating table " + replicationQueueTableName, () -> procExec.isFinished(procId));
          }
        }
      };
    }
    return Pair.newPair(ReplicationStorageFactory.getReplicationQueueStorage(
      services.getConnection(), conf, replicationQueueTableName), initializer);
  }

  public static ReplicationPeerManager create(MasterServices services, String clusterId)
    throws ReplicationException, IOException {
    Configuration conf = services.getConfiguration();
    FileSystem fs = services.getMasterFileSystem().getFileSystem();
    ZKWatcher zk = services.getZooKeeper();
    ReplicationPeerStorage peerStorage =
      ReplicationStorageFactory.getReplicationPeerStorage(fs, zk, conf);
    Pair<ReplicationQueueStorage, ReplicationQueueStorageInitializer> pair =
      createReplicationQueueStorage(services);
    ReplicationQueueStorage queueStorage = pair.getFirst();
    ConcurrentMap<String, ReplicationPeerDescription> peers = new ConcurrentHashMap<>();
    for (String peerId : peerStorage.listPeerIds()) {
      ReplicationPeerConfig peerConfig = peerStorage.getPeerConfig(peerId);
      if (
        ReplicationUtils.LEGACY_REGION_REPLICATION_ENDPOINT_NAME
          .equals(peerConfig.getReplicationEndpointImpl())
      ) {
        // If memstore region replication is enabled, there will be a special replication peer
        // usually called 'region_replica_replication'. We do not need to load it or migrate its
        // replication queue data since we do not rely on general replication framework for
        // region replication in 3.x now, please see HBASE-26233 for more details.
        // We can not delete it now since region server with old version still want to update
        // the replicated wal position to zk, if we delete the replication queue zk node, rs
        // will crash. See HBASE-29169 for more details.
        // In MigrateReplicationQueueFromZkToTableProcedure, finally we will call a deleteAllData
        // on the old replication queue storage, to make sure that we will delete the queue data
        // for this peer and also the peer info in replication peer storage
        LOG.info("Found old region replica replication peer '{}', skip loading it", peerId);
        continue;
      }
      peerConfig = ReplicationPeerConfigUtil.updateReplicationBasePeerConfigs(conf, peerConfig);
      peerStorage.updatePeerConfig(peerId, peerConfig);
      boolean enabled = peerStorage.isPeerEnabled(peerId);
      SyncReplicationState state = peerStorage.getPeerSyncReplicationState(peerId);
      peers.put(peerId, new ReplicationPeerDescription(peerId, enabled, peerConfig, state));
    }
    return new ReplicationPeerManager(fs, zk, peerStorage, queueStorage, peers, conf, clusterId,
      pair.getSecond());
  }

  /**
   * For replication peer cluster key or endpoint class, null and empty string is same. So here
   * don't use {@link StringUtils#equals(CharSequence, CharSequence)} directly.
   */
  private boolean isStringEquals(String s1, String s2) {
    if (StringUtils.isBlank(s1)) {
      return StringUtils.isBlank(s2);
    }
    return s1.equals(s2);
  }

  @Override
  public void onConfigurationChange(Configuration conf) {
    this.conf = conf;
    this.peerStorage = ReplicationStorageFactory.getReplicationPeerStorage(fs, zk, conf);
  }

  /** Converts zk-era queue data to the new format, keeping only the earliest wal per wal group. */
  private ReplicationQueueData convert(ZkReplicationQueueData zkData) {
    Map<String, ReplicationGroupOffset> groupOffsets = new HashMap<>();
    zkData.getWalOffsets().forEach((wal, offset) -> {
      String walGroup = AbstractFSWALProvider.getWALPrefixFromWALName(wal);
      groupOffsets.compute(walGroup, (k, oldOffset) -> {
        if (oldOffset == null) {
          return new ReplicationGroupOffset(wal, offset);
        }
        // we should record the first wal's offset
        long oldWalTs = AbstractFSWALProvider.getTimestamp(oldOffset.getWal());
        long walTs = AbstractFSWALProvider.getTimestamp(wal);
        if (walTs < oldWalTs) {
          return new ReplicationGroupOffset(wal, offset);
        }
        return oldOffset;
      });
    });
    return new ReplicationQueueData(zkData.getQueueId(), ImmutableMap.copyOf(groupOffsets));
  }

  private void migrateQueues(ZKReplicationQueueStorageForMigration oldQueueStorage)
    throws Exception {
    MigrationIterator<Pair<ServerName, List<ZkReplicationQueueData>>> iter =
      oldQueueStorage.listAllQueues();
    for (;;) {
      Pair<ServerName, List<ZkReplicationQueueData>> pair = iter.next();
      if (pair == null) {
        return;
      }
      queueStorage.batchUpdateQueues(pair.getFirst(),
        pair.getSecond().stream().filter(data -> peers.containsKey(data.getQueueId().getPeerId()))
          .map(this::convert).collect(Collectors.toList()));
    }
  }

  private void migrateLastPushedSeqIds(ZKReplicationQueueStorageForMigration oldQueueStorage)
    throws Exception {
    MigrationIterator<List<ZkLastPushedSeqId>> iter = oldQueueStorage.listAllLastPushedSeqIds();
    for (;;) {
      List<ZkLastPushedSeqId> list = iter.next();
      if (list == null) {
        return;
      }
      queueStorage.batchUpdateLastSequenceIds(list.stream()
        .filter(data -> peers.containsKey(data.getPeerId())).collect(Collectors.toList()));
    }
  }

  private void migrateHFileRefs(ZKReplicationQueueStorageForMigration oldQueueStorage)
    throws Exception {
    MigrationIterator<Pair<String, List<String>>> iter = oldQueueStorage.listAllHFileRefs();
    for (;;) {
      Pair<String, List<String>> pair = iter.next();
      if (pair == null) {
        return;
      }
      if (peers.containsKey(pair.getFirst())) {
        queueStorage.batchUpdateHFileRefs(pair.getFirst(), pair.getSecond());
      }
    }
  }

  private interface ExceptionalRunnable {
    void run() throws Exception;
  }

  /** Runs {@code task} on {@code executor}, surfacing its outcome as a CompletableFuture. */
  private CompletableFuture<?> runAsync(ExceptionalRunnable task, ExecutorService executor) {
    CompletableFuture<?> future = new CompletableFuture<>();
    executor.execute(() -> {
      try {
        task.run();
        future.complete(null);
      } catch (Exception e) {
        future.completeExceptionally(e);
      }
    });
    return future;
  }

  // this is for upgrading from 2.x to 3.x, in 3.x we will not load the
  // 'region_replica_replication' peer, but we still need to know whether we have it on the old
  // storage
  boolean hasRegionReplicaReplicationPeer() throws ReplicationException {
    return peerStorage.listPeerIds().stream()
      .anyMatch(p -> p.equals(ServerRegionReplicaUtil.REGION_REPLICA_REPLICATION_PEER));
  }

  /**
   * Submit the migration tasks to the given {@code executor}.
   */
  CompletableFuture<Void> migrateQueuesFromZk(ZKWatcher zookeeper, ExecutorService executor) {
    // the replication queue table creation is asynchronous and will be triggered by addPeer, so
    // here we need to manually initialize it since we will not call addPeer.
    try {
      initializeQueueStorage();
    } catch (IOException e) {
      return FutureUtils.failedFuture(e);
    }
    ZKReplicationQueueStorageForMigration oldStorage =
      new ZKReplicationQueueStorageForMigration(zookeeper, conf);
    return CompletableFuture.allOf(runAsync(() -> migrateQueues(oldStorage), executor),
      runAsync(() -> migrateLastPushedSeqIds(oldStorage), executor),
      runAsync(() -> migrateHFileRefs(oldStorage), executor));
  }

  /** Removes the legacy 'region_replica_replication' peer left behind by 2.x clusters. */
  void deleteLegacyRegionReplicaReplicationPeer() throws ReplicationException {
    for (String peerId : peerStorage.listPeerIds()) {
      ReplicationPeerConfig peerConfig = peerStorage.getPeerConfig(peerId);
      if (
        ReplicationUtils.LEGACY_REGION_REPLICATION_ENDPOINT_NAME
          .equals(peerConfig.getReplicationEndpointImpl())
      ) {
        LOG.info("Delete old region replica replication peer '{}'", peerId);
        peerStorage.removePeer(peerId);
      }
    }
  }
}
apache/hop
37,324
plugins/transforms/combinationlookup/src/main/java/org/apache/hop/pipeline/transforms/combinationlookup/CombinationLookupDialog.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hop.pipeline.transforms.combinationlookup;

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.hop.core.Const;
import org.apache.hop.core.DbCache;
import org.apache.hop.core.SqlStatement;
import org.apache.hop.core.database.Database;
import org.apache.hop.core.database.DatabaseMeta;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.TransformMeta;
import org.apache.hop.ui.core.ConstUi;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.database.dialog.DatabaseExplorerDialog;
import org.apache.hop.ui.core.database.dialog.SqlEditor;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.EnterSelectionDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.widget.ColumnInfo;
import org.apache.hop.ui.core.widget.MetaSelectionLine;
import org.apache.hop.ui.core.widget.TableView;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.FocusListener;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;

/**
 * Settings dialog for the Combination Lookup/Update transform.
 *
 * <p>Lets the user configure the target connection/schema/table, the (business) key fields, how
 * the technical key is generated (table maximum, sequence, or auto-increment — mutually exclusive
 * radio buttons), optional hash-code lookup, caching, and the last-update field. The widgets are
 * laid out with SWT {@link FormLayout} from top (transform name) and bottom (OK/Cancel buttons)
 * toward the key-field table in the middle, so creation order of the widgets matters.
 */
public class CombinationLookupDialog extends BaseTransformDialog {
  // Message bundle anchor class for BaseMessages.getString(PKG, ...).
  private static final Class<?> PKG = CombinationLookupDialog.class;

  // Widget naming convention used throughout: w<Name> is the input widget,
  // wl<Name> is its label, fd<Name>/fdl<Name> are the corresponding FormData.
  private MetaSelectionLine<DatabaseMeta> wConnection;

  private TextVar wSchema;

  private TextVar wTable;

  private Text wCommit;

  private Text wCachesize;

  private Button wPreloadCache;

  private Text wTk;

  private Label wlAutoinc;
  private Button wAutoinc;

  private Label wlTableMax;
  private Button wTableMax;

  private Label wlSeqButton;
  private Button wSeqButton;
  private Text wSeq;

  private Button wReplace;

  private Button wHashcode;

  private TableView wKey;

  private Label wlHashfield;
  private Text wHashfield;

  private Text wLastUpdateField;

  private ColumnInfo[] ciKey;

  // The metadata object being edited; mutated in place by ok()/getInfo().
  private final CombinationLookupMeta input;

  // Connection currently selected in the dialog; refreshed whenever the
  // connection combo changes, and used to enable/disable the technical-key
  // generation options (sequence / auto-increment support varies by database).
  private DatabaseMeta databaseMeta;

  // Field names of the previous transform, filled asynchronously by a
  // background thread started in open().
  private final List<String> inputFields = new ArrayList<>();

  /** List of ColumnInfo that should have the field names of the selected database table */
  private final List<ColumnInfo> tableFieldColumns = new ArrayList<>();

  /**
   * @param parent parent shell for this modal dialog
   * @param variables variable space used to resolve expressions in text fields
   * @param transformMeta the combination lookup metadata to edit (modified on OK)
   * @param pipelineMeta the enclosing pipeline, used to look up input fields and databases
   */
  public CombinationLookupDialog(
      Shell parent,
      IVariables variables,
      CombinationLookupMeta transformMeta,
      PipelineMeta pipelineMeta) {
    super(parent, variables, transformMeta, pipelineMeta);
    input = transformMeta;
  }

  /**
   * Builds and opens the dialog, blocking until it is closed.
   *
   * @return the (possibly new) transform name on OK, or {@code null} when cancelled
   */
  @Override
  public String open() {
    Shell parent = getParent();

    shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
    PropsUi.setLook(shell);
    setShellImage(shell, input);

    FormLayout formLayout = new FormLayout();
    formLayout.marginWidth = PropsUi.getFormMargin();
    formLayout.marginHeight = PropsUi.getFormMargin();

    shell.setLayout(formLayout);
    shell.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Shell.Title"));

    int middle = props.getMiddlePct();
    int margin = PropsUi.getMargin();

    // Flags the metadata as changed on any edit.
    ModifyListener lsMod = e -> input.setChanged();
    // Same, but also refreshes the table-field combo values because the
    // target table location may have changed.
    ModifyListener lsTableMod =
        arg0 -> {
          input.setChanged();
          setTableFieldCombo();
        };
    SelectionListener lsSelection =
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            input.setChanged();
            setTableFieldCombo();
          }
        };
    // Remember the changed flag so cancel() can restore it.
    backupChanged = input.hasChanged();
    databaseMeta = input.getDatabaseMeta();

    // TransformName line
    wlTransformName = new Label(shell, SWT.RIGHT);
    wlTransformName.setText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.TransformName.Label"));
    PropsUi.setLook(wlTransformName);
    fdlTransformName = new FormData();
    fdlTransformName.left = new FormAttachment(0, 0);
    fdlTransformName.right = new FormAttachment(middle, -margin);
    fdlTransformName.top = new FormAttachment(0, margin);
    wlTransformName.setLayoutData(fdlTransformName);
    wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    wTransformName.setText(transformName);
    PropsUi.setLook(wTransformName);
    wTransformName.addModifyListener(lsMod);
    fdTransformName = new FormData();
    fdTransformName.left = new FormAttachment(middle, 0);
    fdTransformName.top = new FormAttachment(0, margin);
    fdTransformName.right = new FormAttachment(100, 0);
    wTransformName.setLayoutData(fdTransformName);

    // Connection line
    wConnection = addConnectionLine(shell, wTransformName, input.getDatabaseMeta(), lsMod);
    wConnection.addSelectionListener(lsSelection);
    wConnection.addModifyListener(
        e -> {
          // We have new content: change connection:
          databaseMeta = findDatabase(wConnection.getText());
          // Re-evaluate which technical-key options the new database supports.
          setAutoincUse();
          setSequence();
          input.setChanged();
        });

    // Schema line...
    Label wlSchema = new Label(shell, SWT.RIGHT);
    wlSchema.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.TargetSchema.Label"));
    PropsUi.setLook(wlSchema);
    FormData fdlSchema = new FormData();
    fdlSchema.left = new FormAttachment(0, 0);
    fdlSchema.right = new FormAttachment(middle, -margin);
    fdlSchema.top = new FormAttachment(wConnection, margin);
    wlSchema.setLayoutData(fdlSchema);

    Button wbSchema = new Button(shell, SWT.PUSH | SWT.CENTER);
    PropsUi.setLook(wbSchema);
    wbSchema.setText(BaseMessages.getString(PKG, "System.Button.Browse"));
    FormData fdbSchema = new FormData();
    fdbSchema.top = new FormAttachment(wConnection, margin);
    fdbSchema.right = new FormAttachment(100, 0);
    wbSchema.setLayoutData(fdbSchema);

    wSchema = new TextVar(variables, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wSchema);
    wSchema.addModifyListener(lsTableMod);
    FormData fdSchema = new FormData();
    fdSchema.left = new FormAttachment(middle, 0);
    fdSchema.top = new FormAttachment(wConnection, margin);
    fdSchema.right = new FormAttachment(wbSchema, -margin);
    wSchema.setLayoutData(fdSchema);

    // Table line...
    Label wlTable = new Label(shell, SWT.RIGHT);
    wlTable.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Target.Label"));
    PropsUi.setLook(wlTable);
    FormData fdlTable = new FormData();
    fdlTable.left = new FormAttachment(0, 0);
    fdlTable.right = new FormAttachment(middle, -margin);
    fdlTable.top = new FormAttachment(wbSchema, margin);
    wlTable.setLayoutData(fdlTable);

    Button wbTable = new Button(shell, SWT.PUSH | SWT.CENTER);
    PropsUi.setLook(wbTable);
    wbTable.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.BrowseTable.Button"));
    FormData fdbTable = new FormData();
    fdbTable.right = new FormAttachment(100, 0);
    fdbTable.top = new FormAttachment(wbSchema, margin);
    wbTable.setLayoutData(fdbTable);

    wTable = new TextVar(variables, shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wTable);
    wTable.addModifyListener(lsTableMod);
    FormData fdTable = new FormData();
    fdTable.left = new FormAttachment(middle, 0);
    fdTable.top = new FormAttachment(wbSchema, margin);
    fdTable.right = new FormAttachment(wbTable, -margin);
    wTable.setLayoutData(fdTable);

    // Commit size ...
    Label wlCommit = new Label(shell, SWT.RIGHT);
    wlCommit.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Commitsize.Label"));
    PropsUi.setLook(wlCommit);
    FormData fdlCommit = new FormData();
    fdlCommit.left = new FormAttachment(0, 0);
    fdlCommit.right = new FormAttachment(middle, -margin);
    fdlCommit.top = new FormAttachment(wTable, margin);
    wlCommit.setLayoutData(fdlCommit);
    wCommit = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wCommit);
    wCommit.addModifyListener(lsMod);
    FormData fdCommit = new FormData();
    fdCommit.top = new FormAttachment(wTable, margin);
    fdCommit.left = new FormAttachment(middle, 0);
    // Commit field takes the first third of the right-hand side; cache size
    // (below) occupies the rest of the same row.
    fdCommit.right = new FormAttachment(middle + (100 - middle) / 3, -margin);
    wCommit.setLayoutData(fdCommit);

    // Cache size
    Label wlCachesize = new Label(shell, SWT.RIGHT);
    wlCachesize.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Cachesize.Label"));
    PropsUi.setLook(wlCachesize);
    FormData fdlCachesize = new FormData();
    fdlCachesize.top = new FormAttachment(wTable, margin);
    fdlCachesize.left = new FormAttachment(wCommit, margin);
    fdlCachesize.right = new FormAttachment(middle + 2 * (100 - middle) / 3, -margin);
    wlCachesize.setLayoutData(fdlCachesize);
    wCachesize = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wCachesize);
    wCachesize.addModifyListener(lsMod);
    FormData fdCachesize = new FormData();
    fdCachesize.top = new FormAttachment(wTable, margin);
    fdCachesize.left = new FormAttachment(wlCachesize, margin);
    fdCachesize.right = new FormAttachment(100, 0);
    wCachesize.setLayoutData(fdCachesize);
    wCachesize.setToolTipText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.Cachesize.ToolTip"));

    // Preload Cache
    wPreloadCache = new Button(shell, SWT.CHECK);
    wPreloadCache.setText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.PreloadCache.Label"));
    PropsUi.setLook(wPreloadCache);
    FormData fdPreloadCache = new FormData();
    fdPreloadCache.top = new FormAttachment(wCachesize, margin);
    fdPreloadCache.left = new FormAttachment(wlCachesize, margin);
    fdPreloadCache.right = new FormAttachment(100, 0);
    wPreloadCache.setLayoutData(fdPreloadCache);

    //
    // The Lookup fields: usually the (business) key
    //
    Label wlKey = new Label(shell, SWT.NONE);
    wlKey.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Keyfields.Label"));
    PropsUi.setLook(wlKey);
    FormData fdlKey = new FormData();
    fdlKey.left = new FormAttachment(0, 0);
    fdlKey.top = new FormAttachment(wPreloadCache, margin);
    fdlKey.right = new FormAttachment(100, 0);
    wlKey.setLayoutData(fdlKey);

    // Two columns: dimension field (filled from the target table) and the
    // matching field from the input stream (filled from previous transforms).
    int nrKeyCols = 2;
    int nrKeyRows = input.getFields().getKeyFields().size();

    ciKey = new ColumnInfo[nrKeyCols];
    ciKey[0] =
        new ColumnInfo(
            BaseMessages.getString(PKG, "CombinationLookupDialog.ColumnInfo.DimensionField"),
            ColumnInfo.COLUMN_TYPE_CCOMBO,
            new String[] {""},
            false);
    ciKey[1] =
        new ColumnInfo(
            BaseMessages.getString(PKG, "CombinationLookupDialog.ColumnInfo.FieldInStream"),
            ColumnInfo.COLUMN_TYPE_CCOMBO,
            new String[] {""},
            false);
    // Column 0 gets its combo values refreshed from the selected table.
    tableFieldColumns.add(ciKey[0]);
    wKey =
        new TableView(
            variables,
            shell,
            SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL,
            ciKey,
            nrKeyRows,
            lsMod,
            props);

    // THE BUTTONS
    wOk = new Button(shell, SWT.PUSH);
    wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
    Button wGet = new Button(shell, SWT.PUSH);
    wGet.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.GetFields.Button"));
    Button wCreate = new Button(shell, SWT.PUSH);
    wCreate.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.SQL.Button"));
    wCancel = new Button(shell, SWT.PUSH);
    wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));

    setButtonPositions(new Button[] {wOk, wGet, wCreate, wCancel}, margin, null);

    // From here on the remaining rows are anchored bottom-up, starting just
    // above the button bar.

    // Last update field:
    Label wlLastUpdateField = new Label(shell, SWT.RIGHT);
    wlLastUpdateField.setText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.LastUpdateField.Label"));
    PropsUi.setLook(wlLastUpdateField);
    FormData fdlLastUpdateField = new FormData();
    fdlLastUpdateField.left = new FormAttachment(0, 0);
    fdlLastUpdateField.right = new FormAttachment(middle, -margin);
    fdlLastUpdateField.bottom = new FormAttachment(wOk, -2 * margin);
    wlLastUpdateField.setLayoutData(fdlLastUpdateField);
    wLastUpdateField = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wLastUpdateField);
    wLastUpdateField.addModifyListener(lsMod);
    FormData fdLastUpdateField = new FormData();
    fdLastUpdateField.left = new FormAttachment(middle, 0);
    fdLastUpdateField.right = new FormAttachment(100, 0);
    fdLastUpdateField.bottom = new FormAttachment(wOk, -2 * margin);
    wLastUpdateField.setLayoutData(fdLastUpdateField);

    // Hash field:
    wlHashfield = new Label(shell, SWT.RIGHT);
    wlHashfield.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Hashfield.Label"));
    PropsUi.setLook(wlHashfield);
    FormData fdlHashfield = new FormData();
    fdlHashfield.left = new FormAttachment(0, 0);
    fdlHashfield.right = new FormAttachment(middle, -margin);
    fdlHashfield.bottom = new FormAttachment(wLastUpdateField, -margin);
    wlHashfield.setLayoutData(fdlHashfield);
    wHashfield = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wHashfield);
    wHashfield.addModifyListener(lsMod);
    FormData fdHashfield = new FormData();
    fdHashfield.left = new FormAttachment(middle, 0);
    fdHashfield.right = new FormAttachment(100, 0);
    fdHashfield.bottom = new FormAttachment(wLastUpdateField, -margin);
    wHashfield.setLayoutData(fdHashfield);

    // Output the input rows or one (1) log-record?
    Label wlHashcode = new Label(shell, SWT.RIGHT);
    wlHashcode.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Hashcode.Label"));
    PropsUi.setLook(wlHashcode);
    FormData fdlHashcode = new FormData();
    fdlHashcode.left = new FormAttachment(0, 0);
    fdlHashcode.right = new FormAttachment(middle, -margin);
    fdlHashcode.bottom = new FormAttachment(wHashfield, -margin);
    wlHashcode.setLayoutData(fdlHashcode);
    wHashcode = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wHashcode);
    FormData fdHashcode = new FormData();
    fdHashcode.left = new FormAttachment(middle, 0);
    fdHashcode.right = new FormAttachment(100, 0);
    fdHashcode.bottom = new FormAttachment(wlHashcode, 0, SWT.CENTER);
    wHashcode.setLayoutData(fdHashcode);
    // Toggling the hash checkbox enables/disables the hash-field entry.
    wHashcode.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            enableFields();
          }
        });

    // Replace lookup fields in the output stream?
    Label wlReplace = new Label(shell, SWT.RIGHT);
    wlReplace.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Replace.Label"));
    PropsUi.setLook(wlReplace);
    FormData fdlReplace = new FormData();
    fdlReplace.left = new FormAttachment(0, 0);
    fdlReplace.right = new FormAttachment(middle, -margin);
    fdlReplace.bottom = new FormAttachment(wHashcode, -margin);
    wlReplace.setLayoutData(fdlReplace);
    wReplace = new Button(shell, SWT.CHECK);
    PropsUi.setLook(wReplace);
    FormData fdReplace = new FormData();
    fdReplace.left = new FormAttachment(middle, 0);
    fdReplace.bottom = new FormAttachment(wlReplace, 0, SWT.CENTER);
    fdReplace.right = new FormAttachment(100, 0);
    wReplace.setLayoutData(fdReplace);
    wReplace.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            enableFields();
          }
        });

    // Technical-key generation options: three radio buttons (table max,
    // sequence, auto-increment) grouped in a 3-column grid composite.
    Label wlTechGroup = new Label(shell, SWT.RIGHT);
    wlTechGroup.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.TechGroup.Label"));
    PropsUi.setLook(wlTechGroup);
    FormData fdlTechGroup = new FormData();
    fdlTechGroup.left = new FormAttachment(0, 0);
    fdlTechGroup.right = new FormAttachment(middle, -margin);
    wlTechGroup.setLayoutData(fdlTechGroup);

    Composite gTechGroup = new Composite(shell, SWT.NONE);
    GridLayout gridLayout = new GridLayout(3, false);
    gTechGroup.setLayout(gridLayout);
    PropsUi.setLook(gTechGroup);
    FormData fdTechGroup = new FormData();
    fdTechGroup.left = new FormAttachment(middle, 0);
    fdTechGroup.bottom = new FormAttachment(wReplace, -margin);
    fdTechGroup.right = new FormAttachment(100, 0);
    gTechGroup.setLayoutData(fdTechGroup);
    // The label's top can only be anchored once the composite exists, hence
    // this late assignment to the FormData created above.
    fdlTechGroup.top = new FormAttachment(gTechGroup, margin, SWT.TOP);

    // Use maximum of table + 1
    wTableMax = new Button(gTechGroup, SWT.RADIO);
    PropsUi.setLook(wTableMax);
    wTableMax.setSelection(false);
    GridData gdTableMax = new GridData();
    wTableMax.setLayoutData(gdTableMax);
    wTableMax.setToolTipText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.TableMaximum.Tooltip", Const.CR));
    wlTableMax = new Label(gTechGroup, SWT.LEFT);
    wlTableMax.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.TableMaximum.Label"));
    PropsUi.setLook(wlTableMax);
    GridData gdlTableMax = new GridData(GridData.FILL_BOTH);
    gdlTableMax.horizontalSpan = 2;
    gdlTableMax.verticalSpan = 1;
    wlTableMax.setLayoutData(gdlTableMax);

    // Sequence Check Button
    wSeqButton = new Button(gTechGroup, SWT.RADIO);
    PropsUi.setLook(wSeqButton);
    wSeqButton.setSelection(false);
    GridData gdSeqButton = new GridData();
    wSeqButton.setLayoutData(gdSeqButton);
    wSeqButton.setToolTipText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.Sequence.Tooltip", Const.CR));
    wlSeqButton = new Label(gTechGroup, SWT.LEFT);
    wlSeqButton.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Sequence.Label"));
    PropsUi.setLook(wlSeqButton);
    GridData gdlSeqButton = new GridData();
    wlSeqButton.setLayoutData(gdlSeqButton);

    wSeq = new Text(gTechGroup, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wSeq);
    wSeq.addModifyListener(lsMod);
    GridData gdSeq = new GridData(GridData.FILL_HORIZONTAL);
    wSeq.setLayoutData(gdSeq);
    // Typing in the sequence name implicitly selects the "sequence" radio
    // option and deselects the other two.
    wSeq.addFocusListener(
        new FocusListener() {
          @Override
          public void focusGained(FocusEvent arg0) {
            input
                .getFields()
                .getReturnFields()
                .setTechKeyCreation(CombinationLookupMeta.CREATION_METHOD_SEQUENCE);
            wSeqButton.setSelection(true);
            wAutoinc.setSelection(false);
            wTableMax.setSelection(false);
          }

          @Override
          public void focusLost(FocusEvent arg0) {
            // No action
          }
        });

    // Use an autoincrement field?
    wAutoinc = new Button(gTechGroup, SWT.RADIO);
    PropsUi.setLook(wAutoinc);
    wAutoinc.setSelection(false);
    GridData gdAutoinc = new GridData();
    wAutoinc.setLayoutData(gdAutoinc);
    wAutoinc.setToolTipText(
        BaseMessages.getString(PKG, "CombinationLookupDialog.AutoincButton.Tooltip", Const.CR));
    wlAutoinc = new Label(gTechGroup, SWT.LEFT);
    wlAutoinc.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.Autoincrement.Label"));
    PropsUi.setLook(wlAutoinc);
    GridData gdlAutoinc = new GridData();
    wlAutoinc.setLayoutData(gdlAutoinc);

    // Apply the current database's capabilities to the radio options.
    setTableMax();
    setSequence();
    setAutoincUse();

    // Technical key field:
    Label wlTk = new Label(shell, SWT.RIGHT);
    wlTk.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.TechnicalKey.Label"));
    PropsUi.setLook(wlTk);
    FormData fdlTk = new FormData();
    fdlTk.left = new FormAttachment(0, 0);
    fdlTk.right = new FormAttachment(middle, -margin);
    fdlTk.bottom = new FormAttachment(gTechGroup, -margin);
    wlTk.setLayoutData(fdlTk);
    wTk = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
    PropsUi.setLook(wTk);
    FormData fdTk = new FormData();
    fdTk.left = new FormAttachment(middle, 0);
    fdTk.bottom = new FormAttachment(gTechGroup, -margin);
    fdTk.right = new FormAttachment(100, 0);
    wTk.setLayoutData(fdTk);

    // The key table stretches between the top rows and the bottom rows.
    FormData fdKey = new FormData();
    fdKey.left = new FormAttachment(0, 0);
    fdKey.top = new FormAttachment(wlKey, margin);
    fdKey.right = new FormAttachment(100, 0);
    fdKey.bottom = new FormAttachment(wTk, -margin);
    wKey.setLayoutData(fdKey);

    //
    // Search the fields in the background
    //
    final Runnable runnable =
        () -> {
          TransformMeta transformMeta = pipelineMeta.findTransform(transformName);
          if (transformMeta != null) {
            try {
              IRowMeta row = pipelineMeta.getPrevTransformFields(variables, transformMeta);

              // Remember these fields...
              for (int i = 0; i < row.size(); i++) {
                inputFields.add(row.getValueMeta(i).getName());
              }

              setComboBoxes();
            } catch (HopException e) {
              logError(BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"));
            }
          }
        };
    new Thread(runnable).start();

    // Add listeners
    wOk.addListener(SWT.Selection, e -> ok());
    wGet.addListener(SWT.Selection, e -> get());
    wCreate.addListener(SWT.Selection, e -> create());
    wCancel.addListener(SWT.Selection, e -> cancel());

    wbSchema.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            getSchemaNames();
          }
        });
    wbTable.addSelectionListener(
        new SelectionAdapter() {
          @Override
          public void widgetSelected(SelectionEvent e) {
            getTableName();
          }
        });

    // Populate the widgets from the metadata, then run the event loop.
    getData();
    setTableFieldCombo();

    BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());

    return transformName;
  }

  /** Pushes the (sorted) input-stream field names into the stream-field combo column. */
  protected void setComboBoxes() {
    // Something was changed in the row.
    //
    String[] fieldNames = ConstUi.sortFieldNames(inputFields);
    ciKey[1].setComboValues(fieldNames);
  }

  /** Enables the hash-field entry only when hash-code lookup is selected. */
  public void enableFields() {
    wHashfield.setEnabled(wHashcode.getSelection());
    wlHashfield.setEnabled(wHashcode.getSelection());
  }

  /**
   * Schedules an asynchronous refresh of the table-field combo values; no-op when the relevant
   * widgets are already disposed (e.g. the dialog is closing).
   */
  private void setTableFieldCombo() {
    if (wTable.isDisposed() || wConnection.isDisposed() || wSchema.isDisposed()) {
      return;
    }
    shell.getDisplay().asyncExec(this::getTableFieldComboValues);
  }

  /**
   * Connects to the selected database and fills the dimension-field combo column with the columns
   * of the configured target table. Runs on the UI thread via asyncExec.
   */
  private void getTableFieldComboValues() {
    final String tableName = wTable.getText();
    if (StringUtils.isEmpty(tableName)) {
      return;
    }
    final String connectionName = wConnection.getText();
    final String schemaName = wSchema.getText();

    // clear
    for (ColumnInfo colInfo : tableFieldColumns) {
      colInfo.setComboValues(new String[] {});
    }

    DatabaseMeta databaseMeta = findDatabase(connectionName);
    if (databaseMeta == null) {
      return;
    }

    try (Database database = new Database(loggingObject, variables, databaseMeta)) {
      database.connect();
      String schemaTable =
          databaseMeta.getQuotedSchemaTableCombination(variables, schemaName, tableName);
      IRowMeta rowMeta = database.getTableFields(schemaTable);
      if (rowMeta != null) {
        String[] fieldNames = rowMeta.getFieldNames();
        if (null != fieldNames) {
          for (ColumnInfo colInfo : tableFieldColumns) {
            colInfo.setComboValues(fieldNames);
          }
        }
      }
    } catch (Exception e) {
      // ignore any errors here. The combo box options will not be
      // filled, but it's no problem for the user
    }
  }

  /**
   * Enables/disables the auto-increment option depending on database support; when auto-increment
   * becomes unavailable while selected, falls back to the table-maximum option.
   */
  public void setAutoincUse() {
    boolean enable =
        (databaseMeta == null)
            || (databaseMeta.supportsAutoinc() && databaseMeta.supportsAutoGeneratedKeys());

    wlAutoinc.setEnabled(enable);
    wAutoinc.setEnabled(enable);
    if (!enable && wAutoinc.getSelection()) {
      wAutoinc.setSelection(false);
      wSeqButton.setSelection(false);
      wTableMax.setSelection(true);
    }
  }

  /** The table-maximum option is always available. */
  public void setTableMax() {
    wlTableMax.setEnabled(true);
    wTableMax.setEnabled(true);
  }

  /**
   * Enables/disables the sequence option depending on database support; when sequences become
   * unavailable while selected, falls back to the table-maximum option.
   */
  public void setSequence() {
    boolean seq = (databaseMeta == null) || databaseMeta.supportsSequences();
    wSeq.setEnabled(seq);
    wlSeqButton.setEnabled(seq);
    wSeqButton.setEnabled(seq);
    if (!seq && wSeqButton.getSelection()) {
      wAutoinc.setSelection(false);
      wSeqButton.setSelection(false);
      wTableMax.setSelection(true);
    }
  }

  /** Copy information from the meta-data input to the dialog fields. */
  public void getData() {
    logDebug(BaseMessages.getString(PKG, "CombinationLookupDialog.Log.GettingKeyInfo"));

    CFields fields = input.getFields();
    List<KeyField> keyFields = fields.getKeyFields();
    ReturnFields returnFields = fields.getReturnFields();

    // Table column 1 = dimension (lookup) field, column 2 = stream field.
    for (int i = 0; i < keyFields.size(); i++) {
      KeyField keyField = keyFields.get(i);
      TableItem item = wKey.table.getItem(i);
      item.setText(1, Const.NVL(keyField.getLookup(), ""));
      item.setText(2, Const.NVL(keyField.getName(), ""));
    }

    wPreloadCache.setSelection(input.isPreloadCache());
    wReplace.setSelection(input.isReplaceFields());
    wHashcode.setSelection(input.isUseHash());
    wHashfield.setEnabled(input.isUseHash());
    wlHashfield.setEnabled(input.isUseHash());

    String techKeyCreation = returnFields.getTechKeyCreation();
    if (techKeyCreation == null) {
      // Determine the creation of the technical key for
      // backwards compatibility. Can probably be removed at
      // version 3.x or so (Sven Boden).
      DatabaseMeta dbMeta = input.getDatabaseMeta();
      if (dbMeta == null || !dbMeta.supportsAutoinc()) {
        returnFields.setUseAutoIncrement(false);
      }
      wAutoinc.setSelection(returnFields.isUseAutoIncrement());

      wSeqButton.setSelection(StringUtils.isNotEmpty(fields.getSequenceFrom()));
      if (!returnFields.isUseAutoIncrement() && StringUtils.isEmpty(fields.getSequenceFrom())) {
        wTableMax.setSelection(true);
      }

      if (dbMeta != null
          && dbMeta.supportsSequences()
          && StringUtils.isNotEmpty(fields.getSequenceFrom())) {
        wSeq.setText(fields.getSequenceFrom());
        returnFields.setUseAutoIncrement(false);
        wTableMax.setSelection(false);
      }
    } else {
      // The "creation" field now determines the behaviour of the
      // key creation.
      if (CombinationLookupMeta.CREATION_METHOD_AUTOINC.equals(techKeyCreation)) {
        wAutoinc.setSelection(true);
      } else if ((CombinationLookupMeta.CREATION_METHOD_SEQUENCE.equals(techKeyCreation))) {
        wSeqButton.setSelection(true);
      } else {
        // the rest
        wTableMax.setSelection(true);
        returnFields.setTechKeyCreation(CombinationLookupMeta.CREATION_METHOD_TABLEMAX);
      }
      wSeq.setText(Const.NVL(fields.getSequenceFrom(), ""));
    }

    setAutoincUse();
    setSequence();
    setTableMax();

    wSchema.setText(Const.NVL(input.getSchemaName(), ""));
    wTable.setText(Const.NVL(input.getTableName(), ""));
    wTk.setText(Const.NVL(returnFields.getTechnicalKeyField(), ""));

    if (input.getDatabaseMeta() != null) {
      wConnection.setText(input.getDatabaseMeta().getName());
    }
    wHashfield.setText(Const.NVL(input.getHashField(), ""));

    wCommit.setText("" + input.getCommitSize());
    wCachesize.setText("" + input.getCacheSize());

    wLastUpdateField.setText(Const.NVL(returnFields.getLastUpdateField(), ""));

    wKey.setRowNums();
    wKey.optWidth(true);

    wTransformName.selectAll();
    wTransformName.setFocus();
  }

  /** Discards edits: restores the changed flag and closes without returning a name. */
  private void cancel() {
    transformName = null;
    input.setChanged(backupChanged);
    dispose();
  }

  /**
   * Validates and applies the dialog contents to {@link #input}, warns when no valid connection is
   * selected, and closes the dialog.
   */
  private void ok() {
    if (Utils.isEmpty(wTransformName.getText())) {
      return;
    }

    getInfo(input);
    transformName = wTransformName.getText(); // return value

    if (findDatabase(wConnection.getText()) == null) {
      // Warn only — the dialog still closes and the (partial) settings are kept.
      MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
      mb.setMessage(
          BaseMessages.getString(PKG, "CombinationLookupDialog.NoValidConnection.DialogMessage"));
      mb.setText(
          BaseMessages.getString(PKG, "CombinationLookupDialog.NoValidConnection.DialogTitle"));
      mb.open();
    }

    dispose();
  }

  /**
   * Copies the dialog contents into the given metadata object.
   *
   * @param in target metadata, mutated in place (used both for OK and for SQL generation)
   */
  private void getInfo(CombinationLookupMeta in) {
    CFields fields = in.getFields();
    ReturnFields returnFields = fields.getReturnFields();

    fields.getKeyFields().clear();
    for (TableItem item : wKey.getNonEmptyItems()) {
      // KeyField(name = stream field (col 2), lookup = dimension field (col 1))
      fields.getKeyFields().add(new KeyField(item.getText(2), item.getText(1)));
    }

    in.setPreloadCache(wPreloadCache.getSelection());
    returnFields.setUseAutoIncrement(wAutoinc.getSelection() && wAutoinc.isEnabled());
    in.setReplaceFields(wReplace.getSelection());
    in.setUseHash(wHashcode.getSelection());
    in.setHashField(wHashfield.getText());
    in.setSchemaName(wSchema.getText());
    in.setTableName(wTable.getText());
    returnFields.setTechnicalKeyField(wTk.getText());

    // Exactly one technical-key creation method wins; the other settings are cleared.
    if (wAutoinc.getSelection()) {
      returnFields.setTechKeyCreation(CombinationLookupMeta.CREATION_METHOD_AUTOINC);
      returnFields.setUseAutoIncrement(true); // for downwards compatibility
      fields.setSequenceFrom(null);
    } else if (wSeqButton.getSelection()) {
      returnFields.setTechKeyCreation(CombinationLookupMeta.CREATION_METHOD_SEQUENCE);
      returnFields.setUseAutoIncrement(false);
      fields.setSequenceFrom(wSeq.getText());
    } else {
      // all the rest
      returnFields.setTechKeyCreation(CombinationLookupMeta.CREATION_METHOD_TABLEMAX);
      returnFields.setUseAutoIncrement(false);
      fields.setSequenceFrom(null);
    }
    in.setDatabaseMeta(findDatabase(wConnection.getText()));

    // Non-numeric entries silently fall back to 0.
    in.setCommitSize(Const.toInt(wCommit.getText(), 0));
    in.setCacheSize(Const.toInt(wCachesize.getText(), 0));

    returnFields.setLastUpdateField(wLastUpdateField.getText());
  }

  /** Opens a selection dialog with the schemas of the selected database connection. */
  private void getSchemaNames() {
    DatabaseMeta dbMeta = findDatabase(wConnection.getText());
    if (dbMeta != null) {
      Database database = new Database(loggingObject, variables, dbMeta);
      try {
        database.connect();
        String[] schemas = database.getSchemas();

        if (null != schemas && schemas.length > 0) {
          schemas = Const.sortStrings(schemas);
          EnterSelectionDialog dialog =
              new EnterSelectionDialog(
                  shell,
                  schemas,
                  BaseMessages.getString(
                      PKG, "CombinationLookupDialog.AvailableSchemas.Title", wConnection.getText()),
                  BaseMessages.getString(
                      PKG,
                      "CombinationLookupDialog.AvailableSchemas.Message",
                      wConnection.getText()));
          String d = dialog.open();
          if (d != null) {
            wSchema.setText(Const.NVL(d, ""));
            setTableFieldCombo();
          }
        } else {
          MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
          mb.setMessage(BaseMessages.getString(PKG, "CombinationLookupDialog.NoSchema.Error"));
          mb.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.GetSchemas.Error"));
          mb.open();
        }
      } catch (Exception e) {
        new ErrorDialog(
            shell,
            BaseMessages.getString(PKG, "System.Dialog.Error.Title"),
            BaseMessages.getString(PKG, "CombinationLookupDialog.ErrorGettingSchemas"),
            e);
      } finally {
        database.disconnect();
      }
    }
  }

  /** Opens the database explorer so the user can pick the target schema and table. */
  private void getTableName() {
    String connectionName = wConnection.getText();
    if (StringUtils.isEmpty(connectionName)) {
      return;
    }
    DatabaseMeta dbMeta = findDatabase(connectionName);
    if (dbMeta != null) {
      logDebug(
          BaseMessages.getString(
              PKG, "CombinationLookupDialog.Log.LookingAtConnection", dbMeta.toString()));

      DatabaseExplorerDialog std =
          new DatabaseExplorerDialog(
              shell, SWT.NONE, variables, dbMeta, pipelineMeta.getDatabases());
      std.setSelectedSchemaAndTable(wSchema.getText(), wTable.getText());
      if (std.open()) {
        wSchema.setText(Const.NVL(std.getSchemaName(), ""));
        wTable.setText(Const.NVL(std.getTableName(), ""));
        setTableFieldCombo();
      }
    } else {
      MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
      mb.setMessage(
          BaseMessages.getString(PKG, "CombinationLookupDialog.ConnectionError2.DialogMessage"));
      mb.setText(BaseMessages.getString(PKG, "System.Dialog.Error.Title"));
      mb.open();
    }
  }

  /** Fills the key table with the field names coming from the previous transform(s). */
  private void get() {
    try {
      IRowMeta r = pipelineMeta.getPrevTransformFields(variables, transformName);
      if (r != null && !r.isEmpty()) {
        BaseTransformDialog.getFieldsFromPrevious(
            r,
            wKey,
            1,
            new int[] {1, 2},
            new int[] {},
            -1,
            -1,
            (tableItem, v) -> {
              // NOTE(review): this table only has 2 data columns; setText(3, ...)
              // looks like a copy-paste from a dialog with more columns and is
              // presumably a silent no-op — confirm against TableView behavior.
              tableItem.setText(3, "N");
              return true;
            });
      }
    } catch (HopException ke) {
      new ErrorDialog(
          shell,
          BaseMessages.getString(PKG, "CombinationLookupDialog.UnableToGetFieldsError.DialogTitle"),
          BaseMessages.getString(
              PKG, "CombinationLookupDialog.UnableToGetFieldsError.DialogMessage"),
          ke);
    }
  }

  /** Generate code for create table. Conversions done by database. */
  private void create() {
    try {
      // Gather info...
      CombinationLookupMeta info = new CombinationLookupMeta();
      getInfo(info);

      String name = transformName; // new name might not yet be linked to other transforms!
      TransformMeta transformMeta =
          new TransformMeta(
              BaseMessages.getString(PKG, "CombinationLookupDialog.TransformMeta.Title"),
              name,
              info);
      IRowMeta prev = pipelineMeta.getPrevTransformFields(variables, transformName);

      SqlStatement sql =
          info.getSqlStatements(variables, pipelineMeta, transformMeta, prev, metadataProvider);
      if (!sql.hasError()) {
        if (sql.hasSql()) {
          // Show the generated DDL in an editor so the user can review/run it.
          SqlEditor sqledit =
              new SqlEditor(
                  shell,
                  SWT.NONE,
                  variables,
                  info.getDatabaseMeta(),
                  DbCache.getInstance(),
                  sql.getSql());
          sqledit.open();
        } else {
          MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION);
          mb.setMessage(
              BaseMessages.getString(PKG, "CombinationLookupDialog.NoSQLNeeds.DialogMessage"));
          mb.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.NoSQLNeeds.DialogTitle"));
          mb.open();
        }
      } else {
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
        mb.setMessage(sql.getError());
        mb.setText(BaseMessages.getString(PKG, "CombinationLookupDialog.SQLError.DialogTitle"));
        mb.open();
      }
    } catch (HopException ke) {
      new ErrorDialog(
          shell,
          BaseMessages.getString(PKG, "CombinationLookupDialog.UnableToCreateSQL.DialogTitle"),
          BaseMessages.getString(PKG, "CombinationLookupDialog.UnableToCreateSQL.DialogMessage"),
          ke);
    }
  }

  /**
   * Resolves a connection name via the metadata provider.
   *
   * @param name connection name as shown in the combo
   * @return the {@link DatabaseMeta}, or {@code null} when lookup fails (an error dialog is shown)
   */
  private DatabaseMeta findDatabase(String name) {
    try {
      return metadataProvider.getSerializer(DatabaseMeta.class).load(name);
    } catch (Exception e) {
      new ErrorDialog(shell, "Error", "Error looking up database connection " + name, e);
      return null;
    }
  }
}
google/cel-java
36,637
checker/src/test/java/dev/cel/checker/ExprCheckerTest.java
// Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package dev.cel.checker; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.collect.ImmutableList.toImmutableList; import static dev.cel.common.types.CelProtoTypes.format; import dev.cel.expr.CheckedExpr; import dev.cel.expr.Decl; import dev.cel.expr.Expr.CreateStruct.EntryOrBuilder; import dev.cel.expr.ExprOrBuilder; import dev.cel.expr.Reference; import com.google.common.base.Joiner; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.protobuf.DescriptorProtos.FileDescriptorSet; import com.google.testing.junit.testparameterinjector.TestParameterInjector; // import com.google.testing.testsize.MediumTest; import dev.cel.common.CelAbstractSyntaxTree; import dev.cel.common.CelContainer; import dev.cel.common.CelFunctionDecl; import dev.cel.common.CelMutableAst; import dev.cel.common.CelOverloadDecl; import dev.cel.common.CelProtoAbstractSyntaxTree; import dev.cel.common.CelVarDecl; import dev.cel.common.ast.CelConstant; import dev.cel.common.internal.EnvVisitable; import dev.cel.common.internal.EnvVisitor; import dev.cel.common.internal.Errors; import dev.cel.common.types.CelProtoTypes; import dev.cel.common.types.CelType; import dev.cel.common.types.ListType; import dev.cel.common.types.MapType; import dev.cel.common.types.NullableType; import dev.cel.common.types.OpaqueType; import 
dev.cel.common.types.OptionalType; import dev.cel.common.types.ProtoMessageTypeProvider; import dev.cel.common.types.SimpleType; import dev.cel.common.types.StructTypeReference; import dev.cel.common.types.TypeParamType; import dev.cel.common.types.TypeType; import dev.cel.expr.conformance.proto3.TestAllTypes; import dev.cel.parser.CelMacro; import dev.cel.testing.CelAdorner; import dev.cel.testing.CelBaselineTestCase; import dev.cel.testing.CelDebug; import dev.cel.testing.testdata.proto3.StandaloneGlobalEnum; import java.util.Arrays; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; /** Tests for the CEL {@link ExprChecker}. */ // @MediumTest @RunWith(TestParameterInjector.class) public class ExprCheckerTest extends CelBaselineTestCase { /** Helper to run a test for configured instance variables. */ private void runTest() throws Exception { CelAbstractSyntaxTree ast = prepareTest( Arrays.asList( StandaloneGlobalEnum.getDescriptor().getFile(), TestAllTypes.getDescriptor().getFile(), dev.cel.expr.conformance.proto2.TestAllTypes.getDescriptor().getFile())); if (ast != null) { testOutput() .println( CelDebug.toAdornedDebugString( CelProtoAbstractSyntaxTree.fromCelAst(ast).getExpr(), new CheckedExprAdorner( CelProtoAbstractSyntaxTree.fromCelAst(ast).toCheckedExpr()))); } testOutput().println(); } @SuppressWarnings("CheckReturnValue") private void runErroneousTest(CelAbstractSyntaxTree parsedAst) { checkArgument(!parsedAst.isChecked()); Errors errors = new Errors("<input>", source); Env env = Env.unconfigured(errors, TEST_OPTIONS); ExprChecker.typecheck(env, container, parsedAst, Optional.absent()); testOutput().println(errors.getAllErrorsAsString()); testOutput().println(); } // Standard // ========= @Test public void standardEnvDump() throws Exception { source = "'redundant expression so the env is constructed and can be printed'"; runTest(); testOutput().println(); testOutput().println("Standard environment:"); ((EnvVisitable) 
celCompiler) .accept( new EnvVisitor() { @Override public void visitDecl(String name, List<Decl> decls) { // TODO: Remove proto to native type adaptation after changing // interface for (Decl decl : decls) { if (decl.hasFunction()) { CelFunctionDecl celFunctionDecl = CelFunctionDecl.newFunctionDeclaration( decl.getName(), decl.getFunction().getOverloadsList().stream() .map(CelOverloadDecl::overloadToCelOverload) .collect(toImmutableList())); testOutput().println(formatFunctionDecl(celFunctionDecl)); } else if (decl.hasIdent()) { CelVarDecl celVarDecl = CelVarDecl.newVarDeclaration( decl.getName(), CelProtoTypes.typeToCelType(decl.getIdent().getType())); testOutput().println(formatVarDecl(celVarDecl)); } else { throw new IllegalArgumentException("Invalid declaration: " + decl); } } } @Override public void visitMacro(CelMacro macro) {} }); } // Operators // ========= @Test public void operatorsBool() throws Exception { source = "false && !true || false ? 2 : 3"; runTest(); } @Test public void operatorsInt64() throws Exception { source = "1 + 2 * 3 - 1 / 2 == 6 % 1"; runTest(); } @Test public void operatorsUInt64() throws Exception { source = "1u + 2u * 3u - 1u / 2u == 6u % 1u"; runTest(); } @Test public void operatorsDouble() throws Exception { source = "1.0 + 2.0 * 3.0 - 1.0 / 2.20202 != 66.6"; runTest(); } @Test public void operatorsString() throws Exception { source = "\"abc\" + \"def\""; runTest(); } @Test public void operatorsBytes() throws Exception { source = "b\"abc\" + b\"def\""; runTest(); } @Test public void operatorsConditional() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "false ? 
x.single_timestamp : null"; runTest(); } // Name References // =============== @Test public void referenceTypeRelative() throws Exception { source = "proto3.TestAllTypes"; container = CelContainer.ofName("cel.expr.conformance.TestAllTypes"); runTest(); } @Test public void referenceTypeAbsolute() throws Exception { source = ".cel.expr.conformance.proto3.TestAllTypes"; runTest(); } @Test public void referenceValue() throws Exception { declareVariable( "container.x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x"; container = CelContainer.ofName("container"); runTest(); } @Test public void referenceUndefinedError() throws Exception { source = "1 + x"; runTest(); } // Messages // ======== @Test public void anyMessage() throws Exception { declareVariable("x", SimpleType.ANY); declareVariable("y", NullableType.create(SimpleType.INT)); source = "x == google.protobuf.Any{" + "type_url:'types.googleapis.com/cel.expr.conformance.proto3.TestAllTypes'}" + " && x.single_nested_message.bb == 43 || x ==" + " cel.expr.conformance.proto3.TestAllTypes{} || y < x|| x >= x"; runTest(); } @Test public void messageFieldSelect() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_nested_message.bb == 43 && has(x.single_nested_message) && has(x.single_int32)" + " && has(x.repeated_int32) && has(x.map_int64_nested_type)"; runTest(); } @Test public void containers() throws Exception { container = CelContainer.newBuilder() .setName("dev.cel.testing.testdata.proto3.StandaloneGlobalEnum") .addAlias("p3_alias", "cel.expr.conformance.proto3") .addAlias("foo_bar_alias", "foo.bar") .addAlias("foo_bar_baz_alias", "foo.bar.baz") .addAbbreviations("cel.expr.conformance.proto2", "cel.expr.conformance.proto3") .build(); source = "p3_alias.TestAllTypes{}"; runTest(); source = "proto2.TestAllTypes{}"; runTest(); source = "proto3.TestAllTypes{}"; runTest(); source = "SGAR"; // From 
StandaloneGlobalEnum runTest(); declareVariable("foo.bar", SimpleType.STRING); declareFunction( "baz", memberOverload( "foo_bar_baz_overload", ImmutableList.of(SimpleType.STRING), SimpleType.DYN)); // Member call of "baz()" on "foo.bar" identifier source = "foo_bar_alias.baz()"; runTest(); declareFunction( "foo.bar.baz.qux", globalOverload("foo_bar_baz_qux_overload", ImmutableList.of(), SimpleType.DYN)); // Global call of "foo.bar.baz.qux" as a fully qualified name source = "foo_bar_baz_alias.qux()"; runTest(); } @Test public void messageCreationError() throws Exception { declareVariable("x", SimpleType.INT); source = "x{foo: 1}"; runTest(); declareVariable("y", TypeType.create(SimpleType.INT)); source = "y{foo: 1}"; runTest(); declareVariable("z", TypeType.create(StructTypeReference.create("msg_without_descriptor"))); source = "z{foo: 1}"; runTest(); } @Test public void messageFieldSelectError() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_nested_message.undefined == x.undefined"; runTest(); } // Lists // ===== @Test public void listOperators() throws Exception { declareVariable( "x", ListType.create(StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"))); source = "(x + x)[1].single_int32 == size(x)"; runTest(); source = "x.size() == size(x)"; runTest(); } @Test public void listRepeatedOperators() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.repeated_int64[x.single_int32] == 23"; runTest(); } @Test public void listIndexTypeError() throws Exception { declareVariable( "x", ListType.create(StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"))); source = "x[1u]"; runTest(); } @Test public void identError() throws Exception { source = "undeclared_ident"; runTest(); } @Test public void listElemTypeError() throws Exception { declareVariable( "x", 
ListType.create(StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"))); declareVariable("y", ListType.create(SimpleType.INT)); source = "x + y"; runTest(); } // Maps // ==== @Test public void mapOperators() throws Exception { declareVariable( "x", MapType.create( SimpleType.STRING, StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"))); source = "x[\"a\"].single_int32 == 23"; runTest(); source = "x.size() == size(x)"; runTest(); } @Test public void mapIndexTypeError() throws Exception { declareVariable( "x", MapType.create( SimpleType.STRING, StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"))); source = "x[2].single_int32 == 23"; runTest(); } @Test public void mapEmpty() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "size(x.map_int64_nested_type) == 0"; runTest(); } // Wrappers // ======== @Test public void wrapper() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_int64_wrapper + 1 != 23"; runTest(); } @Test public void equalsWrapper() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_int64_wrapper == 1 && " + "x.single_int32_wrapper != 2 && " + "x.single_double_wrapper != 2.0 && " + "x.single_float_wrapper == 1.0 && " + "x.single_uint32_wrapper == 1u && " + "x.single_uint64_wrapper != 42u"; runTest(); } // Nullable // ======== @Test public void nullableWrapper() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_int64_wrapper == null"; runTest(); } @Test public void nullableMessage() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_nested_message != null"; runTest(); container = 
CelContainer.ofName("cel.expr.conformance.proto3.TestAllTypesProto"); source = "null == TestAllTypes{} || TestAllTypes{} == null"; runTest(); } @Test public void nullNull() throws Exception { source = "null == null && null != null"; runTest(); } @Test public void nullablePrimitiveError() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_int64 != null"; runTest(); } // Dynamic Types // ============= @Test public void dynOperators() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_value + 1 / x.single_struct.y == 23"; runTest(); } @Test public void dynOperatorsAtRuntime() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes")); source = "x.single_value[23] + x.single_struct['y']"; runTest(); } @Test public void flexibleTypeAdaption() throws Exception { source = "size([] + [1])"; runTest(); source = "[1] + [dyn('string')]"; runTest(); source = "[dyn('string')] + [1]"; runTest(); source = "([[[1]], [[2]], [[3]]][0][0] + [2, 3, {'four': {'five': 'six'}}])[3]"; runTest(); declareVariable("a", TypeParamType.create("T")); source = "a.b + 1 == a[0]"; runTest(); CelType keyParam = TypeParamType.create("A"); CelType valParam = TypeParamType.create("B"); CelType mapType = MapType.create(keyParam, valParam); declareFunction( "merge", globalOverload("merge_maps", ImmutableList.of(mapType, mapType), mapType)); source = "merge({'hello': dyn(1)}, {'world': 2.0})"; runTest(); source = "1 in dyn([1, 2, 3])"; runTest(); } @Test public void userFunctionOverlappingOverloadsError() throws Exception { declareFunction( "func", memberOverload("overlapping_overload_1", ImmutableList.of(SimpleType.INT), SimpleType.INT), memberOverload("overlapping_overload_2", ImmutableList.of(SimpleType.INT), SimpleType.INT)); source = "func(1)"; runTest(); } // Json Types // ========== 
@Test public void jsonType() throws Exception { declareVariable("x", StructTypeReference.create("google.protobuf.Struct")); declareVariable("y", StructTypeReference.create("google.protobuf.ListValue")); declareVariable("z", StructTypeReference.create("google.protobuf.Value")); source = "x[\"claims\"][\"groups\"][0].name == \"dummy\" " + "&& x.claims[\"exp\"] == y[1].time " + "&& x.claims.structured == {'key': z} " + "&& z == 1.0"; runTest(); } // Call Style and User Functions // ============================= @Test public void callStyle() throws Exception { CelType param = TypeParamType.create("A"); // Note, the size() function here is added in a separate scope from the standard declaration // set, but the environment ensures that the standard and custom overloads are returned together // during function resolution time. declareFunction( "size", memberOverload("my_size", ImmutableList.of(ListType.create(param)), SimpleType.INT)); declareVariable("x", ListType.create(SimpleType.INT)); source = "size(x) == x.size()"; runTest(); } @Test public void userFunction() throws Exception { declareFunction( "myfun", memberOverload( "myfun_instance", ImmutableList.of(SimpleType.INT, SimpleType.BOOL, SimpleType.UINT), SimpleType.INT), globalOverload( "myfun_static", ImmutableList.of(SimpleType.INT, SimpleType.BOOL, SimpleType.UINT), SimpleType.INT)); source = "myfun(1, true, 3u) + 1.myfun(false, 3u).myfun(true, 42u)"; runTest(); } @Test public void namespacedFunctions() throws Exception { declareFunction( "ns.func", globalOverload("ns_func_overload", ImmutableList.of(SimpleType.STRING), SimpleType.INT)); source = "ns.func('hello')"; runTest(); declareFunction( "member", memberOverload( "ns_member_overload", ImmutableList.of(SimpleType.INT, SimpleType.INT), SimpleType.INT)); source = "ns.func('hello').member(ns.func('test'))"; runTest(); source = "{ns.func('test'): 2}"; runTest(); source = "{2: ns.func('test')}"; runTest(); source = "[ns.func('test'), 2]"; runTest(); source = 
"[ns.func('test')].map(x, x * 2)"; runTest(); source = "[1, 2].map(x, x * ns.func('test'))"; runTest(); container = CelContainer.ofName("ns"); source = "func('hello')"; runTest(); source = "func('hello').member(func('test'))"; runTest(); } @Test public void namespacedVariables() throws Exception { container = CelContainer.ofName("ns"); declareVariable("ns.x", SimpleType.INT); source = "x"; runTest(); container = CelContainer.ofName("cel.expr.conformance.proto3"); CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("cel.expr.conformance.proto3.msgVar", messageType); source = "msgVar.single_int32"; runTest(); } @Test public void userFunctionMultipleOverloadsWithSanitization() throws Exception { CelType structType = StructTypeReference.create("google.protobuf.Struct"); declareVariable("s", structType); declareFunction( "myfun", globalOverload("myfun_int", ImmutableList.of(SimpleType.INT), SimpleType.INT), globalOverload("myfun_struct", ImmutableList.of(structType), SimpleType.INT)); source = "myfun(1) + myfun(s)"; runTest(); } @Test public void userFunctionOverlaps() throws Exception { CelType param = TypeParamType.create("TEST"); // Note, the size() function here shadows the definition of the size() function in the standard // declaration set. The type param name is chosen as 'TEST' to make sure not to conflict with // the standard environment type param name for the same overload signature. 
declareFunction( "size", globalOverload("my_size", ImmutableList.of(ListType.create(param)), SimpleType.UINT)); declareVariable("x", ListType.create(SimpleType.INT)); source = "size(x) == 1u"; runTest(); } @Test public void userFunctionAddsOverload() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); declareFunction( "size", globalOverload("size_message", ImmutableList.of(messageType), SimpleType.INT)); source = "size(x) > 4"; runTest(); } @Test public void userFunctionAddsMacroError() throws Exception { declareFunction( "has", globalOverload("has_id", ImmutableList.of(SimpleType.DYN), SimpleType.DYN)); source = "false"; runTest(); } // Proto2 // ====== @Test public void proto2PrimitiveField() throws Exception { declareVariable("x", StructTypeReference.create("cel.expr.conformance.proto2.TestAllTypes")); source = "x.single_fixed32 != 0u && x.single_fixed64 > 1u && x.single_int32 != null"; runTest(); source = "x.nestedgroup.single_name == ''"; runTest(); } // Aggregates // ========== @Test public void aggregateMessage() throws Exception { container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes{single_int32: 1, single_int64: 2}"; runTest(); } @Test public void aggregateMessageFieldUndefinedError() throws Exception { container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes{single_int32: 1, undefined: 2}"; runTest(); } @Test public void aggregateMessageFieldTypeError() throws Exception { container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes{single_int32: 1u}"; runTest(); } @Test public void aggregateList() throws Exception { source = "[] + [1,2,3,] + [4]"; runTest(); } @Test public void aggregateListDyn() throws Exception { source = "[1, 2u]"; expectedType = ListType.create(SimpleType.DYN); runTest(); } @Test public void aggregateMap() throws Exception { source = "{1:2u, 2:3u}"; 
runTest(); } @Test public void aggregateMapDyn() throws Exception { source = "{1:2u, 2u:3}"; expectedType = MapType.create(SimpleType.DYN, SimpleType.DYN); runTest(); } @Test public void aggregateMapDynValue() throws Exception { source = "{1:2u, 2:3}"; expectedType = MapType.create(SimpleType.INT, SimpleType.DYN); runTest(); } @Test public void aggregateMapDynKey() throws Exception { source = "{1:2, 2u:3}"; expectedType = MapType.create(SimpleType.DYN, SimpleType.INT); runTest(); } @Test public void aggregateMapFieldSelection() throws Exception { source = "{\"a\":1, \"b\":2}.a"; runTest(); } // Expected and Unexpected Types // ============================= @Test public void expectedAggregateList() throws Exception { source = "[] + [1,2,3,] + [4]"; expectedType = ListType.create(SimpleType.INT); runTest(); } @Test public void unexpectedAggregateMapError() throws Exception { source = "{1:2u, 2:3u}"; expectedType = MapType.create(SimpleType.INT, SimpleType.BOOL); runTest(); } // Type Denotations // ================ @Test public void types() throws Exception { source = "list == type([1]) && map == type({1:2u})"; runTest(); source = "{}.map(c,[c,type(c)])"; runTest(); } // Enum Values // =========== @Test public void enumValues() throws Exception { container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes.NestedEnum.BAR != 99"; runTest(); } @Test public void nestedEnums() throws Exception { declareVariable("x", StructTypeReference.create(TestAllTypes.getDescriptor().getFullName())); container = CelContainer.ofName(TestAllTypes.getDescriptor().getFile().getPackage()); source = "x.single_nested_enum == TestAllTypes.NestedEnum.BAR"; runTest(); declareVariable("single_nested_enum", SimpleType.INT); source = "single_nested_enum == TestAllTypes.NestedEnum.BAR"; runTest(); source = "TestAllTypes{single_nested_enum : TestAllTypes.NestedEnum.BAR}.single_nested_enum == 1"; runTest(); } @Test public void globalEnumValues() throws Exception { container 
= CelContainer.ofName("cel.expr.conformance.proto3"); source = "GlobalEnum.GAZ == 2"; runTest(); } // Global Enum Values in separate file. // =========== @Test public void globalStandaloneEnumValues() throws Exception { container = CelContainer.ofName("dev.cel.testing.testdata.proto3"); source = "StandaloneGlobalEnum.SGAZ == 2"; FileDescriptorSet.Builder descriptorBuilder = FileDescriptorSet.newBuilder(); descriptorBuilder.addFile(StandaloneGlobalEnum.getDescriptor().getFile().toProto()); CelAbstractSyntaxTree ast = prepareTest(descriptorBuilder.build()); if (ast != null) { testOutput() .println( CelDebug.toAdornedDebugString( CelProtoAbstractSyntaxTree.fromCelAst(ast).getExpr(), new CheckedExprAdorner( CelProtoAbstractSyntaxTree.fromCelAst(ast).toCheckedExpr()))); } } // Conversions // =========== @Test public void conversions() throws Exception { source = "int(1u) + int(uint(\"1\"))"; runTest(); } // Comprehensions // ============== @Test public void quantifiers() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.repeated_int64.all(e, e > 0) " + "&& x.repeated_int64.exists(e, e < 0) " + "&& x.repeated_int64.exists_one(e, e == 0)"; runTest(); } @Test public void twoVarComprehensions_allMacro() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.map_string_string.all(i, v, i < v) " + "&& x.repeated_int64.all(i, v, i < v) " + "&& [1, 2, 3, 4].all(i, v, i < 5 && v > 0) " + "&& {'a': 1, 'b': 2}.all(k, v, k.startsWith('a') && v == 1)"; runTest(); } @Test public void twoVarComprehensions_existsMacro() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.map_string_string.exists(i, v, i < v) " + "&& x.repeated_int64.exists(i, v, i < v) " + "&& [1, 2, 
3, 4].exists(i, v, i < 5 && v > 0) " + "&& {'a': 1, 'b': 2}.exists(k, v, k.startsWith('a') && v == 1)"; runTest(); } @Test public void twoVarComprehensions_existsOneMacro() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.map_string_string.exists_one(i, v, i < v) " + "&& x.repeated_int64.exists_one(i, v, i < v) " + "&& [1, 2, 3, 4].exists_one(i, v, i < 5 && v > 0) " + "&& {'a': 1, 'b': 2}.exists_one(k, v, k.startsWith('a') && v == 1)"; runTest(); } @Test public void twoVarComprehensions_transformListMacro() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "[1, 2, 3].transformList(i, v, i > 0 && v < 3, (i * v) + v) == [4] " + "&& [1, 2, 3].transformList(i, v, i % 2 == 0, (i * v) + v) == [1,9] " + "&& [1, 2, 3].transformList(i, v, (i * v) + v) == [1,4,9]"; runTest(); } @Test public void twoVarComprehensions_incorrectIterVars() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.map_string_string.all(i + 1, v, i < v) && x.repeated_int64.all(i, v + 1, i < v)"; runTest(); } @Test public void twoVarComprehensions_duplicateIterVars() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.repeated_int64.exists(i, i, i < v)"; runTest(); } @Test public void twoVarComprehensions_incorrectNumberOfArgs() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "[1, 2, 3, 4].exists_one(i, v, i < v, v)" + "&& x.map_string_string.transformList(i, i < v) " + "&& [1, 2, 3].transformList(i, v, i > 0 && x < 3, (i * v) + v) == [4]"; runTest(); } @Test public void 
quantifiersErrors() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.all(e, 0)"; runTest(); } @Test public void mapExpr() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.repeated_int64.map(x, double(x))"; runTest(); source = "[].map(x, [].map(y, x in y && y in x))"; runTest(); source = "[{}.map(c,c,c)]+[{}.map(c,c,c)]"; runTest(); } @Test public void mapFilterExpr() throws Exception { CelType messageType = StructTypeReference.create("cel.expr.conformance.proto3.TestAllTypes"); declareVariable("x", messageType); source = "x.repeated_int64.map(x, x > 0, double(x))"; runTest(); declareVariable("lists", SimpleType.DYN); source = "lists.filter(x, x > 1.5)"; runTest(); declareVariable("args", MapType.create(SimpleType.STRING, SimpleType.DYN)); source = "args.user[\"myextension\"].customAttributes.filter(x, x.name == \"hobbies\")"; runTest(); } // Abstract Types // ============== @Test public void abstractTypeParameterLess() throws Exception { CelType abstractType = OpaqueType.create("abs"); // Declare the identifier 'abs' to bind to the abstract type. declareVariable("abs", TypeType.create(abstractType)); // Declare a function to create a new value of abstract type. declareFunction("make_abs", globalOverload("make_abs", ImmutableList.of(), abstractType)); // Declare a function to consume value of abstract type. 
declareFunction( "as_bool", memberOverload("as_bool", ImmutableList.of(abstractType), SimpleType.BOOL)); source = "type(make_abs()) == abs && make_abs().as_bool()"; runTest(); } @Test public void abstractTypeParameterized() throws Exception { CelType typeParam = TypeParamType.create("T"); CelType abstractType = OpaqueType.create("vector", typeParam); TypeType typeOfTypeParam = TypeType.create(typeParam); TypeType typeOfAbstractType = TypeType.create(abstractType); declareFunction( "vector", // Declare the function 'vector' to create the abstract type. globalOverload("vector_type", ImmutableList.of(typeOfTypeParam), typeOfAbstractType), // Declare a function to create a new value of abstract type based on a list. globalOverload("vector_list", ImmutableList.of(ListType.create(typeParam)), abstractType)); // Declare a function to consume value of abstract type. declareFunction( "at", memberOverload("vector_at_int", ImmutableList.of(abstractType, SimpleType.INT), typeParam)); // The parameterization of 'vector(dyn)' is erased at runtime and so is checked as a 'vector', // but no further. source = "type(vector([1])) == vector(dyn) && vector([1]).at(0) == 1"; runTest(); } @Test public void abstractTypeParameterizedInListLiteral() throws Exception { CelType typeParam = TypeParamType.create("T"); CelType abstractType = OpaqueType.create("vector", typeParam); TypeType typeOfAbstractType = TypeType.create(abstractType); TypeType typeOfTypeParam = TypeType.create(typeParam); declareFunction( "vector", // Declare the function 'vector' to create the abstract type. globalOverload("vector_type", ImmutableList.of(typeOfTypeParam), typeOfAbstractType), // Declare a function to create a new value of abstract type based on a list. 
globalOverload("vector_list", ImmutableList.of(ListType.create(typeParam)), abstractType)); source = "size([vector([1, 2]), vector([2u, -1])]) == 2"; runTest(); } @Test public void abstractTypeParameterizedError() throws Exception { CelType typeParam = TypeParamType.create("T"); CelType abstractType = OpaqueType.create("vector", typeParam); TypeType typeOfAbstractType = TypeType.create(abstractType); TypeType typeOfTypeParam = TypeType.create(typeParam); declareFunction( "vector", // Declare the function 'vector' to create the abstract type. globalOverload("vector_type", ImmutableList.of(typeOfTypeParam), typeOfAbstractType), // Declare a function to create a new value of abstract type based on a list. globalOverload("vector_list", ImmutableList.of(ListType.create(typeParam)), abstractType)); declareFunction( "add", globalOverload( "add_vector_type", ImmutableList.of(typeOfAbstractType, typeOfAbstractType), typeOfAbstractType)); source = "add(vector([1, 2]), vector([2u, -1])) == vector([1, 2, 2u, -1])"; runTest(); } // Optionals @Test public void optionals() throws Exception { declareVariable("a", MapType.create(SimpleType.STRING, SimpleType.STRING)); source = "a.?b"; runTest(); clearAllDeclarations(); declareVariable("x", OptionalType.create(MapType.create(SimpleType.STRING, SimpleType.STRING))); source = "x.y"; runTest(); source = "{?'nested': x.b}"; runTest(); clearAllDeclarations(); declareVariable("d", OptionalType.create(SimpleType.DYN)); source = "d.dynamic"; runTest(); source = "has(d.dynamic)"; runTest(); clearAllDeclarations(); declareVariable("e", OptionalType.create(MapType.create(SimpleType.STRING, SimpleType.DYN))); source = "has(e.?b.c)"; runTest(); clearAllDeclarations(); source = "{?'key': {'a': 'b'}.?value}"; runTest(); source = "{?'key': {'a': 'b'}.?value}.key"; runTest(); container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes{?single_int32: {}.?i}"; runTest(); container = CelContainer.ofName(""); 
declareVariable("a", OptionalType.create(SimpleType.STRING)); declareVariable("b", OptionalType.create(SimpleType.STRING)); source = "[?a, ?b, 'world']"; runTest(); source = "[?a, ?b, 2]"; runTest(); source = "{?'str':a, 2:3}"; runTest(); } @Test public void optionalErrors() throws Exception { source = "{?'key': 'hi'}"; runTest(); source = "[?'value']"; runTest(); container = CelContainer.ofName("cel.expr.conformance.proto3"); source = "TestAllTypes{?single_int32: 1}"; runTest(); source = "a.?b"; declareVariable("a", MapType.create(SimpleType.STRING, SimpleType.STRING)); prepareCompiler(new ProtoMessageTypeProvider()); CelAbstractSyntaxTree parsedAst = celCompiler.parse(source).getAst(); CelMutableAst mutableAst = CelMutableAst.fromCelAst(parsedAst); mutableAst.expr().call().args().get(1).setConstant(CelConstant.ofValue(true)); runErroneousTest(mutableAst.toParsedAst()); } private static class CheckedExprAdorner implements CelAdorner { private final CheckedExpr checkedExpr; private CheckedExprAdorner(CheckedExpr checkedExpr) { this.checkedExpr = checkedExpr; } @Override public String adorn(ExprOrBuilder expr) { return adorn(expr.getId()); } @Override public String adorn(EntryOrBuilder entry) { return adorn(entry.getId()); } private String adorn(long exprId) { String adorned = ""; if (checkedExpr.containsTypeMap(exprId)) { adorned = String.format("~%s", format(checkedExpr.getTypeMapOrThrow(exprId))); } if (checkedExpr.containsReferenceMap(exprId)) { adorned = String.format("%s^%s", adorned, print(checkedExpr.getReferenceMapOrThrow(exprId))); } return adorned; } private String print(Reference reference) { if (reference.getOverloadIdCount() > 0) { return Joiner.on("|").join(reference.getOverloadIdList()); } return reference.getName(); } } }
apache/fory
36,969
java/fory-core/src/test/java/org/apache/fory/serializer/collection/MapSerializersTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.fory.serializer.collection; import static com.google.common.collect.ImmutableList.of; import static org.apache.fory.TestUtils.mapOf; import static org.apache.fory.collection.Collections.ofArrayList; import static org.apache.fory.collection.Collections.ofHashMap; import static org.testng.Assert.assertEquals; import com.google.common.collect.ImmutableMap; import java.io.Serializable; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import lombok.AllArgsConstructor; import lombok.Data; import org.apache.fory.Fory; import org.apache.fory.ForyTestBase; import org.apache.fory.collection.LazyMap; import org.apache.fory.collection.MapEntry; import org.apache.fory.config.CompatibleMode; import org.apache.fory.config.Language; import 
org.apache.fory.reflect.TypeRef; import org.apache.fory.serializer.Serializer; import org.apache.fory.serializer.collection.CollectionSerializersTest.TestEnum; import org.apache.fory.test.bean.BeanB; import org.apache.fory.test.bean.Cyclic; import org.apache.fory.test.bean.MapFields; import org.apache.fory.type.GenericType; import org.testng.Assert; import org.testng.annotations.Test; public class MapSerializersTest extends ForyTestBase { @Test(dataProvider = "basicMultiConfigFory") public void basicTestCaseWithMultiConfig( boolean trackingRef, boolean codeGen, boolean scopedMetaShare, CompatibleMode compatibleMode) { Fory fory = builder() .withLanguage(Language.JAVA) .withRefTracking(trackingRef) .requireClassRegistration(false) .withCodegen(codeGen) .withCompatibleMode(compatibleMode) .withScopedMetaShare(scopedMetaShare) .build(); // testBasicMap Map<String, Integer> data = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); serDeCheckSerializer(fory, data, "HashMap"); serDeCheckSerializer(fory, new LinkedHashMap<>(data), "LinkedHashMap"); // testBasicMapNested Map<String, Integer> data0 = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); Map<String, Map<String, Integer>> nestedMap = ofHashMap("k1", data0, "k2", data0); serDeCheckSerializer(fory, nestedMap, "HashMap"); serDeCheckSerializer(fory, new LinkedHashMap<>(nestedMap), "LinkedHashMap"); // testMapGenerics byte[] bytes1 = fory.serialize(data); fory.getGenerics().pushGenericType(GenericType.build(new TypeRef<Map<String, Integer>>() {})); byte[] bytes2 = fory.serialize(data); Assert.assertTrue(bytes1.length > bytes2.length); fory.getGenerics().popGenericType(); assertThrowsCause(RuntimeException.class, () -> fory.deserialize(bytes2)); // testSortedMap Map<String, Integer> treeMap = new TreeMap<>(ImmutableMap.of("a", 1, "b", 2)); serDeCheckSerializer(fory, treeMap, "SortedMap"); byte[] sortMapBytes1 = fory.serialize(treeMap); fory.getGenerics().pushGenericType(GenericType.build(new TypeRef<Map<String, Integer>>() 
{})); byte[] sortMapBytes2 = fory.serialize(treeMap); Assert.assertTrue(sortMapBytes1.length > sortMapBytes2.length); fory.getGenerics().popGenericType(); assertThrowsCause(RuntimeException.class, () -> fory.deserialize(sortMapBytes2)); // testTreeMap TreeMap<String, String> map = new TreeMap<>( (Comparator<? super String> & Serializable) (s1, s2) -> { int delta = s1.length() - s2.length(); if (delta == 0) { return s1.compareTo(s2); } else { return delta; } }); map.put("str1", "1"); map.put("str2", "1"); assertEquals(map, serDe(fory, map)); BeanForMap beanForMap = new BeanForMap(); assertEquals(beanForMap, serDe(fory, beanForMap)); // testEmptyMap serDeCheckSerializer(fory, Collections.EMPTY_MAP, "EmptyMapSerializer"); serDeCheckSerializer(fory, Collections.emptySortedMap(), "EmptySortedMap"); // testSingletonMap serDeCheckSerializer(fory, Collections.singletonMap("k", 1), "SingletonMap"); // testConcurrentMap serDeCheckSerializer(fory, new ConcurrentHashMap<>(data), "ConcurrentHashMap"); serDeCheckSerializer(fory, new ConcurrentSkipListMap<>(data), "ConcurrentSkipListMap"); // testEnumMap EnumMap<TestEnum, Object> enumMap = new EnumMap<>(TestEnum.class); enumMap.put(TestEnum.A, 1); enumMap.put(TestEnum.B, "str"); serDe(fory, enumMap); Assert.assertEquals( fory.getClassResolver().getSerializerClass(enumMap.getClass()), MapSerializers.EnumMapSerializer.class); // testNoArgConstructor Map<String, Integer> map1 = newInnerMap(); Assert.assertEquals(jdkDeserialize(jdkSerialize(map1)), map1); serDeCheck(fory, map1); // testMapFieldSerializers MapFields obj = createMapFieldsObject(); Assert.assertEquals(serDe(fory, obj), obj); // testBigMapFieldSerializers final MapFields mapFieldsObject = createBigMapFieldsObject(); serDeCheck(fory, mapFieldsObject); // testObjectKeyValueChunk final Map<Object, Object> differentKeyAndValueTypeMap = createDifferentKeyAndValueTypeMap(); final Serializer<? 
extends Map> serializer = fory.getSerializer(differentKeyAndValueTypeMap.getClass()); MapSerializers.HashMapSerializer mapSerializer = (MapSerializers.HashMapSerializer) serializer; serDeCheck(fory, differentKeyAndValueTypeMap); // testObjectKeyValueBigChunk for (int i = 0; i < 3000; i++) { differentKeyAndValueTypeMap.put("k" + i, i); } serDeCheck(fory, differentKeyAndValueTypeMap); // testMapChunkRefTracking Map<String, Integer> map2 = new HashMap<>(); for (int i = 0; i < 1; i++) { map2.put("k" + i, i); } Object v = ofArrayList(map2, ofHashMap("k1", map2, "k2", new HashMap<>(map2), "k3", map2)); serDeCheck(fory, v); // testMapChunkRefTrackingGenerics MapFields obj1 = new MapFields(); Map<String, Integer> map3 = new HashMap<>(); for (int i = 0; i < 1; i++) { map3.put("k" + i, i); } obj.map = map3; obj.mapKeyFinal = ofHashMap("k1", map3); serDeCheck(fory, obj1); } @Test(dataProvider = "referenceTrackingConfig") public void testBasicMap(boolean referenceTrackingConfig) { Fory fory = builder() .withLanguage(Language.JAVA) .withRefTracking(referenceTrackingConfig) .requireClassRegistration(false) .build(); Map<String, Integer> data = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); serDeCheckSerializer(fory, data, "HashMap"); serDeCheckSerializer(fory, new LinkedHashMap<>(data), "LinkedHashMap"); } @Test(dataProvider = "foryCopyConfig") public void testBasicMap(Fory fory) { Map<String, Object> data = new HashMap<>(ImmutableMap.of("a", 1, "b", 2, "c", Cyclic.create(true))); copyCheck(fory, data); copyCheck(fory, new LinkedHashMap<>(data)); copyCheck(fory, new LazyMap<>(new ArrayList<>(data.entrySet()))); Map<Object, Object> cycMap = new HashMap<>(); cycMap.put(cycMap, cycMap); Map<Object, Object> copy = fory.copy(cycMap); copy.forEach( (k, v) -> { Assert.assertSame(k, copy); Assert.assertSame(v, copy); Assert.assertSame(k, v); }); } @Test(dataProvider = "referenceTrackingConfig") public void testBasicMapNested(boolean referenceTrackingConfig) { Fory fory = builder() 
.withLanguage(Language.JAVA) .withRefTracking(referenceTrackingConfig) .requireClassRegistration(false) .build(); Map<String, Integer> data0 = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); Map<String, Map<String, Integer>> data = ofHashMap("k1", data0, "k2", data0); serDeCheckSerializer(fory, data, "HashMap"); serDeCheckSerializer(fory, new LinkedHashMap<>(data), "LinkedHashMap"); } @Test(dataProvider = "foryCopyConfig") public void testBasicMapNested(Fory fory) { Map<String, Integer> data0 = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); Map<String, Map<String, Integer>> data = ofHashMap("k1", data0, "k2", data0); copyCheck(fory, data); copyCheck(fory, new LinkedHashMap<>(data)); } @Test(dataProvider = "referenceTrackingConfig") public void testMapGenerics(boolean referenceTrackingConfig) { Fory fory = builder() .withLanguage(Language.JAVA) .withRefTracking(referenceTrackingConfig) .requireClassRegistration(false) .build(); Map<String, Integer> data = new HashMap<>(ImmutableMap.of("a", 1, "b", 2)); byte[] bytes1 = fory.serialize(data); fory.getGenerics().pushGenericType(GenericType.build(new TypeRef<Map<String, Integer>>() {})); byte[] bytes2 = fory.serialize(data); Assert.assertTrue(bytes1.length > bytes2.length); fory.getGenerics().popGenericType(); assertThrowsCause(RuntimeException.class, () -> fory.deserialize(bytes2)); } @Test(dataProvider = "referenceTrackingConfig") public void testSortedMap(boolean referenceTrackingConfig) { Fory fory = builder() .withLanguage(Language.JAVA) .withRefTracking(referenceTrackingConfig) .requireClassRegistration(false) .build(); Map<String, Integer> data = new TreeMap<>(ImmutableMap.of("a", 1, "b", 2)); serDeCheckSerializer(fory, data, "SortedMap"); byte[] bytes1 = fory.serialize(data); fory.getGenerics().pushGenericType(GenericType.build(new TypeRef<Map<String, Integer>>() {})); byte[] bytes2 = fory.serialize(data); Assert.assertTrue(bytes1.length > bytes2.length); fory.getGenerics().popGenericType(); 
assertThrowsCause(RuntimeException.class, () -> fory.deserialize(bytes2)); } @Test(dataProvider = "foryCopyConfig") public void testSortedMap(Fory fory) { Map<String, Integer> data = new TreeMap<>(ImmutableMap.of("a", 1, "b", 2)); copyCheck(fory, data); } @Data public static class BeanForMap { public Map<String, String> map = new TreeMap<>(); { map.put("k1", "v1"); map.put("k2", "v2"); } } @Test public void testTreeMap() { boolean referenceTracking = true; Fory fory = builder() .withLanguage(Language.JAVA) .withRefTracking(referenceTracking) .requireClassRegistration(false) .build(); TreeMap<String, String> map = new TreeMap<>( (Comparator<? super String> & Serializable) (s1, s2) -> { int delta = s1.length() - s2.length(); if (delta == 0) { return s1.compareTo(s2); } else { return delta; } }); map.put("str1", "1"); map.put("str2", "1"); assertEquals(map, serDe(fory, map)); BeanForMap beanForMap = new BeanForMap(); assertEquals(beanForMap, serDe(fory, beanForMap)); } @Test(dataProvider = "foryCopyConfig") public void testTreeMap(Fory fory) { TreeMap<String, String> map = new TreeMap<>( (Comparator<? 
super String> & Serializable) (s1, s2) -> { int delta = s1.length() - s2.length(); if (delta == 0) { return s1.compareTo(s2); } else { return delta; } }); map.put("str1", "1"); map.put("str2", "1"); copyCheck(fory, map); BeanForMap beanForMap = new BeanForMap(); copyCheck(fory, beanForMap); } @Test public void testEmptyMap() { serDeCheckSerializer(getJavaFory(), Collections.EMPTY_MAP, "EmptyMapSerializer"); serDeCheckSerializer(getJavaFory(), Collections.emptySortedMap(), "EmptySortedMap"); } @Test(dataProvider = "foryCopyConfig") public void testEmptyMap(Fory fory) { copyCheckWithoutSame(fory, Collections.EMPTY_MAP); copyCheckWithoutSame(fory, Collections.emptySortedMap()); } @Test public void testSingleMap() { serDeCheckSerializer(getJavaFory(), Collections.singletonMap("k", 1), "SingletonMap"); } @Test(dataProvider = "foryCopyConfig") public void testSingleMap(Fory fory) { copyCheck(fory, Collections.singletonMap("k", 1)); } @Test public void testConcurrentMap() { Map<String, Integer> data = new TreeMap<>(ImmutableMap.of("a", 1, "b", 2)); serDeCheckSerializer(getJavaFory(), new ConcurrentHashMap<>(data), "ConcurrentHashMap"); serDeCheckSerializer(getJavaFory(), new ConcurrentSkipListMap<>(data), "ConcurrentSkipListMap"); } @Test(dataProvider = "foryCopyConfig") public void testConcurrentMap(Fory fory) { Map<String, Integer> data = new TreeMap<>(ImmutableMap.of("a", 1, "b", 2)); copyCheck(fory, new ConcurrentHashMap<>(data)); copyCheck(fory, new ConcurrentSkipListMap<>(data)); } @Test public void testEnumMap() { EnumMap<TestEnum, Object> enumMap = new EnumMap<>(TestEnum.class); enumMap.put(TestEnum.A, 1); enumMap.put(TestEnum.B, "str"); serDe(getJavaFory(), enumMap); Assert.assertEquals( getJavaFory().getClassResolver().getSerializerClass(enumMap.getClass()), MapSerializers.EnumMapSerializer.class); } @Test(dataProvider = "foryCopyConfig") public void testEnumMap(Fory fory) { EnumMap<TestEnum, Object> enumMap = new EnumMap<>(TestEnum.class); 
enumMap.put(TestEnum.A, 1); enumMap.put(TestEnum.B, "str"); copyCheck(fory, enumMap); Assert.assertEquals( getJavaFory().getClassResolver().getSerializerClass(enumMap.getClass()), MapSerializers.EnumMapSerializer.class); } private static Map<String, Integer> newInnerMap() { return new HashMap<String, Integer>() { { put("k1", 1); put("k2", 2); } }; } @Test public void testNoArgConstructor() { Fory fory = builder().withLanguage(Language.JAVA).requireClassRegistration(false).build(); Map<String, Integer> map = newInnerMap(); Assert.assertEquals(jdkDeserialize(jdkSerialize(map)), map); serDeCheck(fory, map); } @Test(dataProvider = "foryCopyConfig") public void testNoArgConstructor(Fory fory) { Map<String, Integer> map = newInnerMap(); copyCheck(fory, map); } @Test public void testMapNoJIT() { Fory fory = builder().withLanguage(Language.JAVA).withCodegen(false).build(); serDeCheck(fory, new HashMap<>(ImmutableMap.of("a", 1, "b", 2))); serDeCheck(fory, new HashMap<>(ImmutableMap.of("a", "v1", "b", "v2"))); serDeCheck(fory, new HashMap<>(ImmutableMap.of(1, 2, 3, 4))); } @Test(dataProvider = "javaFory") public void testMapFieldSerializers(Fory fory) { MapFields obj = createMapFieldsObject(); Assert.assertEquals(serDe(fory, obj), obj); } @Test(dataProvider = "foryCopyConfig") public void testMapFieldSerializersCopy(Fory fory) { MapFields obj = createMapFieldsObject(); copyCheck(fory, obj); } @Test(dataProvider = "javaForyKVCompatible") public void testMapFieldsKVCompatible(Fory fory) { MapFields obj = createMapFieldsObject(); Assert.assertEquals(serDe(fory, obj), obj); } @Test(dataProvider = "foryCopyConfig") public void testMapFieldsKVCompatibleCopy(Fory fory) { MapFields obj = createMapFieldsObject(); copyCheck(fory, obj); } public static MapFields createBigMapFieldsObject() { Map<String, Integer> map = new HashMap<>(); for (int i = 0; i < 1000; i++) { map.put("k" + i, i); } return createMapFieldsObject(map); } public static MapFields createMapFieldsObject() { return 
createMapFieldsObject(ImmutableMap.of("k1", 1, "k2", 2)); } public static MapFields createMapFieldsObject(Map<String, Integer> map) { MapFields obj = new MapFields(); obj.map = map; obj.map2 = new HashMap<>(map); obj.map3 = new HashMap<>(map); obj.mapKeyFinal = new HashMap<>(ImmutableMap.of("k1", map, "k2", new HashMap<>(map))); obj.mapValueFinal = new HashMap<>(map); obj.linkedHashMap = new LinkedHashMap<>(map); obj.linkedHashMap2 = new LinkedHashMap<>(map); obj.linkedHashMap3 = new LinkedHashMap<>(map); obj.sortedMap = new TreeMap<>(map); obj.sortedMap2 = new TreeMap<>(map); obj.sortedMap3 = new TreeMap<>(map); obj.concurrentHashMap = new ConcurrentHashMap<>(map); obj.concurrentHashMap2 = new ConcurrentHashMap<>(map); obj.concurrentHashMap3 = new ConcurrentHashMap<>(map); obj.skipListMap = new ConcurrentSkipListMap<>(map); obj.skipListMap2 = new ConcurrentSkipListMap<>(map); obj.skipListMap3 = new ConcurrentSkipListMap<>(map); EnumMap<TestEnum, Object> enumMap = new EnumMap<>(TestEnum.class); enumMap.put(TestEnum.A, 1); enumMap.put(TestEnum.B, "str"); obj.enumMap = enumMap; obj.enumMap2 = enumMap; obj.emptyMap = Collections.emptyMap(); obj.sortedEmptyMap = Collections.emptySortedMap(); obj.singletonMap = Collections.singletonMap("k", "v"); return obj; } public static class TestClass1ForDefaultMap extends AbstractMap<String, Object> { private final Set<MapEntry> data = new HashSet<>(); @Override public Set<Entry<String, Object>> entrySet() { Set data = this.data; return data; } @Override public Object put(String key, Object value) { return data.add(new MapEntry<>(key, value)); } } public static class TestClass2ForDefaultMap extends AbstractMap<String, Object> { private final Set<Entry<String, Object>> data = new HashSet<>(); @Override public Set<Entry<String, Object>> entrySet() { Set data = this.data; return data; } @Override public Object put(String key, Object value) { return data.add(new MapEntry<>(key, value)); } } @Test(dataProvider = "enableCodegen") public 
void testDefaultMapSerializer(boolean enableCodegen) { Fory fory = builder() .withLanguage(Language.JAVA) .withCodegen(enableCodegen) .requireClassRegistration(false) .build(); TestClass1ForDefaultMap map = new TestClass1ForDefaultMap(); map.put("a", 1); map.put("b", 2); Assert.assertSame( fory.getClassResolver().getSerializerClass(TestClass1ForDefaultMap.class), MapSerializers.DefaultJavaMapSerializer.class); serDeCheck(fory, map); TestClass2ForDefaultMap map2 = new TestClass2ForDefaultMap(); map.put("a", 1); map.put("b", 2); Assert.assertSame( fory.getClassResolver().getSerializerClass(TestClass2ForDefaultMap.class), MapSerializers.DefaultJavaMapSerializer.class); serDeCheck(fory, map2); } @Test(dataProvider = "foryCopyConfig") public void testDefaultMapSerializer(Fory fory) { TestClass1ForDefaultMap map = new TestClass1ForDefaultMap(); map.put("a", 1); map.put("b", 2); Assert.assertSame( fory.getClassResolver().getSerializerClass(TestClass1ForDefaultMap.class), MapSerializers.DefaultJavaMapSerializer.class); copyCheck(fory, map); TestClass2ForDefaultMap map2 = new TestClass2ForDefaultMap(); map.put("a", 1); map.put("b", 2); Assert.assertSame( fory.getClassResolver().getSerializerClass(TestClass2ForDefaultMap.class), MapSerializers.DefaultJavaMapSerializer.class); copyCheck(fory, map2); } @Data @AllArgsConstructor public static class GenericMapBoundTest { // test k/v generics public Map<Map<Integer, Collection<Integer>>, ? extends Collection<Integer>> map1; // test k/v generics bounds public Map<? extends Map<Integer, ? extends Collection<Integer>>, ? 
extends Collection<Integer>> map2; } @Test public void testGenericMapBound() { Fory fory1 = builder() .withLanguage(Language.JAVA) .requireClassRegistration(false) .withCodegen(false) .build(); Fory fory2 = builder() .withLanguage(Language.JAVA) .requireClassRegistration(false) .withCodegen(false) .build(); ArrayList<Integer> list = new ArrayList<>(of(1, 2)); roundCheck( fory1, fory2, new GenericMapBoundTest( new HashMap<>(mapOf(new HashMap<>(mapOf(1, list)), list)), new HashMap<>(mapOf(new HashMap<>(mapOf(1, list)), list)))); } public static class StringKeyMap<T> extends HashMap<String, T> {} @Test public void testStringKeyMapSerializer() { // see https://github.com/apache/fory/issues/1170 Fory fory = Fory.builder().withRefTracking(true).build(); fory.registerSerializer(StringKeyMap.class, MapSerializers.StringKeyMapSerializer.class); { StringKeyMap<List<String>> map = new StringKeyMap<>(); map.put("k1", ofArrayList("a", "b")); serDeCheck(fory, map); } { // test nested map StringKeyMap<StringKeyMap<String>> map = new StringKeyMap<>(); StringKeyMap<String> map2 = new StringKeyMap<>(); map2.put("k-k1", "v1"); map2.put("k-k2", "v2"); map.put("k1", map2); serDeCheck(fory, map); } } @Test(dataProvider = "foryCopyConfig") public void testStringKeyMapSerializer(Fory fory) { fory.registerSerializer(StringKeyMap.class, MapSerializers.StringKeyMapSerializer.class); { StringKeyMap<List<String>> map = new StringKeyMap<>(); map.put("k1", ofArrayList("a", "b")); copyCheck(fory, map); } { // test nested map StringKeyMap<StringKeyMap<String>> map = new StringKeyMap<>(); StringKeyMap<String> map2 = new StringKeyMap<>(); map2.put("k-k1", "v1"); map2.put("k-k2", "v2"); map.put("k1", map2); copyCheck(fory, map); } } // must be final class to test nested map value by private MapSerializer private static final class PrivateMap<K, V> implements Map<K, V> { private Set<Entry<K, V>> set = new HashSet<>(); @Override public int size() { return set.size(); } @Override public boolean 
isEmpty() { return set.isEmpty(); } @Override public boolean containsKey(Object key) { return set.stream().anyMatch(e -> e.getKey().equals(key)); } @Override public boolean containsValue(Object value) { return set.stream().anyMatch(e -> e.getValue().equals(value)); } @Override public V get(Object key) { for (Entry<K, V> kvEntry : set) { if (kvEntry.getKey().equals(key)) { return kvEntry.getValue(); } } return null; } @Override public V put(K key, V value) { set.add(new MapEntry<>(key, value)); return null; } @Override public V remove(Object key) { throw new UnsupportedOperationException(); } @Override public void putAll(Map<? extends K, ? extends V> m) { throw new UnsupportedOperationException(); } @Override public void clear() { set.clear(); } @Override public Set<K> keySet() { throw new UnsupportedOperationException(); } @Override public Collection<V> values() { throw new UnsupportedOperationException(); } @Override public Set<Entry<K, V>> entrySet() { return set; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PrivateMap<?, ?> that = (PrivateMap<?, ?>) o; return Objects.equals(set, that.set); } @Override public int hashCode() { return Objects.hash(set); } } @Data @AllArgsConstructor public static class LazyMapCollectionFieldStruct { List<PrivateMap<String, Integer>> mapList; PrivateMap<String, Integer> map; } @Test public void testNestedValueByPrivateMapSerializer() { Fory fory = builder().withRefTracking(false).build(); // test private map serializer fory.registerSerializer(PrivateMap.class, new MapSerializer(fory, PrivateMap.class) {}); PrivateMap<String, Integer> map = new PrivateMap<>(); map.put("k", 1); serDeCheck(fory, new LazyMapCollectionFieldStruct(ofArrayList(map), map)); } @Test(dataProvider = "referenceTrackingConfig") public void testObjectKeyValueChunk(boolean referenceTrackingConfig) { Fory fory = Fory.builder().withRefTracking(referenceTrackingConfig).build(); 
final Map<Object, Object> differentKeyAndValueTypeMap = createDifferentKeyAndValueTypeMap(); final Serializer<? extends Map> serializer = fory.getSerializer(differentKeyAndValueTypeMap.getClass()); MapSerializers.HashMapSerializer mapSerializer = (MapSerializers.HashMapSerializer) serializer; serDeCheck(fory, differentKeyAndValueTypeMap); } @Test(dataProvider = "referenceTrackingConfig") public void testObjectKeyValueBigChunk(boolean referenceTrackingConfig) { Fory fory = Fory.builder().withRefTracking(referenceTrackingConfig).build(); final Map<Object, Object> differentKeyAndValueTypeMap = createDifferentKeyAndValueTypeMap(); for (int i = 0; i < 3000; i++) { differentKeyAndValueTypeMap.put("k" + i, i); } final Serializer<? extends Map> serializer = fory.getSerializer(differentKeyAndValueTypeMap.getClass()); MapSerializers.HashMapSerializer mapSerializer = (MapSerializers.HashMapSerializer) serializer; serDeCheck(fory, differentKeyAndValueTypeMap); } @Test public void testMapChunkRefTracking() { Fory fory = builder().withRefTracking(true).withCodegen(false).requireClassRegistration(false).build(); Map<String, Integer> map = new HashMap<>(); for (int i = 0; i < 1; i++) { map.put("k" + i, i); } Object v = ofArrayList(map, ofHashMap("k1", map, "k2", new HashMap<>(map), "k3", map)); serDeCheck(fory, v); } @Test public void testMapChunkRefTrackingGenerics() { Fory fory = builder().withRefTracking(true).withCodegen(false).requireClassRegistration(false).build(); MapFields obj = new MapFields(); Map<String, Integer> map = new HashMap<>(); for (int i = 0; i < 1; i++) { map.put("k" + i, i); } obj.map = map; obj.mapKeyFinal = ofHashMap("k1", map); serDeCheck(fory, obj); } @Test(dataProvider = "referenceTrackingConfig") public void testMapFieldsChunkSerializer(boolean referenceTrackingConfig) { Fory fory = builder() .withRefTracking(referenceTrackingConfig) .withCodegen(false) .requireClassRegistration(false) .build(); final MapFields mapFieldsObject = 
createBigMapFieldsObject(); serDeCheck(fory, mapFieldsObject); } private static Map<Object, Object> createDifferentKeyAndValueTypeMap() { Map<Object, Object> map = new HashMap<>(); map.put(null, "1"); map.put(2, "1"); map.put(4, "1"); map.put(6, "1"); map.put(7, "1"); map.put(10, "1"); map.put(12, "null"); map.put(19, "null"); map.put(11, null); map.put(20, null); map.put(21, 9); map.put(22, 99); map.put(291, 900); map.put("292", 900); map.put("293", 900); map.put("23", 900); return map; } @Data public static class MapFieldStruct1 { public Map<Integer, Boolean> map1; public Map<String, String> map2; } @Test(dataProvider = "referenceTrackingConfig") public void testMapFieldStructCodegen1(boolean referenceTrackingConfig) { Fory fory = builder() .withRefTracking(referenceTrackingConfig) .withCodegen(true) .requireClassRegistration(false) .build(); MapFieldStruct1 struct1 = new MapFieldStruct1(); struct1.map1 = ofHashMap(1, false, 2, true); struct1.map2 = ofHashMap("k1", "v1", "k2", "v2"); serDeCheck(fory, struct1); } @Data public static class MapFieldStruct2 { public Map<Object, Object> map1; public Map<String, Object> map2; public Map<Object, String> map3; } @Test(dataProvider = "referenceTrackingConfig") public void testMapFieldStructCodegen2(boolean referenceTrackingConfig) { Fory fory = builder() .withRefTracking(referenceTrackingConfig) .withCodegen(true) .requireClassRegistration(false) .build(); MapFieldStruct2 struct1 = new MapFieldStruct2(); struct1.map1 = ofHashMap(1, false, 2, true); struct1.map2 = ofHashMap("k1", "v1", "k2", "v2"); struct1.map3 = ofHashMap(1, "v1", 2, "v2"); serDeCheck(fory, struct1); } @Data public static class MapFieldStruct3 { public Map<Object, Object> map1; public Map<BeanB, Object> map2; public Map<Object, BeanB> map3; } @Test(dataProvider = "referenceTrackingConfig") public void testMapFieldStructCodegen3(boolean referenceTrackingConfig) { Fory fory = builder() .withRefTracking(referenceTrackingConfig) .withCodegen(true) 
.requireClassRegistration(false) .build(); MapFieldStruct3 struct1 = new MapFieldStruct3(); BeanB beanB = BeanB.createBeanB(2); struct1.map1 = ofHashMap(BeanB.createBeanB(2), BeanB.createBeanB(2)); struct1.map2 = ofHashMap(BeanB.createBeanB(2), 1, beanB, beanB); struct1.map3 = ofHashMap(1, beanB, 2, beanB, 3, BeanB.createBeanB(2)); serDeCheck(fory, struct1); } @Data public static class NestedMapFieldStruct1 { public Map<Object, Map<String, String>> map1; public Map<String, Map<String, Integer>> map2; public Map<Integer, Map<String, BeanB>> map3; public Map<Object, Map<Object, Map<String, BeanB>>> map4; } @Test(dataProvider = "referenceTrackingConfig") public void testNestedMapFieldStructCodegen(boolean referenceTrackingConfig) { Fory fory = builder() .withRefTracking(referenceTrackingConfig) .withCodegen(true) .requireClassRegistration(false) .build(); NestedMapFieldStruct1 struct1 = new NestedMapFieldStruct1(); BeanB beanB = BeanB.createBeanB(2); struct1.map1 = ofHashMap(1, ofHashMap("k1", "v1", "k2", "v2")); struct1.map2 = ofHashMap("k1", ofHashMap("k1", 1, "k2", 2)); struct1.map3 = ofHashMap(1, ofHashMap("k1", beanB, "k2", beanB, "k3", BeanB.createBeanB(1))); struct1.map4 = ofHashMap( 2, ofHashMap(true, ofHashMap("k1", beanB, "k2", beanB, "k3", BeanB.createBeanB(1)))); serDeCheck(fory, struct1); } @Data static class Wildcard<T> { public int c = 9; public T t; } @Data private static class Wildcard1<T> { public int c = 10; public T t; } @Data public static class MapWildcardFieldStruct1 { protected Map<String, ?> f0; protected Map<String, Wildcard<String>> f1; protected Map<String, Wildcard<?>> f2; protected Map<?, Wildcard<?>> f3; protected Map<?, Wildcard1<?>> f4; protected Map<Wildcard1<String>, Wildcard<?>> f5; protected Map<String, Wildcard1<?>> f6; } @Test(dataProvider = "referenceTrackingConfig") public void testWildcard(boolean referenceTrackingConfig) { Fory fory = builder().withRefTracking(referenceTrackingConfig).requireClassRegistration(false).build(); 
MapWildcardFieldStruct1 struct = new MapWildcardFieldStruct1(); struct.f0 = ofHashMap("k", 1); struct.f1 = ofHashMap("k1", new Wildcard<>()); struct.f2 = ofHashMap("k2", new Wildcard<>()); struct.f3 = ofHashMap(new Wildcard<>(), new Wildcard<>()); struct.f4 = ofHashMap(new Wildcard1<>(), new Wildcard1<>()); struct.f5 = ofHashMap(new Wildcard1<>(), new Wildcard<>()); struct.f5 = ofHashMap("k5", new Wildcard1<>()); serDeCheck(fory, struct); } @Data public static class NestedListMap { public List<Map<String, String>> map1; public List<HashMap<String, String>> map2; } @Test(dataProvider = "referenceTrackingConfig") public void testNestedListMap(boolean referenceTrackingConfig) { Fory fory = builder().withRefTracking(referenceTrackingConfig).requireClassRegistration(false).build(); NestedListMap o = new NestedListMap(); o.map1 = ofArrayList(ofHashMap("k1", "v")); o.map2 = ofArrayList(ofHashMap("k2", "2")); serDeCheck(fory, o); } @Data public static class NestedMapCollectionGenericTestClass { public Map<String, Object> map = new HashMap<>(); } @Test(dataProvider = "enableCodegen") public void testNestedMapCollectionGeneric(boolean enableCodegen) { NestedMapCollectionGenericTestClass obj = new NestedMapCollectionGenericTestClass(); obj.map = new LinkedHashMap<>(); obj.map.put("obj", ofHashMap("obj", 1, "b", ofArrayList(10))); Fory fory = builder().requireClassRegistration(false).withCodegen(enableCodegen).build(); fory.deserialize(fory.serialize(obj)); } @Data public static class NestedStringLongListMap { public Map<String, List<Long>> stringInt64ListMap; } @Test(dataProvider = "enableCodegen") public void testNestedStringLongListMap(boolean enableCodegen) { Fory fory = Fory.builder().withLanguage(Language.JAVA).withCodegen(enableCodegen).build(); fory.register(NestedStringLongListMap.class); NestedStringLongListMap pojo = new NestedStringLongListMap(); pojo.stringInt64ListMap = new HashMap<>(); pojo.stringInt64ListMap.put("a", Arrays.asList(100L, 200L, 300L)); 
pojo.stringInt64ListMap.put("b", null); serDeCheck(fory, pojo); fory.serialize(pojo); } @Data @AllArgsConstructor public static class NullChunkGeneric { public Map<String, Integer> map; public Map<String, List<Integer>> map1; } @Test public void testNullChunkGeneric() { Fory fory1 = builder().withCodegen(true).build(); Map<String, Integer> map = ofHashMap(null, 1, "k1", null, "k2", 2); Map<String, List<Integer>> map1 = ofHashMap(null, ofArrayList(1), "k1", null, "k2", ofArrayList(2)); NullChunkGeneric o = new NullChunkGeneric(map, map1); byte[] bytes = fory1.serialize(o); Fory fory2 = builder().withCodegen(false).build(); Object object = fory2.deserialize(bytes); assertEquals(object, o); } @Data @AllArgsConstructor public static class State<K extends Comparable<K>, V> { Map<K, V[]> map; } @Test public void testChunkArrayGeneric() { Fory fory = Fory.builder().withLanguage(Language.JAVA).requireClassRegistration(false).build(); State original = new State(ofHashMap("foo", new String[] {"bar"})); State state = serDe(fory, original); Assert.assertEquals(state.map.get("foo"), new String[] {"bar"}); State state1 = new State(ofHashMap("foo", null, "bar", new String[] {"bar"})); byte[] bytes = fory.serialize(state1); Fory fory2 = builder().withCodegen(false).build(); State state2 = (State) fory2.deserialize(bytes); Assert.assertEquals(state2.map.get("bar"), new String[] {"bar"}); } @Data public static class OuterClass { private Map<String, InnerClass> f1 = new HashMap<>(); private TestEnum f2; } @Data public static class InnerClass { int f1; } @Test(dataProvider = "compatible") public void testNestedMapGenericCodegen(boolean compatible) { Fory fory = builder() .withCodegen(true) .withCompatibleMode( compatible ? CompatibleMode.COMPATIBLE : CompatibleMode.SCHEMA_CONSISTENT) .requireClassRegistration(false) .build(); OuterClass value = new OuterClass(); value.f1.put("aaa", null); serDeCheck(fory, value); } }
apache/ignite
37,116
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/transactions/IgniteTxEntry.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.transactions; import java.nio.ByteBuffer; import java.util.Collection; import java.util.LinkedList; import java.util.UUID; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import javax.cache.expiry.ExpiryPolicy; import javax.cache.processor.EntryProcessor; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.GridDirectTransient; import org.apache.ignite.internal.IgniteCodeGeneratingFail; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheEntryPredicate; import org.apache.ignite.internal.processors.cache.CacheInvalidStateException; import org.apache.ignite.internal.processors.cache.CacheInvokeEntry; import org.apache.ignite.internal.processors.cache.CacheObject; import org.apache.ignite.internal.processors.cache.CacheObjectValueContext; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import 
org.apache.ignite.internal.processors.cache.GridCacheOperation; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.IgniteExternalizableExpiryPolicy; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.lang.GridAbsClosureX; import org.apache.ignite.internal.util.lang.GridPeerDeployAware; import org.apache.ignite.internal.util.tostring.GridToStringBuilder; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.T2; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.plugin.extensions.communication.Message; import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType; import org.apache.ignite.plugin.extensions.communication.MessageReader; import org.apache.ignite.plugin.extensions.communication.MessageWriter; import org.apache.ignite.thread.IgniteThread; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.READ; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.TRANSFORM; /** * Transaction entry. Note that it is essential that this class does not override * {@link #equals(Object)} method, as transaction entries should use referential * equality. */ @IgniteCodeGeneratingFail // Field filters should not be generated by MessageCodeGenerator. public class IgniteTxEntry implements GridPeerDeployAware, Message { /** */ private static final long serialVersionUID = 0L; /** Dummy version for non-existing entry read in SERIALIZABLE transaction. 
*/ public static final GridCacheVersion SER_READ_EMPTY_ENTRY_VER = new GridCacheVersion(0, 0, 0); /** Dummy version for any existing entry read in SERIALIZABLE transaction. */ public static final GridCacheVersion SER_READ_NOT_EMPTY_VER = new GridCacheVersion(0, 0, 1); /** */ public static final GridCacheVersion GET_ENTRY_INVALID_VER_UPDATED = new GridCacheVersion(0, 0, 2); /** */ public static final GridCacheVersion GET_ENTRY_INVALID_VER_AFTER_GET = new GridCacheVersion(0, 0, 3); /** Skip store flag bit mask. */ private static final int TX_ENTRY_SKIP_STORE_FLAG_MASK = 1; /** Keep binary flag. */ private static final int TX_ENTRY_KEEP_BINARY_FLAG_MASK = 1 << 1; /** Flag indicating that old value for 'invoke' operation was non null on primary node. */ private static final int TX_ENTRY_OLD_VAL_ON_PRIMARY = 1 << 2; /** Flag indicating that near cache is enabled on originating node and it should be added as reader. */ private static final int TX_ENTRY_ADD_READER_FLAG_MASK = 1 << 3; /** Flag indicating that 'invoke' operation was no-op on primary. */ private static final int TX_ENTRY_NOOP_ON_PRIMARY = 1 << 4; /** Prepared flag updater. */ private static final AtomicIntegerFieldUpdater<IgniteTxEntry> PREPARED_UPD = AtomicIntegerFieldUpdater.newUpdater(IgniteTxEntry.class, "prepared"); /** Owning transaction. */ @GridToStringExclude @GridDirectTransient public IgniteInternalTx tx; /** Cache key. */ @GridToStringExclude private KeyCacheObject key; /** Cache ID. */ @GridToStringExclude private int cacheId; /** Transient tx key. */ @GridDirectTransient private IgniteTxKey txKey; /** Cache value. */ @GridToStringInclude private TxEntryValueHolder val = new TxEntryValueHolder(); /** Visible value for peek. */ @GridToStringInclude @GridDirectTransient private TxEntryValueHolder prevVal = new TxEntryValueHolder(); /** Old value before update. */ @GridToStringInclude private TxEntryValueHolder oldVal = new TxEntryValueHolder(); /** Transform. 
*/ @GridToStringInclude @GridDirectTransient private Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessorsCol; /** Transient field for calculated entry processor value. */ @GridDirectTransient private T2<GridCacheOperation, CacheObject> entryProcessorCalcVal; /** Transform closure bytes. */ @GridToStringExclude private byte[] transformClosBytes; /** Time to live. */ private long ttl; /** DR expire time (explicit) */ private long conflictExpireTime = CU.EXPIRE_TIME_CALCULATE; /** Conflict version. */ private GridCacheVersion conflictVer; /** Explicit lock version if there is one. */ @GridToStringInclude private GridCacheVersion explicitVer; /** DHT version. */ @GridDirectTransient private volatile GridCacheVersion dhtVer; /** Put filters. */ @GridToStringInclude private CacheEntryPredicate[] filters; /** Flag indicating whether filters passed. Used for fast-commit transactions. */ @GridDirectTransient private boolean filtersPassed; /** Flag indicating that filter is set and can not be replaced. */ @GridDirectTransient private boolean filtersSet; /** Underlying cache entry. */ @GridDirectTransient private volatile GridCacheEntryEx entry; /** Cache registry. */ @GridDirectTransient private GridCacheContext<?, ?> ctx; /** Prepared flag to prevent multiple candidate add. */ @GridDirectTransient private transient volatile int prepared; /** Lock flag for collocated cache. */ @GridDirectTransient private transient boolean locked; /** Assigned node ID (required only for partitioned cache). */ @GridDirectTransient private UUID nodeId; /** Flag if this node is a back up node. */ @GridDirectTransient private boolean locMapped; /** Expiry policy. */ @GridDirectTransient private ExpiryPolicy expiryPlc; /** Expiry policy transfer flag. */ @GridDirectTransient private boolean transferExpiryPlc; /** Expiry policy bytes. */ private byte[] expiryPlcBytes; /** Additional flags. */ private byte flags; /** Partition update counter. 
*/ @GridDirectTransient private long partUpdateCntr; /** */ private GridCacheVersion serReadVer; /** */ @GridDirectTransient @GridToStringExclude private transient @Nullable GridAbsClosureX cqNotifyC; /** * Empty constructor. */ public IgniteTxEntry() { /* No-op. */ } /** * This constructor is meant for remote transactions. * * @param ctx Cache registry. * @param tx Owning transaction. * @param op Operation. * @param val Value. * @param ttl Time to live. * @param conflictExpireTime DR expire time. * @param entry Cache entry. * @param conflictVer Data center replication version. * @param skipStore Skip store flag. */ public IgniteTxEntry(GridCacheContext<?, ?> ctx, IgniteInternalTx tx, GridCacheOperation op, CacheObject val, long ttl, long conflictExpireTime, GridCacheEntryEx entry, @Nullable GridCacheVersion conflictVer, boolean skipStore, boolean keepBinary ) { assert ctx != null; assert tx != null; assert op != null; assert entry != null; this.ctx = ctx; this.tx = tx; this.val.value(op, val, false, false); this.entry = entry; this.ttl = ttl; this.conflictExpireTime = conflictExpireTime; this.conflictVer = conflictVer; skipStore(skipStore); keepBinary(keepBinary); key = entry.key(); cacheId = entry.context().cacheId(); } /** * This constructor is meant for local transactions. * * @param ctx Cache registry. * @param tx Owning transaction. * @param op Operation. * @param val Value. * @param entryProcessor Entry processor. * @param invokeArgs Optional arguments for EntryProcessor. * @param ttl Time to live. * @param entry Cache entry. * @param filters Put filters. * @param conflictVer Data center replication version. * @param skipStore Skip store flag. * @param addReader Add reader flag. 
*/ public IgniteTxEntry(GridCacheContext<?, ?> ctx, IgniteInternalTx tx, GridCacheOperation op, CacheObject val, EntryProcessor<Object, Object, Object> entryProcessor, Object[] invokeArgs, long ttl, GridCacheEntryEx entry, CacheEntryPredicate[] filters, GridCacheVersion conflictVer, boolean skipStore, boolean keepBinary, boolean addReader ) { assert ctx != null; assert tx != null; assert op != null; assert entry != null; this.ctx = ctx; this.tx = tx; this.val.value(op, val, false, false); this.entry = entry; this.ttl = ttl; this.filters = filters; this.conflictVer = conflictVer; skipStore(skipStore); keepBinary(keepBinary); addReader(addReader); if (entryProcessor != null) addEntryProcessor(entryProcessor, invokeArgs); key = entry.key(); cacheId = entry.context().cacheId(); } /** * @return Cache context for this tx entry. */ public GridCacheContext<?, ?> context() { return ctx; } /** * @param ctx Cache context for this tx entry. */ public void context(GridCacheContext<?, ?> ctx) { this.ctx = ctx; } /** * @return Flag indicating if this entry is affinity mapped to the same node. */ public boolean locallyMapped() { return locMapped; } /** * @param locMapped Flag indicating if this entry is affinity mapped to the same node. */ public void locallyMapped(boolean locMapped) { this.locMapped = locMapped; } /** * @param ctx Context. * @return Clean copy of this entry. */ public IgniteTxEntry cleanCopy(GridCacheContext<?, ?> ctx) { IgniteTxEntry cp = new IgniteTxEntry(); cp.key = key; cp.cacheId = cacheId; cp.ctx = ctx; cp.val = new TxEntryValueHolder(); cp.filters = filters; cp.val.value(val.op(), val.value(), val.hasWriteValue(), val.hasReadValue()); cp.entryProcessorsCol = entryProcessorsCol; cp.ttl = ttl; cp.conflictExpireTime = conflictExpireTime; cp.explicitVer = explicitVer; cp.conflictVer = conflictVer; cp.expiryPlc = expiryPlc; cp.flags = flags; cp.serReadVer = serReadVer; return cp; } /** * @return Node ID. 
*/ public UUID nodeId() { return nodeId; } /** * @param nodeId Node ID. */ public void nodeId(UUID nodeId) { this.nodeId = nodeId; } /** * @return DHT version. */ public GridCacheVersion dhtVersion() { return dhtVer; } /** * @param dhtVer DHT version. */ public void dhtVersion(GridCacheVersion dhtVer) { this.dhtVer = dhtVer; } /** * @return {@code True} if tx entry was marked as locked. */ public boolean locked() { return locked; } /** * Marks tx entry as locked. */ public void markLocked() { locked = true; } /** * Sets partition counter. * * @param partCntr Partition counter. */ public void updateCounter(long partCntr) { this.partUpdateCntr = partCntr; } /** * @return Partition index. */ public long updateCounter() { return partUpdateCntr; } /** * @param val Value to set. */ public void setAndMarkValid(CacheObject val) { setAndMarkValid(op(), val, this.val.hasWriteValue(), this.val.hasReadValue()); } /** * @param op Operation. * @param val Value to set. */ void setAndMarkValid(GridCacheOperation op, CacheObject val) { setAndMarkValid(op, val, this.val.hasWriteValue(), this.val.hasReadValue()); } /** * @param op Operation. * @param val Value to set. * @param hasReadVal Has read value flag. * @param hasWriteVal Has write value flag. */ void setAndMarkValid(GridCacheOperation op, CacheObject val, boolean hasWriteVal, boolean hasReadVal) { this.val.value(op, val, hasWriteVal, hasReadVal); markValid(); } /** * Marks this entry as value-has-bean-read. Effectively, makes values enlisted to transaction visible * to further peek operations. */ public void markValid() { prevVal.value(val.op(), val.value(), val.hasWriteValue(), val.hasReadValue()); } /** * Marks entry as prepared. * * @return True if entry was marked prepared by this call. */ boolean markPrepared() { return PREPARED_UPD.compareAndSet(this, 0, 1); } /** * @return Entry key. */ public KeyCacheObject key() { return key; } /** * @return Cache ID. 
*/ public int cacheId() { return cacheId; } /** * Sets skip store flag value. * * @param skipStore Skip store flag. */ public void skipStore(boolean skipStore) { setFlag(skipStore, TX_ENTRY_SKIP_STORE_FLAG_MASK); } /** * @return Skip store flag. */ public boolean skipStore() { return isFlag(TX_ENTRY_SKIP_STORE_FLAG_MASK); } /** * @param oldValOnPrimary {@code True} If old value for was non null on primary node. */ public void oldValueOnPrimary(boolean oldValOnPrimary) { setFlag(oldValOnPrimary, TX_ENTRY_OLD_VAL_ON_PRIMARY); } /** * @return {@code True} If old value for 'invoke' operation was non null on primary node. */ boolean oldValueOnPrimary() { return isFlag(TX_ENTRY_OLD_VAL_ON_PRIMARY); } /** * Sets keep binary flag value. * * @param keepBinary Keep binary flag value. */ public void keepBinary(boolean keepBinary) { setFlag(keepBinary, TX_ENTRY_KEEP_BINARY_FLAG_MASK); } /** * @return Keep binary flag value. */ public boolean keepBinary() { return isFlag(TX_ENTRY_KEEP_BINARY_FLAG_MASK); } /** * @param addReader Add reader flag. */ public void addReader(boolean addReader) { setFlag(addReader, TX_ENTRY_ADD_READER_FLAG_MASK); } /** * @return Add reader flag. */ public boolean addReader() { return isFlag(TX_ENTRY_ADD_READER_FLAG_MASK); } /** * @param noop Add no-op flag. */ public void noop(boolean noop) { setFlag(noop, TX_ENTRY_NOOP_ON_PRIMARY); } /** * @return {@code true} if noop flag is set, {@code false} otherwise. */ public boolean noop() { return isFlag(TX_ENTRY_NOOP_ON_PRIMARY); } /** * Sets flag mask. * * @param flag Set or clear. * @param mask Mask. */ private void setFlag(boolean flag, int mask) { flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask); } /** * Reads flag mask. * * @param mask Mask to read. * @return Flag value. */ private boolean isFlag(int mask) { return (flags & mask) != 0; } /** * @return Tx key. 
*/ public IgniteTxKey txKey() { if (txKey == null) txKey = new IgniteTxKey(key, cacheId); return txKey; } /** * @return Underlying cache entry. */ public GridCacheEntryEx cached() { return entry; } /** * @param entry Cache entry. */ public void cached(GridCacheEntryEx entry) { assert entry == null || entry.context() == ctx : "Invalid entry assigned to tx entry [txEntry=" + this + ", entry=" + entry + ", ctxNear=" + ctx.isNear() + ", ctxDht=" + ctx.isDht() + ']'; this.entry = entry; } /** * @return Entry value. */ @Nullable public CacheObject value() { return val.value(); } /** * @return Old value. */ @Nullable public CacheObject oldValue() { return oldVal != null ? oldVal.value() : null; } /** * @param oldVal Old value. */ public void oldValue(CacheObject oldVal) { if (this.oldVal == null) this.oldVal = new TxEntryValueHolder(); this.oldVal.value(op(), oldVal, true, true); } /** * @return {@code True} if old value present. */ public boolean hasOldValue() { return oldVal != null && oldVal.hasValue(); } /** * @return {@code True} if has value explicitly set. */ public boolean hasValue() { return val.hasValue(); } /** * @return {@code True} if has write value set. */ public boolean hasWriteValue() { return val.hasWriteValue(); } /** * @return {@code True} if has read value set. */ public boolean hasReadValue() { return val.hasReadValue(); } /** * @return Value visible for peek. */ @Nullable public CacheObject previousValue() { return prevVal.value(); } /** * @return {@code True} if has previous value explicitly set. */ public boolean hasPreviousValue() { return prevVal.hasValue(); } /** * @return Previous operation to revert entry in case of filter failure. */ @Nullable public GridCacheOperation previousOperation() { return prevVal.op(); } /** * @return Time to live. */ public long ttl() { return ttl; } /** * @param ttl Time to live. */ public void ttl(long ttl) { this.ttl = ttl; } /** * @return Conflict expire time. 
*/ public long conflictExpireTime() { return conflictExpireTime; } /** * @param conflictExpireTime Conflict expire time. */ public void conflictExpireTime(long conflictExpireTime) { this.conflictExpireTime = conflictExpireTime; } /** * @param val Entry value. * @param writeVal Write value flag. * @param readVal Read value flag. */ public void value(@Nullable CacheObject val, boolean writeVal, boolean readVal) { this.val.value(this.val.op(), val, writeVal, readVal); } /** * Sets read value if this tx entry does not have write value yet. * * @param val Read value to set. */ public void readValue(@Nullable CacheObject val) { this.val.value(this.val.op(), val, false, true); } /** * @param entryProcessor Entry processor. * @param invokeArgs Optional arguments for EntryProcessor. */ public void addEntryProcessor(EntryProcessor<Object, Object, Object> entryProcessor, Object[] invokeArgs) { if (entryProcessorsCol == null) entryProcessorsCol = new LinkedList<>(); entryProcessorsCol.add(new T2<>(entryProcessor, invokeArgs)); // Must clear transform closure bytes since collection has changed. transformClosBytes = null; val.op(TRANSFORM); } /** * @return Collection of entry processors. */ public Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessors() { return entryProcessorsCol; } /** * @param cacheVal Value. * @return New value. 
*/ @SuppressWarnings("unchecked") public CacheObject applyEntryProcessors(CacheObject cacheVal) { GridCacheVersion ver; try { ver = entry.version(); } catch (GridCacheEntryRemovedException ignore) { assert tx == null || tx.optimistic() : tx; ver = null; } Object val = null; Object keyVal = null; for (T2<EntryProcessor<Object, Object, Object>, Object[]> t : entryProcessors()) { IgniteThread.onEntryProcessorEntered(true); try { CacheInvokeEntry<Object, Object> invokeEntry = new CacheInvokeEntry(key, keyVal, cacheVal, val, ver, keepBinary(), cached()); EntryProcessor proc = t.get1(); proc.process(invokeEntry, t.get2()); val = invokeEntry.getValue(); keyVal = invokeEntry.key(); } catch (Exception ignore) { // No-op. } finally { IgniteThread.onEntryProcessorLeft(); } } return ctx.toCacheObject(val); } /** * @param entryProcessorsCol Collection of entry processors. */ public void entryProcessors( @Nullable Collection<T2<EntryProcessor<Object, Object, Object>, Object[]>> entryProcessorsCol) { this.entryProcessorsCol = entryProcessorsCol; // Must clear transform closure bytes since collection has changed. transformClosBytes = null; } /** * @return Cache operation. */ public GridCacheOperation op() { return val.op(); } /** * @param op Cache operation. */ public void op(GridCacheOperation op) { val.op(op); } /** * @return {@code True} if read entry. */ public boolean isRead() { return op() == READ; } /** * @param explicitVer Explicit version. */ public void explicitVersion(GridCacheVersion explicitVer) { this.explicitVer = explicitVer; } /** * @return Explicit version. */ public GridCacheVersion explicitVersion() { return explicitVer; } /** * @return Conflict version. */ @Nullable public GridCacheVersion conflictVersion() { return conflictVer; } /** * @param conflictVer Conflict version. */ public void conflictVersion(@Nullable GridCacheVersion conflictVer) { this.conflictVer = conflictVer; } /** * @return Put filters. 
*/ public CacheEntryPredicate[] filters() { return filters; } /** * @param filters Put filters. */ public void filters(CacheEntryPredicate[] filters) { this.filters = filters; } /** * @return {@code True} if filters passed for fast-commit transactions. */ public boolean filtersPassed() { return filtersPassed; } /** * @param filtersPassed {@code True} if filters passed for fast-commit transactions. */ public void filtersPassed(boolean filtersPassed) { this.filtersPassed = filtersPassed; } /** * @return {@code True} if filters are set. */ public boolean filtersSet() { return filtersSet; } /** * @param filtersSet {@code True} if filters are set and should not be replaced. */ public void filtersSet(boolean filtersSet) { this.filtersSet = filtersSet; } /** * @param ctx Context. * @param transferExpiry {@code True} if expire policy should be marshalled. * @throws IgniteCheckedException If failed. */ public void marshal(GridCacheSharedContext<?, ?> ctx, boolean transferExpiry) throws IgniteCheckedException { if (filters != null) { for (CacheEntryPredicate p : filters) { if (p != null) p.prepareMarshal(this.ctx); } } // Do not serialize filters if they are null. if (transformClosBytes == null && entryProcessorsCol != null) transformClosBytes = CU.marshal(this.ctx, entryProcessorsCol); if (transferExpiry) transferExpiryPlc = expiryPlc != null && expiryPlc != this.ctx.expiry(); key.prepareMarshal(context().cacheObjectContext()); val.marshal(context()); if (transferExpiryPlc) { if (expiryPlcBytes == null) expiryPlcBytes = CU.marshal(this.ctx, new IgniteExternalizableExpiryPolicy(expiryPlc)); } else expiryPlcBytes = null; if (oldVal != null) oldVal.marshal(context()); } /** * Prepares this entry to unmarshall. In particular, this method initialize a cache context. * * @param ctx Cache context. * @param topVer Topology version that is used to validate a cache context. * If this parameter is {@code null} then validation will be skipped. * @param near Near flag. 
* @throws IgniteCheckedException If un-marshalling failed. */ public void prepareUnmarshal( GridCacheSharedContext<?, ?> ctx, AffinityTopologyVersion topVer, boolean near ) throws IgniteCheckedException { if (this.ctx == null) { GridCacheContext<?, ?> cacheCtx = ctx.cacheContext(cacheId); if (cacheCtx == null || (topVer != null && topVer.before(cacheCtx.startTopologyVersion()))) throw new CacheInvalidStateException( "Failed to perform cache operation (cache is stopped), cacheId=" + cacheId); if (cacheCtx.isNear() && !near) cacheCtx = cacheCtx.near().dht().context(); else if (!cacheCtx.isNear() && near) cacheCtx = cacheCtx.dht().near().context(); this.ctx = cacheCtx; } } /** * Unmarshalls entry. * * @param ctx Cache context. * @param near Near flag. * @param clsLdr Class loader. * @throws IgniteCheckedException If un-marshalling failed. */ public void unmarshal( GridCacheSharedContext<?, ?> ctx, boolean near, ClassLoader clsLdr ) throws IgniteCheckedException { if (this.ctx == null) prepareUnmarshal(ctx, null, near); CacheObjectValueContext coctx = this.ctx.cacheObjectContext(); if (coctx == null) throw new CacheInvalidStateException( "Failed to perform cache operation (cache is stopped), cacheId=" + cacheId); // Unmarshal transform closure anyway if it exists. if (transformClosBytes != null && entryProcessorsCol == null) entryProcessorsCol = U.unmarshal(ctx, transformClosBytes, U.resolveClassLoader(clsLdr, ctx.gridConfig())); if (filters == null) filters = CU.empty0(); else { for (CacheEntryPredicate p : filters) { if (p != null) p.finishUnmarshal(this.ctx, clsLdr); } } key.finishUnmarshal(coctx, clsLdr); val.unmarshal(coctx, clsLdr); if (expiryPlcBytes != null && expiryPlc == null) expiryPlc = U.unmarshal(ctx, expiryPlcBytes, U.resolveClassLoader(clsLdr, ctx.gridConfig())); if (hasOldValue()) oldVal.unmarshal(coctx, clsLdr); } /** * @param expiryPlc Expiry policy. 
*/ public void expiry(@Nullable ExpiryPolicy expiryPlc) { this.expiryPlc = expiryPlc; } /** * @return Expiry policy. */ @Nullable public ExpiryPolicy expiry() { return expiryPlc; } /** * @return Entry processor calculated value. */ public T2<GridCacheOperation, CacheObject> entryProcessorCalculatedValue() { return entryProcessorCalcVal; } /** * @param entryProcessorCalcVal Entry processor calculated value. */ public void entryProcessorCalculatedValue(T2<GridCacheOperation, CacheObject> entryProcessorCalcVal) { assert entryProcessorCalcVal != null; this.entryProcessorCalcVal = entryProcessorCalcVal; } /** * Gets stored entry version. Version is stored for all entries in serializable transaction or * when value is read using {@link IgniteCache#getEntry(Object)} method. * * @return Entry version. */ @Nullable public GridCacheVersion entryReadVersion() { return serReadVer; } /** * @param ver Entry version. */ public void entryReadVersion(GridCacheVersion ver) { assert this.serReadVer == null : "Wrong version [serReadVer=" + serReadVer + ", ver=" + ver + "]"; assert ver != null; this.serReadVer = ver; } /** * Clears recorded read version, should be done before starting commit of not serializable/optimistic transaction. 
*/ public void clearEntryReadVersion() { serReadVer = null; } /** {@inheritDoc} */ @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) { writer.setBuffer(buf); if (!writer.isHeaderWritten()) { if (!writer.writeHeader(directType())) return false; writer.onHeaderWritten(); } switch (writer.state()) { case 0: if (!writer.writeInt(cacheId)) return false; writer.incrementState(); case 1: if (!writer.writeLong(conflictExpireTime)) return false; writer.incrementState(); case 2: if (!writer.writeMessage(conflictVer)) return false; writer.incrementState(); case 3: if (!writer.writeByteArray(expiryPlcBytes)) return false; writer.incrementState(); case 4: if (!writer.writeMessage(explicitVer)) return false; writer.incrementState(); case 5: if (!writer.writeObjectArray( !F.isEmptyOrNulls(filters) ? filters : null, MessageCollectionItemType.MSG)) return false; writer.incrementState(); case 6: if (!writer.writeByte(flags)) return false; writer.incrementState(); case 7: if (!writer.writeKeyCacheObject(key)) return false; writer.incrementState(); case 8: if (!writer.writeMessage(oldVal)) return false; writer.incrementState(); case 9: if (!writer.writeMessage(serReadVer)) return false; writer.incrementState(); case 10: if (!writer.writeByteArray(transformClosBytes)) return false; writer.incrementState(); case 11: if (!writer.writeLong(ttl)) return false; writer.incrementState(); case 12: if (!writer.writeMessage(val)) return false; writer.incrementState(); } return true; } /** {@inheritDoc} */ @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) { reader.setBuffer(buf); switch (reader.state()) { case 0: cacheId = reader.readInt(); if (!reader.isLastRead()) return false; reader.incrementState(); case 1: conflictExpireTime = reader.readLong(); if (!reader.isLastRead()) return false; reader.incrementState(); case 2: conflictVer = reader.readMessage(); if (!reader.isLastRead()) return false; reader.incrementState(); case 3: expiryPlcBytes = 
reader.readByteArray(); if (!reader.isLastRead()) return false; reader.incrementState(); case 4: explicitVer = reader.readMessage(); if (!reader.isLastRead()) return false; reader.incrementState(); case 5: filters = reader.readObjectArray(MessageCollectionItemType.MSG, CacheEntryPredicate.class); if (!reader.isLastRead()) return false; reader.incrementState(); case 6: flags = reader.readByte(); if (!reader.isLastRead()) return false; reader.incrementState(); case 7: key = reader.readKeyCacheObject(); if (!reader.isLastRead()) return false; reader.incrementState(); case 8: oldVal = reader.readMessage(); if (!reader.isLastRead()) return false; reader.incrementState(); case 9: serReadVer = reader.readMessage(); if (!reader.isLastRead()) return false; reader.incrementState(); case 10: transformClosBytes = reader.readByteArray(); if (!reader.isLastRead()) return false; reader.incrementState(); case 11: ttl = reader.readLong(); if (!reader.isLastRead()) return false; reader.incrementState(); case 12: val = reader.readMessage(); if (!reader.isLastRead()) return false; reader.incrementState(); } return true; } /** {@inheritDoc} */ @Override public short directType() { return 100; } /** {@inheritDoc} */ @Override public Class<?> deployClass() { ClassLoader clsLdr = getClass().getClassLoader(); CacheObject val = value(); // First of all check classes that may be loaded by class loader other than application one. return key != null && !clsLdr.equals(key.getClass().getClassLoader()) ? key.getClass() : val != null ? val.getClass() : getClass(); } /** */ public GridAbsClosureX cqNotifyClosure() { return cqNotifyC; } /** * @param clo Clo. */ public void cqNotifyClosure(GridAbsClosureX clo) { cqNotifyC = clo; } /** {@inheritDoc} */ @Override public ClassLoader classLoader() { return deployClass().getClassLoader(); } /** {@inheritDoc} */ @Override public String toString() { return GridToStringBuilder.toString(IgniteTxEntry.class, this, "xidVer", tx == null ? 
"null" : tx.xidVersion()); } }
apache/hadoop
37,290
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/async/impl/NMClientAsyncImpl.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.client.api.async.impl; import java.nio.ByteBuffer; import java.util.EnumSet; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import 
org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.api.NMClient;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.client.api.impl.NMClientImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AbstractEvent;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Default implementation of {@link NMClientAsync}.
 *
 * <p>All public {@code *Async} methods merely validate their input and enqueue a
 * {@link ContainerEvent} onto an unbounded {@link LinkedBlockingQueue}. A single
 * dispatcher thread drains the queue and hands each event to a
 * {@link ContainerEventProcessor} running on an internal {@link ThreadPoolExecutor},
 * so the blocking RPCs to NodeManagers happen off the caller's thread. Results and
 * failures are reported through the user-supplied callback handler.
 *
 * <p>Per-container lifecycle is tracked by a {@link StatefulContainer}, whose
 * transitions are driven by a shared {@link StateMachineFactory}. Entries are
 * removed from {@link #containers} once a container reaches a terminal state
 * (see {@link #isCompletelyDone(StatefulContainer)}).
 */
@Private
@Unstable
public class NMClientAsyncImpl extends NMClientAsync {

  private static final Logger LOG =
      LoggerFactory.getLogger(NMClientAsyncImpl.class);

  // Core-pool size the worker pool starts with; also used as a growth buffer
  // when the pool is resized in serviceStart().
  protected static final int INITIAL_THREAD_POOL_SIZE = 10;

  // Worker pool executing ContainerEventProcessor tasks; core size grows
  // dynamically up to maxThreadPoolSize as more distinct nodes are contacted.
  protected ThreadPoolExecutor threadPool;
  // Upper bound for the pool size, read from YarnConfiguration in serviceInit().
  protected int maxThreadPoolSize;
  // Single thread that drains 'events' and submits work to 'threadPool'.
  protected Thread eventDispatcherThread;
  // Set once in serviceStop(); checked by the dispatcher loop to exit cleanly.
  protected AtomicBoolean stopped = new AtomicBoolean(false);
  // Unbounded queue of pending container events; producers are the *Async methods.
  protected BlockingQueue<ContainerEvent> events =
      new LinkedBlockingQueue<ContainerEvent>();

  // One state machine per container currently started or scheduled to start.
  protected ConcurrentMap<ContainerId, StatefulContainer> containers =
      new ConcurrentHashMap<ContainerId, StatefulContainer>();

  /** Creates a client with a default service name and the given callback handler. */
  public NMClientAsyncImpl(AbstractCallbackHandler callbackHandler) {
    this(NMClientAsync.class.getName(), callbackHandler);
  }

  /** Creates a client with the given service name, backed by a new {@link NMClientImpl}. */
  public NMClientAsyncImpl(
      String name, AbstractCallbackHandler callbackHandler) {
    this(name, new NMClientImpl(), callbackHandler);
  }

  /** Test hook: allows injecting a custom (e.g. mock) {@link NMClient}. */
  @Private
  @VisibleForTesting
  protected NMClientAsyncImpl(String name, NMClient client,
      AbstractCallbackHandler callbackHandler) {
    super(name, client, callbackHandler);
    this.client = client;
    this.callbackHandler = callbackHandler;
  }

  /**
   * @deprecated Use {@link
   *             #NMClientAsyncImpl(NMClientAsync.AbstractCallbackHandler)}
   *             instead.
   */
  @Deprecated
  public NMClientAsyncImpl(CallbackHandler callbackHandler) {
    this(NMClientAsync.class.getName(), callbackHandler);
  }

  /**
   * @deprecated Use {@link #NMClientAsyncImpl(String,
   *             NMClientAsync.AbstractCallbackHandler)} instead.
   */
  @Deprecated
  public NMClientAsyncImpl(String name, CallbackHandler callbackHandler) {
    this(name, new NMClientImpl(), callbackHandler);
  }

  /** Test hook for the deprecated {@link CallbackHandler} variant. */
  @Private
  @VisibleForTesting
  @Deprecated
  protected NMClientAsyncImpl(String name, NMClient client,
      CallbackHandler callbackHandler) {
    super(name, client, callbackHandler);
    this.client = client;
    this.callbackHandler = callbackHandler;
  }

  /** Reads the pool-size bound from configuration and initializes the wrapped client. */
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    this.maxThreadPoolSize = conf.getInt(
        YarnConfiguration.NM_CLIENT_ASYNC_THREAD_POOL_MAX_SIZE,
        YarnConfiguration.DEFAULT_NM_CLIENT_ASYNC_THREAD_POOL_MAX_SIZE);
    LOG.info("Upper bound of the thread pool size is {}.", maxThreadPoolSize);
    client.init(conf);
    super.serviceInit(conf);
  }

  /**
   * Starts the wrapped client, the worker pool, and the event dispatcher thread.
   * The dispatcher grows the pool's core size towards the number of distinct
   * nodes seen so far (capped at maxThreadPoolSize), then fans events out to
   * the pool for parallel processing.
   */
  @Override
  protected void serviceStart() throws Exception {
    client.start();

    ThreadFactory tf = new ThreadFactoryBuilder().setNameFormat(
        this.getClass().getName() + " #%d").setDaemon(true).build();

    // Start with a default core-pool size and change it dynamically.
    int initSize = Math.min(INITIAL_THREAD_POOL_SIZE, maxThreadPoolSize);
    threadPool = new ThreadPoolExecutor(initSize, Integer.MAX_VALUE, 1,
        TimeUnit.HOURS, new LinkedBlockingQueue<Runnable>(), tf);
    eventDispatcherThread = new Thread() {
      @Override
      public void run() {
        ContainerEvent event = null;
        // Distinct node ids observed; used only to size the worker pool.
        Set<String> allNodes = new HashSet<String>();

        while (!stopped.get() && !Thread.currentThread().isInterrupted()) {
          try {
            event = events.take();
          } catch (InterruptedException e) {
            // Interrupt during shutdown is expected; only log otherwise.
            if (!stopped.get()) {
              LOG.error("Returning, thread interrupted", e);
            }
            return;
          }

          allNodes.add(event.getNodeId().toString());

          int threadPoolSize = threadPool.getCorePoolSize();

          // We can increase the pool size only if haven't reached the maximum
          // limit yet.
          if (threadPoolSize != maxThreadPoolSize) {

            // nodes where containers will run at *this* point of time. This is
            // *not* the cluster size and doesn't need to be.
            int nodeNum = allNodes.size();
            int idealThreadPoolSize = Math.min(maxThreadPoolSize, nodeNum);

            if (threadPoolSize < idealThreadPoolSize) {
              // Bump up the pool size to idealThreadPoolSize +
              // INITIAL_POOL_SIZE, the later is just a buffer so we are not
              // always increasing the pool-size
              int newThreadPoolSize = Math.min(maxThreadPoolSize,
                  idealThreadPoolSize + INITIAL_THREAD_POOL_SIZE);
              LOG.info("Set NMClientAsync thread pool size to {} " +
                  "as the number of nodes to talk to is {}.",
                  newThreadPoolSize, nodeNum);
              threadPool.setCorePoolSize(newThreadPoolSize);
            }
          }

          // the events from the queue are handled in parallel with a thread
          // pool
          threadPool.execute(getContainerEventProcessor(event));

          // TODO: Group launching of multiple containers to a single
          // NodeManager into a single connection
        }
      }
    };
    eventDispatcherThread.setName("Container Event Dispatcher");
    eventDispatcherThread.setDaemon(false);
    eventDispatcherThread.start();

    super.serviceStart();
  }

  /**
   * Stops dispatching (idempotent via the 'stopped' flag), interrupts and joins
   * the dispatcher thread, shuts down the worker pool, clears tracked container
   * state when the wrapped client will stop running containers, and stops the
   * wrapped client.
   */
  @Override
  protected void serviceStop() throws Exception {
    if (stopped.getAndSet(true)) {
      // return if already stopped
      return;
    }
    if (eventDispatcherThread != null) {
      eventDispatcherThread.interrupt();
      try {
        eventDispatcherThread.join();
      } catch (InterruptedException e) {
        LOG.error("The thread of " + eventDispatcherThread.getName() +
            " didn't finish normally.", e);
      }
    }
    if (threadPool != null) {
      threadPool.shutdownNow();
    }
    if (client != null) {
      // If NMClientImpl doesn't stop running containers, the states doesn't
      // need to be cleared.
      if (!(client instanceof NMClientImpl) ||
          ((NMClientImpl) client).getCleanupRunningContainers().get()) {
        if (containers != null) {
          containers.clear();
        }
      }
      client.stop();
    }
    super.serviceStop();
  }

  /**
   * Schedules an asynchronous container start. Registers the container's state
   * machine first; if an entry already exists the error callback is invoked
   * (note the event is still enqueued afterwards).
   */
  public void startContainerAsync(
      Container container, ContainerLaunchContext containerLaunchContext) {
    if (containers.putIfAbsent(container.getId(),
        new StatefulContainer(this, container.getId())) != null) {
      callbackHandler.onStartContainerError(container.getId(),
          RPCUtil.getRemoteException("Container " + container.getId() +
              " is already started or scheduled to start"));
    }
    try {
      events.put(new StartContainerEvent(container, containerLaunchContext));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of starting Container {}",
          container.getId());
      callbackHandler.onStartContainerError(container.getId(), e);
    }
  }

  /**
   * Schedules a resource increase for a running container.
   * Requires the newer {@link AbstractCallbackHandler}; bails out otherwise.
   * @deprecated superseded by {@link #updateContainerResourceAsync(Container)}
   */
  @Deprecated
  public void increaseContainerResourceAsync(Container container) {
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container resource " +
          "increase callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(container.getId()) == null) {
      handler.onIncreaseContainerResourceError(
          container.getId(),
          RPCUtil.getRemoteException(
              "Container " + container.getId() +
                  " is neither started nor scheduled to start"));
    }
    try {
      // 'true' marks this as a legacy increase so the old callbacks fire.
      events.put(new UpdateContainerResourceEvent(container, true));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of increasing " +
          "resource of Container {}", container.getId());
      handler.onIncreaseContainerResourceError(container.getId(), e);
    }
  }

  /** Schedules a generic resource update for a running container. */
  @Override
  public void updateContainerResourceAsync(Container container) {
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container resource " +
          "increase callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(container.getId()) == null) {
      handler.onUpdateContainerResourceError(
          container.getId(),
          RPCUtil.getRemoteException(
              "Container " + container.getId() +
                  " is neither started nor scheduled to start"));
    }
    try {
      events.put(new UpdateContainerResourceEvent(container, false));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of " +
          "increasing resource of Container {}.", container.getId());
      handler.onUpdateContainerResourceError(container.getId(), e);
    }
  }

  /**
   * Schedules a container re-initialization with a new launch context;
   * autoCommit controls whether the upgrade commits automatically.
   */
  @Override
  public void reInitializeContainerAsync(ContainerId containerId,
      ContainerLaunchContext containerLaunchContex, boolean autoCommit){
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container re-initialize " +
          "callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(containerId) == null) {
      handler.onContainerReInitializeError(
          containerId, RPCUtil.getRemoteException(
              "Container " + containerId + " is not started"));
    }
    try {
      events.put(new ReInitializeContainerEvevnt(containerId,
          client.getNodeIdOfStartedContainer(containerId),
          containerLaunchContex, autoCommit));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of " +
          "re-initializing of Container {}", containerId);
      handler.onContainerReInitializeError(containerId, e);
    }
  }

  /** Schedules a restart of a previously started container. */
  @Override
  public void restartContainerAsync(ContainerId containerId){
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container restart " +
          "callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(containerId) == null) {
      handler.onContainerRestartError(
          containerId, RPCUtil.getRemoteException(
              "Container " + containerId + " is not started"));
    }
    try {
      events.put(new ContainerEvent(containerId,
          client.getNodeIdOfStartedContainer(containerId),
          null, ContainerEventType.RESTART_CONTAINER));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of restart of Container {}",
          containerId);
      handler.onContainerRestartError(containerId, e);
    }
  }

  /** Schedules a rollback of the last re-initialization of a container. */
  @Override
  public void rollbackLastReInitializationAsync(ContainerId containerId){
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container rollback " +
          "callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(containerId) == null) {
      handler.onRollbackLastReInitializationError(
          containerId, RPCUtil.getRemoteException(
              "Container " + containerId + " is not started"));
    }
    try {
      events.put(new ContainerEvent(containerId,
          client.getNodeIdOfStartedContainer(containerId),
          null, ContainerEventType.ROLLBACK_LAST_REINIT));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event Rollback " +
          "re-initialization of Container {}", containerId);
      handler.onRollbackLastReInitializationError(containerId, e);
    }
  }

  /** Schedules a commit of the last re-initialization of a container. */
  @Override
  public void commitLastReInitializationAsync(ContainerId containerId){
    if (!(callbackHandler instanceof AbstractCallbackHandler)) {
      LOG.error("Callback handler does not implement container commit last " +
          "re-initialization callback methods");
      return;
    }
    AbstractCallbackHandler handler = (AbstractCallbackHandler) callbackHandler;
    if (containers.get(containerId) == null) {
      handler.onCommitLastReInitializationError(
          containerId, RPCUtil.getRemoteException(
              "Container " + containerId + " is not started"));
    }
    try {
      events.put(new ContainerEvent(containerId,
          client.getNodeIdOfStartedContainer(containerId),
          null, ContainerEventType.COMMIT_LAST_REINT));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event " +
          "Commit re-initialization of Container {}", containerId);
      handler.onCommitLastReInitializationError(containerId, e);
    }
  }

  /** Schedules an asynchronous stop of a container on the given node. */
  public void stopContainerAsync(ContainerId containerId, NodeId nodeId) {
    if (containers.get(containerId) == null) {
      callbackHandler.onStopContainerError(containerId,
          RPCUtil.getRemoteException("Container " + containerId +
              " is neither started nor scheduled to start"));
    }
    try {
      events.put(new ContainerEvent(containerId, nodeId, null,
          ContainerEventType.STOP_CONTAINER));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of stopping Container {}",
          containerId);
      callbackHandler.onStopContainerError(containerId, e);
    }
  }

  /**
   * Schedules an asynchronous status query. Note: unlike the other operations,
   * no registration check is done — queries bypass the per-container state
   * machine (see ContainerEventProcessor.run()).
   */
  public void getContainerStatusAsync(ContainerId containerId, NodeId nodeId) {
    try {
      events.put(new ContainerEvent(containerId, nodeId, null,
          ContainerEventType.QUERY_CONTAINER));
    } catch (InterruptedException e) {
      LOG.warn("Exception when scheduling the event of querying " +
          "the status of Container {}", containerId);
      callbackHandler.onGetContainerStatusError(containerId, e);
    }
  }

  /** Lifecycle states tracked per container; DONE and FAILED are terminal. */
  protected enum ContainerState {
    PREP, FAILED, RUNNING, DONE,
  }

  /** True once a container reached a terminal state and can be untracked. */
  protected boolean isCompletelyDone(StatefulContainer container) {
    return container.getState() == ContainerState.DONE ||
        container.getState() == ContainerState.FAILED;
  }

  /** Factory for event-processing tasks; overridable for testing. */
  protected ContainerEventProcessor getContainerEventProcessor(
      ContainerEvent event) {
    return new ContainerEventProcessor(event);
  }

  /**
   * The type of the event of interacting with a container
   */
  protected enum ContainerEventType {
    START_CONTAINER,
    STOP_CONTAINER,
    QUERY_CONTAINER,
    UPDATE_CONTAINER_RESOURCE,
    REINITIALIZE_CONTAINER,
    RESTART_CONTAINER,
    ROLLBACK_LAST_REINIT,
    COMMIT_LAST_REINT
  }

  /**
   * Base event carried through the dispatch queue: identifies the container,
   * its node, and (optionally) the container token.
   */
  protected static class ContainerEvent
      extends AbstractEvent<ContainerEventType>{
    private ContainerId containerId;
    private NodeId nodeId;
    private Token containerToken;

    public ContainerEvent(ContainerId containerId, NodeId nodeId,
        Token containerToken, ContainerEventType type) {
      super(type);
      this.containerId = containerId;
      this.nodeId = nodeId;
      this.containerToken = containerToken;
    }

    public ContainerId getContainerId() {
      return containerId;
    }

    public NodeId getNodeId() {
      return nodeId;
    }

    public Token getContainerToken() {
      return containerToken;
    }
  }

  /** Event for starting a container; carries the launch context. */
  protected static class StartContainerEvent extends ContainerEvent {
    private Container container;
    private ContainerLaunchContext containerLaunchContext;

    public StartContainerEvent(Container container,
        ContainerLaunchContext containerLaunchContext) {
      super(container.getId(), container.getNodeId(),
          container.getContainerToken(), ContainerEventType.START_CONTAINER);
      this.container = container;
      this.containerLaunchContext = containerLaunchContext;
    }

    public Container getContainer() {
      return container;
    }

    public ContainerLaunchContext getContainerLaunchContext() {
      return containerLaunchContext;
    }
  }

  /**
   * Event for re-initializing a container with a new launch context.
   * (Class name retains its historical misspelling of "Event".)
   */
  protected static class ReInitializeContainerEvevnt extends ContainerEvent {
    private ContainerLaunchContext containerLaunchContext;
    private boolean autoCommit;

    public ReInitializeContainerEvevnt(ContainerId containerId, NodeId nodeId,
        ContainerLaunchContext containerLaunchContext, boolean autoCommit) {
      super(containerId, nodeId, null,
          ContainerEventType.REINITIALIZE_CONTAINER);
      this.containerLaunchContext = containerLaunchContext;
      this.autoCommit = autoCommit;
    }

    public ContainerLaunchContext getContainerLaunchContext() {
      return containerLaunchContext;
    }

    public boolean isAutoCommit() {
      return autoCommit;
    }
  }

  /** Event for updating (or, in legacy mode, increasing) container resources. */
  protected static class UpdateContainerResourceEvent extends ContainerEvent {
    private Container container;
    private boolean isIncreaseEvent;

    // UpdateContainerResourceEvent constructor takes in a
    // flag to support callback API's calling through the deprecated
    // increaseContainerResource
    public UpdateContainerResourceEvent(Container container,
        boolean isIncreaseEvent) {
      super(container.getId(), container.getNodeId(),
          container.getContainerToken(),
          ContainerEventType.UPDATE_CONTAINER_RESOURCE);
      this.container = container;
      this.isIncreaseEvent = isIncreaseEvent;
    }

    public Container getContainer() {
      return container;
    }
  }

  /**
   * Per-container state machine. Events are applied under a write lock in
   * {@link #handle(ContainerEvent)}; each transition performs the blocking
   * NMClient call and then invokes the user callback, shielding this client
   * from unchecked exceptions thrown by user code.
   */
  protected static class StatefulContainer
      implements EventHandler<ContainerEvent> {

    // Shared transition table for all containers. Terminal states (DONE,
    // FAILED) absorb any further events without action.
    protected final static StateMachineFactory<StatefulContainer,
        ContainerState, ContainerEventType, ContainerEvent> stateMachineFactory
        = new StateMachineFactory<StatefulContainer, ContainerState,
            ContainerEventType, ContainerEvent>(ContainerState.PREP)

            // Transitions from PREP state
            .addTransition(ContainerState.PREP,
                EnumSet.of(ContainerState.RUNNING, ContainerState.FAILED),
                ContainerEventType.START_CONTAINER,
                new StartContainerTransition())
            .addTransition(ContainerState.PREP, ContainerState.DONE,
                ContainerEventType.STOP_CONTAINER, new OutOfOrderTransition())

            // Transitions from RUNNING state
            .addTransition(ContainerState.RUNNING, ContainerState.RUNNING,
                ContainerEventType.UPDATE_CONTAINER_RESOURCE,
                new UpdateContainerResourceTransition())

            // Transitions for Container Upgrade
            .addTransition(ContainerState.RUNNING,
                EnumSet.of(ContainerState.RUNNING, ContainerState.FAILED),
                ContainerEventType.REINITIALIZE_CONTAINER,
                new ReInitializeContainerTransition())
            .addTransition(ContainerState.RUNNING,
                EnumSet.of(ContainerState.RUNNING, ContainerState.FAILED),
                ContainerEventType.RESTART_CONTAINER,
                new ReInitializeContainerTransition())
            .addTransition(ContainerState.RUNNING,
                EnumSet.of(ContainerState.RUNNING, ContainerState.FAILED),
                ContainerEventType.ROLLBACK_LAST_REINIT,
                new ReInitializeContainerTransition())
            .addTransition(ContainerState.RUNNING,
                EnumSet.of(ContainerState.RUNNING, ContainerState.FAILED),
                ContainerEventType.COMMIT_LAST_REINT,
                new ReInitializeContainerTransition())

            .addTransition(ContainerState.RUNNING,
                EnumSet.of(ContainerState.DONE, ContainerState.FAILED),
                ContainerEventType.STOP_CONTAINER,
                new StopContainerTransition())

            // Transition from DONE state
            .addTransition(ContainerState.DONE, ContainerState.DONE,
                EnumSet.of(ContainerEventType.START_CONTAINER,
                    ContainerEventType.STOP_CONTAINER,
                    ContainerEventType.UPDATE_CONTAINER_RESOURCE))

            // Transition from FAILED state
            .addTransition(ContainerState.FAILED, ContainerState.FAILED,
                EnumSet.of(ContainerEventType.START_CONTAINER,
                    ContainerEventType.STOP_CONTAINER,
                    ContainerEventType.REINITIALIZE_CONTAINER,
                    ContainerEventType.RESTART_CONTAINER,
                    ContainerEventType.COMMIT_LAST_REINT,
                    ContainerEventType.ROLLBACK_LAST_REINIT,
                    ContainerEventType.UPDATE_CONTAINER_RESOURCE));

    /**
     * PREP -> RUNNING on success, PREP -> FAILED on any throwable. The user's
     * onContainerStarted callback runs before RUNNING is returned; its own
     * unchecked exceptions are logged, not propagated.
     */
    protected static class StartContainerTransition implements
        MultipleArcTransition<StatefulContainer, ContainerEvent,
            ContainerState> {

      @Override
      public ContainerState transition(
          StatefulContainer container, ContainerEvent event) {
        ContainerId containerId = event.getContainerId();
        try {
          StartContainerEvent scEvent = null;
          if (event instanceof StartContainerEvent) {
            scEvent = (StartContainerEvent) event;
          }
          assert scEvent != null;
          Map<String, ByteBuffer> allServiceResponse =
              container.nmClientAsync.getClient().startContainer(
                  scEvent.getContainer(), scEvent.getContainerLaunchContext());
          try {
            container.nmClientAsync.getCallbackHandler().onContainerStarted(
                containerId, allServiceResponse);
          } catch (Throwable thr) {
            // Don't process user created unchecked exception
            LOG.info("Unchecked exception is thrown from onContainerStarted for " +
                "Container " + containerId, thr);
          }
          return ContainerState.RUNNING;
        } catch (Throwable e) {
          return onExceptionRaised(container, event, e);
        }
      }

      // Reports the failure to the user callback and moves to FAILED.
      private ContainerState onExceptionRaised(StatefulContainer container,
          ContainerEvent event, Throwable t) {
        try {
          container.nmClientAsync.getCallbackHandler().onStartContainerError(
              event.getContainerId(), t);
        } catch (Throwable thr) {
          // Don't process user created unchecked exception
          LOG.info(
              "Unchecked exception is thrown from onStartContainerError for " +
                  "Container " + event.getContainerId(), thr);
        }
        return ContainerState.FAILED;
      }
    }

    /**
     * RUNNING -> RUNNING. Dispatches to either the deprecated increase
     * callbacks or the newer update callbacks based on the event's
     * isIncreaseEvent flag.
     */
    protected static class UpdateContainerResourceTransition implements
        SingleArcTransition<StatefulContainer, ContainerEvent> {

      @SuppressWarnings("deprecation")
      @Override
      public void transition(
          StatefulContainer container, ContainerEvent event) {
        boolean isIncreaseEvent = false;
        if (!(container.nmClientAsync.getCallbackHandler()
            instanceof AbstractCallbackHandler)) {
          LOG.error("Callback handler does not implement container resource " +
              "update callback methods");
          return;
        }
        AbstractCallbackHandler handler =
            (AbstractCallbackHandler) container.nmClientAsync
                .getCallbackHandler();
        try {
          if (!(event instanceof UpdateContainerResourceEvent)) {
            throw new AssertionError("Unexpected event type. Expecting:" +
                "UpdateContainerResourceEvent. Got:" + event);
          }
          UpdateContainerResourceEvent updateEvent =
              (UpdateContainerResourceEvent) event;
          container.nmClientAsync.getClient().updateContainerResource(
              updateEvent.getContainer());
          isIncreaseEvent = updateEvent.isIncreaseEvent;
          try {
            //If isIncreaseEvent is set, set the appropriate callbacks
            //for backward compatibility
            if (isIncreaseEvent) {
              handler.onContainerResourceIncreased(updateEvent.getContainerId(),
                  updateEvent.getContainer().getResource());
            } else {
              handler.onContainerResourceUpdated(updateEvent.getContainerId(),
                  updateEvent.getContainer().getResource());
            }
          } catch (Throwable thr) {
            // Don't process user created unchecked exception
            LOG.info("Unchecked exception is thrown from " +
                "onContainerResourceUpdated for Container " +
                event.getContainerId(), thr);
          }
        } catch (Exception e) {
          try {
            if (isIncreaseEvent) {
              handler
                  .onIncreaseContainerResourceError(event.getContainerId(), e);
            } else {
              handler.onUpdateContainerResourceError(event.getContainerId(), e);
            }
          } catch (Throwable thr) {
            // Don't process user created unchecked exception
            LOG.info("Unchecked exception is thrown from " +
                "onUpdateContainerResourceError for Container " +
                event.getContainerId(), thr);
          }
        }
      }
    }

    /**
     * Shared transition for REINITIALIZE / RESTART / ROLLBACK / COMMIT:
     * RUNNING -> RUNNING on success, RUNNING -> FAILED on any throwable. User
     * callback exceptions are captured into handlerError and only logged.
     */
    protected static class ReInitializeContainerTransition implements
        MultipleArcTransition<StatefulContainer, ContainerEvent,
            ContainerState> {

      @Override
      public ContainerState transition(StatefulContainer container,
          ContainerEvent containerEvent) {
        ContainerId containerId = containerEvent.getContainerId();
        // NOTE(review): handler is cast without an instanceof check here,
        // unlike the public *Async entry points which validate it first.
        AbstractCallbackHandler handler =
            (AbstractCallbackHandler) container
                .nmClientAsync.getCallbackHandler();
        Throwable handlerError = null;
        try {
          switch(containerEvent.getType()) {
          case REINITIALIZE_CONTAINER:
            if (!(containerEvent instanceof ReInitializeContainerEvevnt)) {
              LOG.error("Unexpected Event.. [{}]", containerEvent.getType());
              return ContainerState.FAILED;
            }
            ReInitializeContainerEvevnt rEvent =
                (ReInitializeContainerEvevnt)containerEvent;
            container.nmClientAsync.getClient().reInitializeContainer(
                containerId, rEvent.getContainerLaunchContext(),
                rEvent.isAutoCommit());
            try {
              handler.onContainerReInitialize(containerId);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case RESTART_CONTAINER:
            container.nmClientAsync.getClient().restartContainer(containerId);
            try {
              handler.onContainerRestart(containerId);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case ROLLBACK_LAST_REINIT:
            container.nmClientAsync.getClient()
                .rollbackLastReInitialization(containerId);
            try {
              handler.onRollbackLastReInitialization(containerId);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case COMMIT_LAST_REINT:
            container.nmClientAsync.getClient()
                .commitLastReInitialization(containerId);
            try {
              handler.onCommitLastReInitialization(containerId);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          default:
            LOG.warn("Event of type [{}] not" +
                " expected here..", containerEvent.getType());
            break;
          }
          if (handlerError != null) {
            LOG.info("Unchecked exception is thrown in handler for event [" +
                containerEvent.getType() + "] for Container " + containerId,
                handlerError);
          }
          return ContainerState.RUNNING;
        } catch (Throwable t) {
          // Operation failed: route the error to the matching error callback.
          switch(containerEvent.getType()) {
          case REINITIALIZE_CONTAINER:
            try {
              handler.onContainerReInitializeError(containerId, t);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case RESTART_CONTAINER:
            try {
              handler.onContainerRestartError(containerId, t);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case ROLLBACK_LAST_REINIT:
            try {
              handler.onRollbackLastReInitializationError(containerId, t);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          case COMMIT_LAST_REINT:
            try {
              handler.onCommitLastReInitializationError(containerId, t);
            } catch (Throwable tr) {
              handlerError = tr;
            }
            break;
          default:
            LOG.warn("Event of type [" + containerEvent.getType() + "] not" +
                " expected here..");
            break;
          }
          if (handlerError != null) {
            LOG.info("Unchecked exception is thrown in handler for event [" +
                containerEvent.getType() + "] for Container " + containerId,
                handlerError);
          }
        }
        return ContainerState.FAILED;
      }
    }

    /**
     * RUNNING -> DONE on success, RUNNING -> FAILED on any throwable.
     */
    protected static class StopContainerTransition implements
        MultipleArcTransition<StatefulContainer, ContainerEvent,
            ContainerState> {

      @Override
      public ContainerState transition(
          StatefulContainer container, ContainerEvent event) {
        ContainerId containerId = event.getContainerId();
        try {
          container.nmClientAsync.getClient().stopContainer(
              containerId, event.getNodeId());
          try {
            container.nmClientAsync.getCallbackHandler().onContainerStopped(
                event.getContainerId());
          } catch (Throwable thr) {
            // Don't process user created unchecked exception
            LOG.info("Unchecked exception is thrown from onContainerStopped for " +
                "Container " + event.getContainerId(), thr);
          }
          return ContainerState.DONE;
        } catch (Throwable e) {
          return onExceptionRaised(container, event, e);
        }
      }

      // Reports the failure to the user callback and moves to FAILED.
      private ContainerState onExceptionRaised(StatefulContainer container,
          ContainerEvent event, Throwable t) {
        try {
          container.nmClientAsync.getCallbackHandler().onStopContainerError(
              event.getContainerId(), t);
        } catch (Throwable thr) {
          // Don't process user created unchecked exception
          LOG.info("Unchecked exception is thrown from onStopContainerError for " +
              "Container " + event.getContainerId(), thr);
        }
        return ContainerState.FAILED;
      }
    }

    /**
     * PREP -> DONE: a STOP arrived before the container was launched; the
     * start is reported as an error instead of attempting any RPC.
     */
    protected static class OutOfOrderTransition implements
        SingleArcTransition<StatefulContainer, ContainerEvent> {

      protected static final String STOP_BEFORE_START_ERROR_MSG =
          "Container was killed before it was launched";

      @Override
      public void transition(StatefulContainer container,
          ContainerEvent event) {
        try {
          container.nmClientAsync.getCallbackHandler().onStartContainerError(
              event.getContainerId(),
              RPCUtil.getRemoteException(STOP_BEFORE_START_ERROR_MSG));
        } catch (Throwable thr) {
          // Don't process user created unchecked exception
          LOG.info(
              "Unchecked exception is thrown from onStartContainerError for " +
                  "Container " + event.getContainerId(), thr);
        }
      }
    }

    private final NMClientAsync nmClientAsync;
    private final ContainerId containerId;
    private final StateMachine<ContainerState, ContainerEventType,
        ContainerEvent> stateMachine;
    // Read lock guards state queries; write lock serializes transitions.
    private final ReadLock readLock;
    private final WriteLock writeLock;

    public StatefulContainer(NMClientAsync client, ContainerId containerId) {
      this.nmClientAsync = client;
      this.containerId = containerId;
      stateMachine = stateMachineFactory.make(this);
      ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
      readLock = lock.readLock();
      writeLock = lock.writeLock();
    }

    /** Applies one event under the write lock; invalid transitions are logged. */
    @Override
    public void handle(ContainerEvent event) {
      writeLock.lock();
      try {
        try {
          this.stateMachine.doTransition(event.getType(), event);
        } catch (InvalidStateTransitionException e) {
          LOG.error("Can't handle this event at current state", e);
        }
      } finally {
        writeLock.unlock();
      }
    }

    public ContainerId getContainerId() {
      return containerId;
    }

    /** Reads the current state under the read lock. */
    public ContainerState getState() {
      readLock.lock();
      try {
        return stateMachine.getCurrentState();
      } finally {
        readLock.unlock();
      }
    }
  }

  /**
   * Worker task run on the thread pool. Status queries are answered directly
   * against the NMClient; all other events are fed into the container's state
   * machine, and terminal containers are removed from tracking.
   */
  protected class ContainerEventProcessor implements Runnable {
    protected ContainerEvent event;

    public ContainerEventProcessor(ContainerEvent event) {
      this.event = event;
    }

    @Override
    public void run() {
      ContainerId containerId = event.getContainerId();
      LOG.info("Processing Event {} for Container {}", event, containerId);
      if (event.getType() == ContainerEventType.QUERY_CONTAINER) {
        try {
          ContainerStatus containerStatus = client.getContainerStatus(
              containerId, event.getNodeId());
          try {
            callbackHandler.onContainerStatusReceived(
                containerId, containerStatus);
          } catch (Throwable thr) {
            // Don't process user created unchecked exception
            LOG.info(
                "Unchecked exception is thrown from onContainerStatusReceived" +
                    " for Container " + event.getContainerId(), thr);
          }
        } catch (Throwable e) {
          onExceptionRaised(containerId, e);
        }
      } else {
        StatefulContainer container = containers.get(containerId);
        if (container == null) {
          LOG.info("Container {} is already stopped or failed", containerId);
        } else {
          container.handle(event);
          if (isCompletelyDone(container)) {
            containers.remove(containerId);
          }
        }
      }
    }

    // Reports status-query failures to the user callback.
    private void onExceptionRaised(ContainerId containerId, Throwable t) {
      try {
        callbackHandler.onGetContainerStatusError(containerId, t);
      } catch (Throwable thr) {
        // Don't process user created unchecked exception
        LOG.info("Unchecked exception is thrown from onGetContainerStatusError" +
            " for Container " + containerId, thr);
      }
    }
  }
}
apache/hbase
37,333
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreFlusher.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver; import io.opentelemetry.api.trace.Span; import io.opentelemetry.context.Scope; import java.io.IOException; import java.lang.Thread.UncaughtExceptionHandler; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.ConcurrentModificationException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.concurrent.BlockingQueue; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.LongAdder; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.RegionReplicaUtil; import org.apache.hadoop.hbase.conf.ConfigurationObserver; import org.apache.hadoop.hbase.regionserver.HRegion.FlushResult; 
import org.apache.hadoop.hbase.trace.TraceUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil; import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; /** * Thread that flushes cache on request NOTE: This class extends Thread rather than Chore because * the sleep time can be interrupted when there is something to do, rather than the Chore sleep time * which is invariant. * @see FlushRequester */ @InterfaceAudience.Private public class MemStoreFlusher implements FlushRequester, ConfigurationObserver { private static final Logger LOG = LoggerFactory.getLogger(MemStoreFlusher.class); private Configuration conf; // These two data members go together. Any entry in the one must have // a corresponding entry in the other. private final BlockingQueue<FlushQueueEntry> flushQueue = new DelayQueue<>(); protected final Map<Region, FlushRegionEntry> regionsInQueue = new HashMap<>(); private AtomicBoolean wakeupPending = new AtomicBoolean(); private final long threadWakeFrequency; private final HRegionServer server; private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private final Object blockSignal = new Object(); private long blockingWaitTime; private final LongAdder updatesBlockedMsHighWater = new LongAdder(); private FlushHandler[] flushHandlers; private final AtomicInteger flusherIdGen = new AtomicInteger(); private ThreadFactory flusherThreadFactory; private List<FlushRequestListener> flushRequestListeners = new ArrayList<>(1); /** * Singleton instance inserted into flush queue used for signaling. 
*/ private static final FlushQueueEntry WAKEUPFLUSH_INSTANCE = new FlushQueueEntry() { @Override public long getDelay(TimeUnit unit) { return 0; } @Override public int compareTo(Delayed o) { return -1; } @Override public boolean equals(Object obj) { return obj == this; } @Override public int hashCode() { return 42; } }; /** * */ public MemStoreFlusher(final Configuration conf, final HRegionServer server) { super(); this.conf = conf; this.server = server; this.threadWakeFrequency = conf.getLong(HConstants.THREAD_WAKE_FREQUENCY, 10 * 1000); this.blockingWaitTime = conf.getInt("hbase.hstore.blockingWaitTime", 90000); int handlerCount = 0; if (server != null) { handlerCount = getHandlerCount(conf); LOG.info("globalMemStoreLimit=" + TraditionalBinaryPrefix .long2String(this.server.getRegionServerAccounting().getGlobalMemStoreLimit(), "", 1) + ", globalMemStoreLimitLowMark=" + TraditionalBinaryPrefix.long2String( this.server.getRegionServerAccounting().getGlobalMemStoreLimitLowMark(), "", 1) + ", Offheap=" + (this.server.getRegionServerAccounting().isOffheap())); } this.flushHandlers = new FlushHandler[handlerCount]; } public LongAdder getUpdatesBlockedMsHighWater() { return this.updatesBlockedMsHighWater; } /** * The memstore across all regions has exceeded the low water mark. 
Pick one region to flush and * flush it synchronously (this is called from the flush thread) * @return true if successful */ private boolean flushOneForGlobalPressure(FlushType flushType) { SortedMap<Long, Collection<HRegion>> regionsBySize = null; switch (flushType) { case ABOVE_OFFHEAP_HIGHER_MARK: case ABOVE_OFFHEAP_LOWER_MARK: regionsBySize = server.getCopyOfOnlineRegionsSortedByOffHeapSize(); break; case ABOVE_ONHEAP_HIGHER_MARK: case ABOVE_ONHEAP_LOWER_MARK: default: regionsBySize = server.getCopyOfOnlineRegionsSortedByOnHeapSize(); } Set<HRegion> excludedRegions = new HashSet<>(); double secondaryMultiplier = ServerRegionReplicaUtil.getRegionReplicaStoreFileRefreshMultiplier(conf); boolean flushedOne = false; while (!flushedOne) { // Find the biggest region that doesn't have too many storefiles (might be null!) HRegion bestFlushableRegion = getBiggestMemStoreRegion(regionsBySize, excludedRegions, true); // Find the biggest region, total, even if it might have too many flushes. HRegion bestAnyRegion = getBiggestMemStoreRegion(regionsBySize, excludedRegions, false); // Find the biggest region that is a secondary region HRegion bestRegionReplica = getBiggestMemStoreOfRegionReplica(regionsBySize, excludedRegions); if (bestAnyRegion == null) { // If bestAnyRegion is null, assign replica. It may be null too. 
Next step is check for null bestAnyRegion = bestRegionReplica; } if (bestAnyRegion == null) { LOG.error("Above memory mark but there are no flushable regions!"); return false; } HRegion regionToFlush; long bestAnyRegionSize; long bestFlushableRegionSize; switch (flushType) { case ABOVE_OFFHEAP_HIGHER_MARK: case ABOVE_OFFHEAP_LOWER_MARK: bestAnyRegionSize = bestAnyRegion.getMemStoreOffHeapSize(); bestFlushableRegionSize = getMemStoreOffHeapSize(bestFlushableRegion); break; case ABOVE_ONHEAP_HIGHER_MARK: case ABOVE_ONHEAP_LOWER_MARK: bestAnyRegionSize = bestAnyRegion.getMemStoreHeapSize(); bestFlushableRegionSize = getMemStoreHeapSize(bestFlushableRegion); break; default: bestAnyRegionSize = bestAnyRegion.getMemStoreDataSize(); bestFlushableRegionSize = getMemStoreDataSize(bestFlushableRegion); } if (bestAnyRegionSize > 2 * bestFlushableRegionSize) { // Even if it's not supposed to be flushed, pick a region if it's more than twice // as big as the best flushable one - otherwise when we're under pressure we make // lots of little flushes and cause lots of compactions, etc, which just makes // life worse! if (LOG.isDebugEnabled()) { LOG.debug("Under global heap pressure: " + "Region " + bestAnyRegion.getRegionInfo().getRegionNameAsString() + " has too many " + "store files, but is " + TraditionalBinaryPrefix.long2String(bestAnyRegionSize, "", 1) + " vs best flushable region's " + TraditionalBinaryPrefix.long2String(bestFlushableRegionSize, "", 1) + ". 
Choosing the bigger."); } regionToFlush = bestAnyRegion; } else { if (bestFlushableRegion == null) { regionToFlush = bestAnyRegion; } else { regionToFlush = bestFlushableRegion; } } long regionToFlushSize; long bestRegionReplicaSize; switch (flushType) { case ABOVE_OFFHEAP_HIGHER_MARK: case ABOVE_OFFHEAP_LOWER_MARK: regionToFlushSize = regionToFlush.getMemStoreOffHeapSize(); bestRegionReplicaSize = getMemStoreOffHeapSize(bestRegionReplica); break; case ABOVE_ONHEAP_HIGHER_MARK: case ABOVE_ONHEAP_LOWER_MARK: regionToFlushSize = regionToFlush.getMemStoreHeapSize(); bestRegionReplicaSize = getMemStoreHeapSize(bestRegionReplica); break; default: regionToFlushSize = regionToFlush.getMemStoreDataSize(); bestRegionReplicaSize = getMemStoreDataSize(bestRegionReplica); } if ((regionToFlush == null || regionToFlushSize == 0) && bestRegionReplicaSize == 0) { // A concurrency issue (such as splitting region) may happen such that the online region // seen by getCopyOfOnlineRegionsSortedByXX() method is no longer eligible to // getBiggestMemStoreRegion(). This means that we can come out of the loop LOG.debug("Above memory mark but there is no flushable region"); return false; } if ( regionToFlush == null || (bestRegionReplica != null && ServerRegionReplicaUtil.isRegionReplicaStoreFileRefreshEnabled(conf) && (bestRegionReplicaSize > secondaryMultiplier * regionToFlushSize)) ) { LOG.info("Refreshing storefiles of region " + bestRegionReplica + " due to global heap pressure. 
Total memstore off heap size=" + TraditionalBinaryPrefix .long2String(server.getRegionServerAccounting().getGlobalMemStoreOffHeapSize(), "", 1) + " memstore heap size=" + TraditionalBinaryPrefix .long2String(server.getRegionServerAccounting().getGlobalMemStoreHeapSize(), "", 1)); flushedOne = refreshStoreFilesAndReclaimMemory(bestRegionReplica); if (!flushedOne) { LOG.info("Excluding secondary region " + bestRegionReplica + " - trying to find a different region to refresh files."); excludedRegions.add(bestRegionReplica); } } else { LOG.info("Flush of region " + regionToFlush + " due to global heap pressure. " + "Flush type=" + flushType.toString() + ", Total Memstore Heap size=" + TraditionalBinaryPrefix .long2String(server.getRegionServerAccounting().getGlobalMemStoreHeapSize(), "", 1) + ", Total Memstore Off-Heap size=" + TraditionalBinaryPrefix .long2String(server.getRegionServerAccounting().getGlobalMemStoreOffHeapSize(), "", 1) + ", Region memstore size=" + TraditionalBinaryPrefix.long2String(regionToFlushSize, "", 1)); flushedOne = flushRegion(regionToFlush, true, null, FlushLifeCycleTracker.DUMMY); if (!flushedOne) { LOG.info("Excluding unflushable region " + regionToFlush + " - trying to find a different region to flush."); excludedRegions.add(regionToFlush); } } } return true; } /** Returns Return memstore offheap size or null if <code>r</code> is null */ private static long getMemStoreOffHeapSize(HRegion r) { return r == null ? 0 : r.getMemStoreOffHeapSize(); } /** Returns Return memstore heap size or null if <code>r</code> is null */ private static long getMemStoreHeapSize(HRegion r) { return r == null ? 0 : r.getMemStoreHeapSize(); } /** Returns Return memstore data size or null if <code>r</code> is null */ private static long getMemStoreDataSize(HRegion r) { return r == null ? 
0 : r.getMemStoreDataSize(); } private class FlushHandler extends Thread { private final AtomicBoolean running = new AtomicBoolean(true); private FlushHandler(String name) { super(name); } @Override public void run() { while (!server.isStopped() && running.get()) { FlushQueueEntry fqe = null; try { wakeupPending.set(false); // allow someone to wake us up again fqe = flushQueue.poll(threadWakeFrequency, TimeUnit.MILLISECONDS); if (fqe == null || fqe == WAKEUPFLUSH_INSTANCE) { FlushType type = isAboveLowWaterMark(); if (type != FlushType.NORMAL) { LOG.debug("Flush thread woke up because memory above low water=" + TraditionalBinaryPrefix.long2String( server.getRegionServerAccounting().getGlobalMemStoreLimitLowMark(), "", 1)); // For offheap memstore, even if the lower water mark was breached due to heap // overhead // we still select the regions based on the region's memstore data size. // TODO : If we want to decide based on heap over head it can be done without tracking // it per region. if (!flushOneForGlobalPressure(type)) { // Wasn't able to flush any region, but we're above low water mark // This is unlikely to happen, but might happen when closing the // entire server - another thread is flushing regions. 
We'll just // sleep a little bit to avoid spinning, and then pretend that // we flushed one, so anyone blocked will check again Thread.sleep(1000); wakeUpIfBlocking(); } // Enqueue another one of these tokens so we'll wake up again wakeupFlushThread(); } continue; } FlushRegionEntry fre = (FlushRegionEntry) fqe; if (!flushRegion(fre)) { break; } } catch (InterruptedException ex) { continue; } catch (ConcurrentModificationException ex) { continue; } catch (Exception ex) { LOG.error("Cache flusher failed for entry " + fqe, ex); if (!server.checkFileSystem()) { break; } } } if (server.isStopped()) { synchronized (regionsInQueue) { regionsInQueue.clear(); flushQueue.clear(); } // Signal anyone waiting, so they see the close flag wakeUpIfBlocking(); } LOG.info(getName() + " exiting"); } public void shutdown() { if (!running.compareAndSet(true, false)) { LOG.warn("{} is already signaled to shutdown", getName()); } } } private void wakeupFlushThread() { if (wakeupPending.compareAndSet(false, true)) { flushQueue.add(WAKEUPFLUSH_INSTANCE); } } private HRegion getBiggestMemStoreRegion(SortedMap<Long, Collection<HRegion>> regionsBySize, Set<HRegion> excludedRegions, boolean checkStoreFileCount) { synchronized (regionsInQueue) { for (Map.Entry<Long, Collection<HRegion>> entry : regionsBySize.entrySet()) { for (HRegion region : entry.getValue()) { if (excludedRegions.contains(region)) { continue; } if (region.writestate.flushing || !region.writestate.writesEnabled) { continue; } if (checkStoreFileCount && isTooManyStoreFiles(region)) { continue; } return region; } } } return null; } private HRegion getBiggestMemStoreOfRegionReplica( SortedMap<Long, Collection<HRegion>> regionsBySize, Set<HRegion> excludedRegions) { synchronized (regionsInQueue) { for (Map.Entry<Long, Collection<HRegion>> entry : regionsBySize.entrySet()) { for (HRegion region : entry.getValue()) { if (excludedRegions.contains(region)) { continue; } if (RegionReplicaUtil.isDefaultReplica(region.getRegionInfo())) 
{ continue; } return region; } } } return null; } private boolean refreshStoreFilesAndReclaimMemory(Region region) { try { return region.refreshStoreFiles(); } catch (IOException e) { LOG.warn("Refreshing store files failed with exception", e); } return false; } /** * Return the FlushType if global memory usage is above the high watermark */ private FlushType isAboveHighWaterMark() { return server.getRegionServerAccounting().isAboveHighWaterMark(); } /** * Return the FlushType if we're above the low watermark */ private FlushType isAboveLowWaterMark() { return server.getRegionServerAccounting().isAboveLowWaterMark(); } @Override public boolean requestFlush(HRegion r, FlushLifeCycleTracker tracker) { return this.requestFlush(r, null, tracker); } @Override public boolean requestFlush(HRegion r, List<byte[]> families, FlushLifeCycleTracker tracker) { synchronized (regionsInQueue) { FlushRegionEntry existFqe = regionsInQueue.get(r); if (existFqe != null) { // if a delayed one exists and not reach the time to execute, just remove it if (existFqe.isDelay() && existFqe.whenToExpire > EnvironmentEdgeManager.currentTime()) { LOG.info("Remove the existing delayed flush entry for {}, " + "because we need to flush it immediately", r); this.regionsInQueue.remove(r); this.flushQueue.remove(existFqe); r.decrementFlushesQueuedCount(); } else { tracker.notExecuted("Flush already requested on " + r); return false; } } // This entry has no delay so it will be added at the top of the flush // queue. It'll come out near immediately. 
FlushRegionEntry fqe = new FlushRegionEntry(r, families, tracker); this.regionsInQueue.put(r, fqe); this.flushQueue.add(fqe); r.incrementFlushesQueuedCount(); return true; } } @Override public boolean requestDelayedFlush(HRegion r, long delay) { synchronized (regionsInQueue) { if (!regionsInQueue.containsKey(r)) { // This entry has some delay FlushRegionEntry fqe = new FlushRegionEntry(r, null, FlushLifeCycleTracker.DUMMY); fqe.requeue(delay); this.regionsInQueue.put(r, fqe); this.flushQueue.add(fqe); r.incrementFlushesQueuedCount(); return true; } return false; } } public int getFlushQueueSize() { return flushQueue.size(); } /** * Only interrupt once it's done with a run through the work loop. */ void interruptIfNecessary() { lock.writeLock().lock(); try { for (FlushHandler flushHandler : flushHandlers) { if (flushHandler != null) { flushHandler.interrupt(); } } } finally { lock.writeLock().unlock(); } } synchronized void start(UncaughtExceptionHandler eh) { this.flusherThreadFactory = new ThreadFactoryBuilder().setDaemon(true).setUncaughtExceptionHandler(eh).build(); lock.readLock().lock(); try { startFlushHandlerThreads(flushHandlers, 0, flushHandlers.length); } finally { lock.readLock().unlock(); } } boolean isAlive() { lock.readLock().lock(); try { for (FlushHandler flushHandler : flushHandlers) { if (flushHandler != null && flushHandler.isAlive()) { return true; } } return false; } finally { lock.readLock().unlock(); } } void shutdown() { lock.readLock().lock(); try { for (FlushHandler flushHandler : flushHandlers) { if (flushHandler != null) { Threads.shutdown(flushHandler); } } } finally { lock.readLock().unlock(); } } /** * A flushRegion that checks store file count. If too many, puts the flush on delay queue to retry * later. * @return true if the region was successfully flushed, false otherwise. If false, there will be * accompanying log messages explaining why the region was not flushed. 
*/ private boolean flushRegion(final FlushRegionEntry fqe) { HRegion region = fqe.region; if (!region.getRegionInfo().isMetaRegion() && isTooManyStoreFiles(region)) { if (fqe.isMaximumWait(this.blockingWaitTime)) { LOG.info("Waited " + (EnvironmentEdgeManager.currentTime() - fqe.createTime) + "ms on a compaction to clean up 'too many store files'; waited " + "long enough... proceeding with flush of " + region.getRegionInfo().getRegionNameAsString()); } else { // If this is first time we've been put off, then emit a log message. if (fqe.getRequeueCount() <= 0) { // Note: We don't impose blockingStoreFiles constraint on meta regions LOG.warn("{} has too many store files({}); delaying flush up to {} ms", region.getRegionInfo().getEncodedName(), getStoreFileCount(region), this.blockingWaitTime); final CompactSplit compactSplitThread = server.getCompactSplitThread(); if (!compactSplitThread.requestSplit(region)) { try { compactSplitThread.requestSystemCompaction(region, Thread.currentThread().getName()); } catch (IOException e) { e = e instanceof RemoteException ? ((RemoteException) e).unwrapRemoteException() : e; LOG.error("Cache flush failed for region " + Bytes.toStringBinary(region.getRegionInfo().getRegionName()), e); } } } // Put back on the queue. Have it come back out of the queue // after a delay of this.blockingWaitTime / 100 ms. this.flushQueue.add(fqe.requeue(this.blockingWaitTime / 100)); // Tell a lie, it's not flushed but it's ok return true; } } return flushRegion(region, false, fqe.families, fqe.getTracker()); } /** * Flush a region. * @param region Region to flush. * @param emergencyFlush Set if we are being force flushed. If true the region needs to be removed * from the flush queue. If false, when we were called from the main flusher * run loop and we got the entry to flush by calling poll on the flush queue * (which removed it). * @param families stores of region to flush. * @return true if the region was successfully flushed, false otherwise. 
If false, there will be * accompanying log messages explaining why the region was not flushed. */ private boolean flushRegion(HRegion region, boolean emergencyFlush, List<byte[]> families, FlushLifeCycleTracker tracker) { synchronized (this.regionsInQueue) { FlushRegionEntry fqe = this.regionsInQueue.remove(region); // Use the start time of the FlushRegionEntry if available if (fqe != null && emergencyFlush) { // Need to remove from region from delay queue. When NOT an // emergencyFlush, then item was removed via a flushQueue.poll. flushQueue.remove(fqe); } } tracker.beforeExecution(); lock.readLock().lock(); final CompactSplit compactSplitThread = server.getCompactSplitThread(); try { notifyFlushRequest(region, emergencyFlush); FlushResult flushResult = region.flushcache(families, false, tracker); boolean shouldCompact = flushResult.isCompactionNeeded(); // We just want to check the size boolean shouldSplit = region.checkSplit().isPresent(); if (shouldSplit) { compactSplitThread.requestSplit(region); } else if (shouldCompact) { compactSplitThread.requestSystemCompaction(region, Thread.currentThread().getName()); } } catch (DroppedSnapshotException ex) { // Cache flush can fail in a few places. If it fails in a critical // section, we get a DroppedSnapshotException and a replay of wal // is required. Currently the only way to do this is a restart of // the server. Abort because hdfs is probably bad (HBASE-644 is a case // where hdfs was bad but passed the hdfs check). server.abort("Replay of WAL required. Forcing server shutdown", ex); return false; } catch (IOException ex) { ex = ex instanceof RemoteException ? ((RemoteException) ex).unwrapRemoteException() : ex; LOG.error("Cache flush failed" + (region != null ? 
(" for region " + Bytes.toStringBinary(region.getRegionInfo().getRegionName())) : ""), ex); if (!server.checkFileSystem()) { return false; } } finally { lock.readLock().unlock(); wakeUpIfBlocking(); tracker.afterExecution(); } return true; } private void notifyFlushRequest(Region region, boolean emergencyFlush) { FlushType type = null; if (emergencyFlush) { type = isAboveHighWaterMark(); } if (type == null) { type = isAboveLowWaterMark(); } for (FlushRequestListener listener : flushRequestListeners) { listener.flushRequested(type, region); } } private void wakeUpIfBlocking() { synchronized (blockSignal) { blockSignal.notifyAll(); } } private boolean isTooManyStoreFiles(Region region) { // When compaction is disabled, the region is flushable if (!region.getTableDescriptor().isCompactionEnabled()) { return false; } for (Store store : region.getStores()) { if (store.hasTooManyStoreFiles()) { return true; } } return false; } private int getStoreFileCount(Region region) { int count = 0; for (Store store : region.getStores()) { count += store.getStorefilesCount(); } return count; } /** * Check if the regionserver's memstore memory usage is greater than the limit. If so, flush * regions with the biggest memstores until we're down to the lower limit. This method blocks * callers until we're down to a safe amount of memstore consumption. */ public void reclaimMemStoreMemory() { Span span = TraceUtil.getGlobalTracer().spanBuilder("MemStoreFluser.reclaimMemStoreMemory").startSpan(); try (Scope scope = span.makeCurrent()) { FlushType flushType = isAboveHighWaterMark(); if (flushType != FlushType.NORMAL) { span.addEvent("Force Flush. 
We're above high water mark."); long start = EnvironmentEdgeManager.currentTime(); long nextLogTimeMs = start; synchronized (this.blockSignal) { boolean blocked = false; long startTime = 0; boolean interrupted = false; try { flushType = isAboveHighWaterMark(); while (flushType != FlushType.NORMAL && !server.isStopped()) { if (!blocked) { startTime = EnvironmentEdgeManager.currentTime(); if (!server.getRegionServerAccounting().isOffheap()) { logMsg("global memstore heapsize", server.getRegionServerAccounting().getGlobalMemStoreHeapSize(), server.getRegionServerAccounting().getGlobalMemStoreLimit()); } else { switch (flushType) { case ABOVE_OFFHEAP_HIGHER_MARK: logMsg("the global offheap memstore datasize", server.getRegionServerAccounting().getGlobalMemStoreOffHeapSize(), server.getRegionServerAccounting().getGlobalMemStoreLimit()); break; case ABOVE_ONHEAP_HIGHER_MARK: logMsg("global memstore heapsize", server.getRegionServerAccounting().getGlobalMemStoreHeapSize(), server.getRegionServerAccounting().getGlobalOnHeapMemStoreLimit()); break; default: break; } } } blocked = true; wakeupFlushThread(); try { // we should be able to wait forever, but we've seen a bug where // we miss a notify, so put a 5 second bound on it at least. 
blockSignal.wait(5 * 1000); } catch (InterruptedException ie) { LOG.warn("Interrupted while waiting"); interrupted = true; } long nowMs = EnvironmentEdgeManager.currentTime(); if (nowMs >= nextLogTimeMs) { LOG.warn("Memstore is above high water mark and block {} ms", nowMs - start); nextLogTimeMs = nowMs + 1000; } flushType = isAboveHighWaterMark(); } } finally { if (interrupted) { Thread.currentThread().interrupt(); } } if (blocked) { final long totalTime = EnvironmentEdgeManager.currentTime() - startTime; if (totalTime > 0) { this.updatesBlockedMsHighWater.add(totalTime); } LOG.info("Unblocking updates for server " + server.toString()); } } } else { flushType = isAboveLowWaterMark(); if (flushType != FlushType.NORMAL) { wakeupFlushThread(); } span.end(); } } } private void logMsg(String type, long val, long max) { LOG.info("Blocking updates: {} {} is >= blocking {}", type, TraditionalBinaryPrefix.long2String(val, "", 1), TraditionalBinaryPrefix.long2String(max, "", 1)); } @Override public String toString() { return "flush_queue=" + flushQueue.size(); } public String dumpQueue() { StringBuilder queueList = new StringBuilder(); queueList.append("Flush Queue Queue dump:\n"); queueList.append(" Flush Queue:\n"); java.util.Iterator<FlushQueueEntry> it = flushQueue.iterator(); while (it.hasNext()) { queueList.append(" " + it.next().toString()); queueList.append("\n"); } return queueList.toString(); } /** * Register a MemstoreFlushListener */ @Override public void registerFlushRequestListener(final FlushRequestListener listener) { this.flushRequestListeners.add(listener); } /** * Unregister the listener from MemstoreFlushListeners * @return true when passed listener is unregistered successfully. */ @Override public boolean unregisterFlushRequestListener(final FlushRequestListener listener) { return this.flushRequestListeners.remove(listener); } /** * Sets the global memstore limit to a new size. 
*/ @Override public void setGlobalMemStoreLimit(long globalMemStoreSize) { this.server.getRegionServerAccounting().setGlobalMemStoreLimits(globalMemStoreSize); reclaimMemStoreMemory(); } interface FlushQueueEntry extends Delayed { } /** * Datastructure used in the flush queue. Holds region and retry count. Keeps tabs on how old this * object is. Implements {@link Delayed}. On construction, the delay is zero. When added to a * delay queue, we'll come out near immediately. Call {@link #requeue(long)} passing delay in * milliseconds before readding to delay queue if you want it to stay there a while. */ static class FlushRegionEntry implements FlushQueueEntry { private final HRegion region; private final long createTime; private long whenToExpire; private int requeueCount = 0; private final List<byte[]> families; private final FlushLifeCycleTracker tracker; FlushRegionEntry(final HRegion r, List<byte[]> families, FlushLifeCycleTracker tracker) { this.region = r; this.createTime = EnvironmentEdgeManager.currentTime(); this.whenToExpire = this.createTime; this.families = families; this.tracker = tracker; } /** Returns True if we have been delayed > <code>maximumWait</code> milliseconds. */ public boolean isMaximumWait(final long maximumWait) { return (EnvironmentEdgeManager.currentTime() - this.createTime) > maximumWait; } /** Returns True if the entry is a delay flush task */ protected boolean isDelay() { return this.whenToExpire > this.createTime; } /** * @return Count of times {@link #requeue(long)} was called; i.e this is number of times we've * been requeued. */ public int getRequeueCount() { return this.requeueCount; } public FlushLifeCycleTracker getTracker() { return tracker; } /** * @param when When to expire, when to come up out of the queue. Specify in milliseconds. This * method adds EnvironmentEdgeManager.currentTime() to whatever you pass. * @return This. 
*/ public FlushRegionEntry requeue(final long when) { this.whenToExpire = EnvironmentEdgeManager.currentTime() + when; this.requeueCount++; return this; } @Override public long getDelay(TimeUnit unit) { return unit.convert(this.whenToExpire - EnvironmentEdgeManager.currentTime(), TimeUnit.MILLISECONDS); } @Override public int compareTo(Delayed other) { // Delay is compared first. If there is a tie, compare region's hash code int ret = Long.valueOf(getDelay(TimeUnit.MILLISECONDS) - other.getDelay(TimeUnit.MILLISECONDS)) .intValue(); if (ret != 0) { return ret; } FlushQueueEntry otherEntry = (FlushQueueEntry) other; return hashCode() - otherEntry.hashCode(); } @Override public String toString() { return "[flush region " + Bytes.toStringBinary(region.getRegionInfo().getRegionName()) + "]"; } @Override public int hashCode() { int hash = (int) getDelay(TimeUnit.MILLISECONDS); return hash ^ region.hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } FlushRegionEntry other = (FlushRegionEntry) obj; if ( !Bytes.equals(this.region.getRegionInfo().getRegionName(), other.region.getRegionInfo().getRegionName()) ) { return false; } return compareTo(other) == 0; } } private int getHandlerCount(Configuration conf) { int handlerCount = conf.getInt("hbase.hstore.flusher.count", 2); if (handlerCount < 1) { LOG.warn( "hbase.hstore.flusher.count was configed to {} which is less than 1, " + "corrected to 1", handlerCount); handlerCount = 1; } return handlerCount; } @Override public void onConfigurationChange(Configuration newConf) { int newHandlerCount = getHandlerCount(newConf); if (newHandlerCount != flushHandlers.length) { LOG.info("update hbase.hstore.flusher.count from {} to {}", flushHandlers.length, newHandlerCount); lock.writeLock().lock(); try { FlushHandler[] newFlushHandlers = Arrays.copyOf(flushHandlers, newHandlerCount); if (newHandlerCount > flushHandlers.length) { 
startFlushHandlerThreads(newFlushHandlers, flushHandlers.length, newFlushHandlers.length); } else { stopFlushHandlerThreads(flushHandlers, newHandlerCount, flushHandlers.length); } flusherIdGen.compareAndSet(flushHandlers.length, newFlushHandlers.length); this.flushHandlers = newFlushHandlers; } finally { lock.writeLock().unlock(); } } } private void startFlushHandlerThreads(FlushHandler[] flushHandlers, int start, int end) { if (flusherThreadFactory != null) { for (int i = start; i < end; i++) { flushHandlers[i] = new FlushHandler("MemStoreFlusher." + flusherIdGen.getAndIncrement()); flusherThreadFactory.newThread(flushHandlers[i]); flushHandlers[i].start(); } } } private void stopFlushHandlerThreads(FlushHandler[] flushHandlers, int start, int end) { for (int i = start; i < end; i++) { flushHandlers[i].shutdown(); if (LOG.isDebugEnabled()) { LOG.debug("send shutdown signal to {}", flushHandlers[i].getName()); } } } public int getFlusherCount() { return flusherIdGen.get(); } }
google/guava
37,782
guava-tests/test/com/google/common/math/StatsAccumulatorTest.java
/* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.math; import static com.google.common.math.StatsTesting.ALLOWED_ERROR; import static com.google.common.math.StatsTesting.ALL_MANY_VALUES; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MEAN; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MEAN; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.MANY_VALUES; import static com.google.common.math.StatsTesting.MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.MANY_VALUES_MEAN; import static 
com.google.common.math.StatsTesting.MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.MEGA_STREAM_COUNT; import static com.google.common.math.StatsTesting.MEGA_STREAM_MAX; import static com.google.common.math.StatsTesting.MEGA_STREAM_MEAN; import static com.google.common.math.StatsTesting.MEGA_STREAM_MIN; import static com.google.common.math.StatsTesting.MEGA_STREAM_POPULATION_VARIANCE; import static com.google.common.math.StatsTesting.ONE_VALUE; import static com.google.common.math.StatsTesting.OTHER_ONE_VALUE; import static com.google.common.math.StatsTesting.TWO_VALUES; import static com.google.common.math.StatsTesting.TWO_VALUES_MAX; import static com.google.common.math.StatsTesting.TWO_VALUES_MEAN; import static com.google.common.math.StatsTesting.TWO_VALUES_MIN; import static com.google.common.math.StatsTesting.TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStream; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStreamPart1; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStreamPart2; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static java.lang.Math.sqrt; import static java.util.stream.DoubleStream.concat; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.math.StatsTesting.ManyValues; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import junit.framework.TestCase; import org.jspecify.annotations.NullUnmarked; /** * Tests for {@link StatsAccumulator}. 
This tests the stats methods for instances built with {@link * StatsAccumulator#add} and {@link StatsAccumulator#addAll}, and various error cases of the {@link * StatsAccumulator#add} and {@link StatsAccumulator#addAll} methods. For tests of the {@link * StatsAccumulator#snapshot} method which returns {@link Stats} instances, see {@link StatsTest}. * * @author Pete Gillin */ @NullUnmarked public class StatsAccumulatorTest extends TestCase { private StatsAccumulator emptyAccumulator; private StatsAccumulator emptyAccumulatorByAddAllEmptyIterable; private StatsAccumulator emptyAccumulatorByAddAllEmptyStats; private StatsAccumulator oneValueAccumulator; private StatsAccumulator oneValueAccumulatorByAddAllEmptyStats; private StatsAccumulator twoValuesAccumulator; private StatsAccumulator twoValuesAccumulatorByAddAllStats; private StatsAccumulator manyValuesAccumulatorByAddAllIterable; private StatsAccumulator manyValuesAccumulatorByAddAllIterator; private StatsAccumulator manyValuesAccumulatorByAddAllVarargs; private StatsAccumulator manyValuesAccumulatorByRepeatedAdd; private StatsAccumulator manyValuesAccumulatorByAddAndAddAll; private StatsAccumulator manyValuesAccumulatorByAddAllStats; private StatsAccumulator manyValuesAccumulatorByAddAllStatsAccumulator; private StatsAccumulator integerManyValuesAccumulatorByAddAllIterable; private StatsAccumulator longManyValuesAccumulatorByAddAllIterator; private StatsAccumulator longManyValuesAccumulatorByAddAllVarargs; @Override protected void setUp() throws Exception { super.setUp(); emptyAccumulator = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyIterable = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyIterable.addAll(ImmutableList.<Double>of()); emptyAccumulatorByAddAllEmptyStats = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyStats.addAll(Stats.of()); oneValueAccumulator = new StatsAccumulator(); oneValueAccumulator.add(ONE_VALUE); oneValueAccumulatorByAddAllEmptyStats = new StatsAccumulator(); 
oneValueAccumulatorByAddAllEmptyStats.add(ONE_VALUE); oneValueAccumulatorByAddAllEmptyStats.addAll(Stats.of()); twoValuesAccumulator = new StatsAccumulator(); twoValuesAccumulator.addAll(TWO_VALUES); twoValuesAccumulatorByAddAllStats = new StatsAccumulator(); twoValuesAccumulatorByAddAllStats.addAll(Stats.of(ONE_VALUE)); twoValuesAccumulatorByAddAllStats.addAll(Stats.of(OTHER_ONE_VALUE)); manyValuesAccumulatorByAddAllIterable = new StatsAccumulator(); manyValuesAccumulatorByAddAllIterable.addAll(MANY_VALUES); manyValuesAccumulatorByAddAllIterator = new StatsAccumulator(); manyValuesAccumulatorByAddAllIterator.addAll(MANY_VALUES.iterator()); manyValuesAccumulatorByAddAllVarargs = new StatsAccumulator(); manyValuesAccumulatorByAddAllVarargs.addAll(Doubles.toArray(MANY_VALUES)); manyValuesAccumulatorByRepeatedAdd = new StatsAccumulator(); for (double value : MANY_VALUES) { manyValuesAccumulatorByRepeatedAdd.add(value); } manyValuesAccumulatorByAddAndAddAll = new StatsAccumulator(); manyValuesAccumulatorByAddAndAddAll.add(MANY_VALUES.get(0)); manyValuesAccumulatorByAddAndAddAll.addAll(MANY_VALUES.subList(1, MANY_VALUES.size())); manyValuesAccumulatorByAddAllStats = new StatsAccumulator(); manyValuesAccumulatorByAddAllStats.addAll( Stats.of(MANY_VALUES.subList(0, MANY_VALUES.size() / 2))); manyValuesAccumulatorByAddAllStats.addAll( Stats.of(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size()))); manyValuesAccumulatorByAddAllStatsAccumulator = new StatsAccumulator(); manyValuesAccumulatorByAddAllStatsAccumulator.addAll( statsAccumulatorOf(MANY_VALUES.subList(0, MANY_VALUES.size() / 2))); manyValuesAccumulatorByAddAllStatsAccumulator.addAll( statsAccumulatorOf(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size()))); integerManyValuesAccumulatorByAddAllIterable = new StatsAccumulator(); integerManyValuesAccumulatorByAddAllIterable.addAll(INTEGER_MANY_VALUES); longManyValuesAccumulatorByAddAllIterator = new StatsAccumulator(); 
longManyValuesAccumulatorByAddAllIterator.addAll(LONG_MANY_VALUES.iterator()); longManyValuesAccumulatorByAddAllVarargs = new StatsAccumulator(); longManyValuesAccumulatorByAddAllVarargs.addAll(Longs.toArray(LONG_MANY_VALUES)); } private static StatsAccumulator statsAccumulatorOf(Iterable<? extends Number> values) { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(values); return accumulator; } public void testCount() { assertThat(emptyAccumulator.count()).isEqualTo(0); assertThat(emptyAccumulatorByAddAllEmptyIterable.count()).isEqualTo(0); assertThat(emptyAccumulatorByAddAllEmptyStats.count()).isEqualTo(0); assertThat(oneValueAccumulator.count()).isEqualTo(1); assertThat(oneValueAccumulatorByAddAllEmptyStats.count()).isEqualTo(1); assertThat(twoValuesAccumulator.count()).isEqualTo(2); assertThat(twoValuesAccumulatorByAddAllStats.count()).isEqualTo(2); assertThat(manyValuesAccumulatorByAddAllIterable.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(integerManyValuesAccumulatorByAddAllIterable.count()) .isEqualTo(StatsTesting.INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.count()) .isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.count()) .isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT); } public void testCountOverflow_doesNotThrow() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.add(ONE_VALUE); for (int power = 1; 
power < Long.SIZE - 1; power++) { accumulator.addAll(accumulator.snapshot()); } // Should overflow without throwing. accumulator.addAll(accumulator.snapshot()); assertThat(accumulator.count()).isLessThan(0L); } public void testMean() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.mean()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.mean()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.mean()); assertThat(oneValueAccumulator.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(twoValuesAccumulator.mean()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN); assertThat(twoValuesAccumulatorByAddAllStats.mean()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllIterable.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllIterator.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllVarargs.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByRepeatedAdd.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAndAddAll.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllStats.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); // For datasets of many double values created from an iterable, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asIterable()); for (double value : values.asIterable()) { 
accumulatorByAddAllStats.addAll(Stats.of(value)); } double mean = accumulator.mean(); double meanByAddAllStats = accumulatorByAddAllStats.mean(); if (values.hasAnyNaN()) { assertWithMessage("mean of " + values).that(mean).isNaN(); assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) { assertWithMessage("mean of " + values).that(mean).isNaN(); assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity()) { assertWithMessage("mean of " + values).that(mean).isPositiveInfinity(); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isPositiveInfinity(); } else if (values.hasAnyNegativeInfinity()) { assertWithMessage("mean of " + values).that(mean).isNegativeInfinity(); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isNegativeInfinity(); } else { assertWithMessage("mean of " + values) .that(mean) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.mean()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN) .of(INTEGER_MANY_VALUES_MEAN); assertThat(longManyValuesAccumulatorByAddAllIterator.mean()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN); assertThat(longManyValuesAccumulatorByAddAllVarargs.mean()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN); } public void testSum() { assertThat(emptyAccumulator.sum()).isEqualTo(0.0); assertThat(emptyAccumulatorByAddAllEmptyIterable.sum()).isEqualTo(0.0); assertThat(emptyAccumulatorByAddAllEmptyStats.sum()).isEqualTo(0.0); assertThat(oneValueAccumulator.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); 
assertThat(oneValueAccumulatorByAddAllEmptyStats.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(twoValuesAccumulator.sum()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN * 2); assertThat(twoValuesAccumulatorByAddAllStats.sum()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_MEAN * 2); assertThat(manyValuesAccumulatorByAddAllIterable.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(integerManyValuesAccumulatorByAddAllIterable.sum()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN) .of(INTEGER_MANY_VALUES_MEAN * INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.sum()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.sum()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT); } public void testPopulationVariance() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.populationVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.populationVariance()); 
assertThat(oneValueAccumulator.populationVariance()).isEqualTo(0.0); assertThat(oneValueAccumulatorByAddAllEmptyStats.populationVariance()).isEqualTo(0.0); assertThat(twoValuesAccumulator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2); assertThat(twoValuesAccumulatorByAddAllStats.populationVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2); assertThat(manyValuesAccumulatorByAddAllIterable.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); // For datasets of many double values created from an iterator, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asIterable().iterator()); for (double value : values.asIterable()) { accumulatorByAddAllStats.addAll(Stats.of(value)); } double populationVariance = 
accumulator.populationVariance(); double populationVarianceByAddAllStats = accumulatorByAddAllStats.populationVariance(); if (values.hasAnyNonFinite()) { assertWithMessage("population variance of " + values).that(populationVariance).isNaN(); assertWithMessage("population variance by addAll(Stats) of " + values) .that(populationVarianceByAddAllStats) .isNaN(); } else { assertWithMessage("population variance of " + values) .that(populationVariance) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertWithMessage("population variance by addAll(Stats) of " + values) .that(populationVarianceByAddAllStats) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.populationVariance()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.populationVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.populationVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT); } public void testPopulationStandardDeviation() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.populationStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.populationStandardDeviation()); assertThat(oneValueAccumulator.populationStandardDeviation()).isEqualTo(0.0); assertThat(oneValueAccumulatorByAddAllEmptyStats.populationStandardDeviation()).isEqualTo(0.0); 
assertThat(twoValuesAccumulator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2)); assertThat(twoValuesAccumulatorByAddAllStats.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2)); assertThat(manyValuesAccumulatorByAddAllIterable.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllIterator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllVarargs.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByRepeatedAdd.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAndAddAll.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllStats.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(integerManyValuesAccumulatorByAddAllIterable.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT)); assertThat(longManyValuesAccumulatorByAddAllIterator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT)); 
assertThat(longManyValuesAccumulatorByAddAllVarargs.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT)); } public void testSampleVariance() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.sampleVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.sampleVariance()); assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleVariance()); assertThrows( IllegalStateException.class, () -> oneValueAccumulatorByAddAllEmptyStats.sampleVariance()); assertThat(twoValuesAccumulator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS); assertThat(twoValuesAccumulatorByAddAllStats.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS); assertThat(manyValuesAccumulatorByAddAllIterable.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllIterator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllVarargs.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByRepeatedAdd.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAndAddAll.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllStats.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); 
assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleVariance()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1)); assertThat(longManyValuesAccumulatorByAddAllIterator.sampleVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)); assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)); } public void testSampleStandardDeviation() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.sampleStandardDeviation()); assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> oneValueAccumulatorByAddAllEmptyStats.sampleStandardDeviation()); assertThat(twoValuesAccumulator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS)); assertThat(twoValuesAccumulatorByAddAllStats.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS)); assertThat(manyValuesAccumulatorByAddAllIterable.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllIterator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) 
.of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByRepeatedAdd.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAndAddAll.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllStats.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1))); assertThat(longManyValuesAccumulatorByAddAllIterator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1))); assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1))); } public void testMax() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.max()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.max()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.max()); 
assertThat(oneValueAccumulator.max()).isEqualTo(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.max()).isEqualTo(ONE_VALUE); assertThat(twoValuesAccumulator.max()).isEqualTo(TWO_VALUES_MAX); assertThat(twoValuesAccumulatorByAddAllStats.max()).isEqualTo(TWO_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllIterable.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllIterator.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByRepeatedAdd.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAndAddAll.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllStats.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.max()).isEqualTo(MANY_VALUES_MAX); // For datasets of many double values created from an array, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asArray()); for (double value : values.asIterable()) { accumulatorByAddAllStats.addAll(Stats.of(value)); } double max = accumulator.max(); double maxByAddAllStats = accumulatorByAddAllStats.max(); if (values.hasAnyNaN()) { assertWithMessage("max of " + values).that(max).isNaN(); assertWithMessage("max by addAll(Stats) of " + values).that(maxByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity()) { assertWithMessage("max of " + values).that(max).isPositiveInfinity(); assertWithMessage("max by addAll(Stats) of " + values) .that(maxByAddAllStats) .isPositiveInfinity(); } else { assertWithMessage("max of " + values).that(max).isEqualTo(MANY_VALUES_MAX); assertWithMessage("max by addAll(Stats) of " + values) .that(maxByAddAllStats) .isEqualTo(MANY_VALUES_MAX); } } 
assertThat(integerManyValuesAccumulatorByAddAllIterable.max()) .isEqualTo(INTEGER_MANY_VALUES_MAX); assertThat(longManyValuesAccumulatorByAddAllIterator.max()).isEqualTo(LONG_MANY_VALUES_MAX); assertThat(longManyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(LONG_MANY_VALUES_MAX); } public void testMin() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.min()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.min()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.min()); assertThat(oneValueAccumulator.min()).isEqualTo(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.min()).isEqualTo(ONE_VALUE); assertThat(twoValuesAccumulator.min()).isEqualTo(TWO_VALUES_MIN); assertThat(twoValuesAccumulatorByAddAllStats.min()).isEqualTo(TWO_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllIterable.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllIterator.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByRepeatedAdd.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAndAddAll.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllStats.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.min()).isEqualTo(MANY_VALUES_MIN); // For datasets of many double values created by adding elements individually, we test many // combinations of finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); for (double value : values.asIterable()) { accumulator.add(value); accumulatorByAddAllStats.addAll(Stats.of(value)); } double min = accumulator.min(); double minByAddAllStats = accumulatorByAddAllStats.min(); if (values.hasAnyNaN()) { 
assertWithMessage("min of " + values).that(min).isNaN(); assertWithMessage("min by addAll(Stats) of " + values).that(minByAddAllStats).isNaN(); } else if (values.hasAnyNegativeInfinity()) { assertWithMessage("min of " + values).that(min).isNegativeInfinity(); assertWithMessage("min by addAll(Stats) of " + values) .that(minByAddAllStats) .isNegativeInfinity(); } else { assertWithMessage("min of " + values).that(min).isEqualTo(MANY_VALUES_MIN); assertWithMessage("min by addAll(Stats) of " + values) .that(minByAddAllStats) .isEqualTo(MANY_VALUES_MIN); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.min()) .isEqualTo(INTEGER_MANY_VALUES_MIN); assertThat(longManyValuesAccumulatorByAddAllIterator.min()).isEqualTo(LONG_MANY_VALUES_MIN); assertThat(longManyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(LONG_MANY_VALUES_MIN); } public void testVerifyMegaStreamHalves() { assertThat( concat(megaPrimitiveDoubleStreamPart1(), megaPrimitiveDoubleStreamPart2()) .sorted() .toArray()) .isEqualTo(megaPrimitiveDoubleStream().toArray()); } public void testAddAllPrimitiveDoubleStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1()); accumulator.addAll(megaPrimitiveDoubleStreamPart2()); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } public void testAddAllPrimitiveIntStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1().mapToInt(x -> (int) x)); accumulator.addAll(megaPrimitiveDoubleStreamPart2().mapToInt(x -> (int) x)); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); 
assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } public void testAddAllPrimitiveLongStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1().mapToLong(x -> (long) x)); accumulator.addAll(megaPrimitiveDoubleStreamPart2().mapToLong(x -> (long) x)); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } }
googleapis/google-cloud-java
37,475
java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/ProgramsServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.accounts.v1beta; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/shopping/merchant/accounts/v1beta/programs.proto") @io.grpc.stub.annotations.GrpcGenerated public final class ProgramsServiceGrpc { private ProgramsServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.shopping.merchant.accounts.v1beta.ProgramsService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.GetProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getGetProgramMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetProgram", requestType = com.google.shopping.merchant.accounts.v1beta.GetProgramRequest.class, responseType = com.google.shopping.merchant.accounts.v1beta.Program.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.GetProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getGetProgramMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.GetProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getGetProgramMethod; if ((getGetProgramMethod = ProgramsServiceGrpc.getGetProgramMethod) == null) { synchronized (ProgramsServiceGrpc.class) { if ((getGetProgramMethod = ProgramsServiceGrpc.getGetProgramMethod) == null) { ProgramsServiceGrpc.getGetProgramMethod = getGetProgramMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1beta.GetProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetProgram")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.GetProgramRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.Program .getDefaultInstance())) .setSchemaDescriptor( new ProgramsServiceMethodDescriptorSupplier("GetProgram")) .build(); } } } return getGetProgramMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest, 
com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> getListProgramsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListPrograms", requestType = com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest.class, responseType = com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest, com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> getListProgramsMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest, com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> getListProgramsMethod; if ((getListProgramsMethod = ProgramsServiceGrpc.getListProgramsMethod) == null) { synchronized (ProgramsServiceGrpc.class) { if ((getListProgramsMethod = ProgramsServiceGrpc.getListProgramsMethod) == null) { ProgramsServiceGrpc.getListProgramsMethod = getListProgramsMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest, com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListPrograms")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse .getDefaultInstance())) .setSchemaDescriptor( new ProgramsServiceMethodDescriptorSupplier("ListPrograms")) .build(); } } } return getListProgramsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest, 
com.google.shopping.merchant.accounts.v1beta.Program> getEnableProgramMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "EnableProgram", requestType = com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest.class, responseType = com.google.shopping.merchant.accounts.v1beta.Program.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getEnableProgramMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getEnableProgramMethod; if ((getEnableProgramMethod = ProgramsServiceGrpc.getEnableProgramMethod) == null) { synchronized (ProgramsServiceGrpc.class) { if ((getEnableProgramMethod = ProgramsServiceGrpc.getEnableProgramMethod) == null) { ProgramsServiceGrpc.getEnableProgramMethod = getEnableProgramMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "EnableProgram")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.Program .getDefaultInstance())) .setSchemaDescriptor( new ProgramsServiceMethodDescriptorSupplier("EnableProgram")) .build(); } } } return getEnableProgramMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getDisableProgramMethod; 
@io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DisableProgram", requestType = com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest.class, responseType = com.google.shopping.merchant.accounts.v1beta.Program.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getDisableProgramMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> getDisableProgramMethod; if ((getDisableProgramMethod = ProgramsServiceGrpc.getDisableProgramMethod) == null) { synchronized (ProgramsServiceGrpc.class) { if ((getDisableProgramMethod = ProgramsServiceGrpc.getDisableProgramMethod) == null) { ProgramsServiceGrpc.getDisableProgramMethod = getDisableProgramMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DisableProgram")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1beta.Program .getDefaultInstance())) .setSchemaDescriptor( new ProgramsServiceMethodDescriptorSupplier("DisableProgram")) .build(); } } } return getDisableProgramMethod; } /** Creates a new async stub that supports all call types for the service */ public static ProgramsServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceStub>() { 
@java.lang.Override public ProgramsServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceStub(channel, callOptions); } }; return ProgramsServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static ProgramsServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingV2Stub>() { @java.lang.Override public ProgramsServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceBlockingV2Stub(channel, callOptions); } }; return ProgramsServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static ProgramsServiceBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingStub>() { @java.lang.Override public ProgramsServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceBlockingStub(channel, callOptions); } }; return ProgramsServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static ProgramsServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceFutureStub>() { @java.lang.Override public ProgramsServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceFutureStub(channel, callOptions); } }; return ProgramsServiceFutureStub.newStub(factory, channel); } /** * * 
* <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. * </pre> */ public interface AsyncService { /** * * * <pre> * Retrieves the specified program for the account. * </pre> */ default void getProgram( com.google.shopping.merchant.accounts.v1beta.GetProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetProgramMethod(), responseObserver); } /** * * * <pre> * Retrieves all programs for the account. * </pre> */ default void listPrograms( com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListProgramsMethod(), responseObserver); } /** * * * <pre> * Enable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ default void enableProgram( com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getEnableProgramMethod(), responseObserver); } /** * * * <pre> * Disable participation in the specified program for the account. Executing * this method requires admin access. 
* </pre> */ default void disableProgram( com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDisableProgramMethod(), responseObserver); } } /** * Base class for the server implementation of the service ProgramsService. * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. * </pre> */ public abstract static class ProgramsServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return ProgramsServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service ProgramsService. * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. 
* </pre> */ public static final class ProgramsServiceStub extends io.grpc.stub.AbstractAsyncStub<ProgramsServiceStub> { private ProgramsServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ProgramsServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified program for the account. * </pre> */ public void getProgram( com.google.shopping.merchant.accounts.v1beta.GetProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetProgramMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Retrieves all programs for the account. * </pre> */ public void listPrograms( com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListProgramsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Enable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public void enableProgram( com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getEnableProgramMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Disable participation in the specified program for the account. Executing * this method requires admin access. 
* </pre> */ public void disableProgram( com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest request, io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDisableProgramMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service ProgramsService. * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. * </pre> */ public static final class ProgramsServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<ProgramsServiceBlockingV2Stub> { private ProgramsServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ProgramsServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Retrieves the specified program for the account. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program getProgram( com.google.shopping.merchant.accounts.v1beta.GetProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetProgramMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves all programs for the account. 
* </pre> */ public com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse listPrograms( com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListProgramsMethod(), getCallOptions(), request); } /** * * * <pre> * Enable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program enableProgram( com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getEnableProgramMethod(), getCallOptions(), request); } /** * * * <pre> * Disable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program disableProgram( com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDisableProgramMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service ProgramsService. * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. 
* </pre> */ public static final class ProgramsServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<ProgramsServiceBlockingStub> { private ProgramsServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ProgramsServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified program for the account. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program getProgram( com.google.shopping.merchant.accounts.v1beta.GetProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetProgramMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves all programs for the account. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse listPrograms( com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListProgramsMethod(), getCallOptions(), request); } /** * * * <pre> * Enable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program enableProgram( com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getEnableProgramMethod(), getCallOptions(), request); } /** * * * <pre> * Disable participation in the specified program for the account. Executing * this method requires admin access. 
* </pre> */ public com.google.shopping.merchant.accounts.v1beta.Program disableProgram( com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDisableProgramMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service ProgramsService. * * <pre> * Service for program management. * Programs provide a mechanism for adding functionality to merchant accounts. A * typical example of this is the [Free product * listings](https://support.google.com/merchants/topic/9240261?ref_topic=7257954,7259405,&amp;sjid=796648681813264022-EU) * program, which enables products from a merchant's store to be shown across * Google for free. * This service exposes methods to retrieve a merchant's * participation in all available programs, in addition to methods for * explicitly enabling or disabling participation in each program. * </pre> */ public static final class ProgramsServiceFutureStub extends io.grpc.stub.AbstractFutureStub<ProgramsServiceFutureStub> { private ProgramsServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected ProgramsServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new ProgramsServiceFutureStub(channel, callOptions); } /** * * * <pre> * Retrieves the specified program for the account. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1beta.Program> getProgram(com.google.shopping.merchant.accounts.v1beta.GetProgramRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetProgramMethod(), getCallOptions()), request); } /** * * * <pre> * Retrieves all programs for the account. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse> listPrograms(com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListProgramsMethod(), getCallOptions()), request); } /** * * * <pre> * Enable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1beta.Program> enableProgram(com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getEnableProgramMethod(), getCallOptions()), request); } /** * * * <pre> * Disable participation in the specified program for the account. Executing * this method requires admin access. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.accounts.v1beta.Program> disableProgram(com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDisableProgramMethod(), getCallOptions()), request); } } private static final int METHODID_GET_PROGRAM = 0; private static final int METHODID_LIST_PROGRAMS = 1; private static final int METHODID_ENABLE_PROGRAM = 2; private static final int METHODID_DISABLE_PROGRAM = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } 
@java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GET_PROGRAM: serviceImpl.getProgram( (com.google.shopping.merchant.accounts.v1beta.GetProgramRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program>) responseObserver); break; case METHODID_LIST_PROGRAMS: serviceImpl.listPrograms( (com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest) request, (io.grpc.stub.StreamObserver< com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse>) responseObserver); break; case METHODID_ENABLE_PROGRAM: serviceImpl.enableProgram( (com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program>) responseObserver); break; case METHODID_DISABLE_PROGRAM: serviceImpl.disableProgram( (com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest) request, (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Program>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGetProgramMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1beta.GetProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program>( service, METHODID_GET_PROGRAM))) .addMethod( getListProgramsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1beta.ListProgramsRequest, 
com.google.shopping.merchant.accounts.v1beta.ListProgramsResponse>( service, METHODID_LIST_PROGRAMS))) .addMethod( getEnableProgramMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1beta.EnableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program>( service, METHODID_ENABLE_PROGRAM))) .addMethod( getDisableProgramMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.accounts.v1beta.DisableProgramRequest, com.google.shopping.merchant.accounts.v1beta.Program>( service, METHODID_DISABLE_PROGRAM))) .build(); } private abstract static class ProgramsServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { ProgramsServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.shopping.merchant.accounts.v1beta.ProgramsProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("ProgramsService"); } } private static final class ProgramsServiceFileDescriptorSupplier extends ProgramsServiceBaseDescriptorSupplier { ProgramsServiceFileDescriptorSupplier() {} } private static final class ProgramsServiceMethodDescriptorSupplier extends ProgramsServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; ProgramsServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { 
io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (ProgramsServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new ProgramsServiceFileDescriptorSupplier()) .addMethod(getGetProgramMethod()) .addMethod(getListProgramsMethod()) .addMethod(getEnableProgramMethod()) .addMethod(getDisableProgramMethod()) .build(); } } } return result; } }
apache/lens
37,307
lens-driver-jdbc/src/main/java/org/apache/lens/driver/jdbc/JDBCDriver.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.lens.driver.jdbc; import static java.lang.Integer.parseInt; import static java.util.Arrays.asList; import static org.apache.lens.driver.jdbc.JDBCDriverConfConstants.*; import static org.apache.lens.driver.jdbc.JDBCDriverConfConstants.ConnectionPoolProperties.*; import static com.google.common.base.Preconditions.checkState; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.sql.*; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; import org.apache.lens.api.LensConf; import org.apache.lens.api.query.QueryHandle; import org.apache.lens.api.query.QueryPrepareHandle; import org.apache.lens.cube.parse.HQLParser; import org.apache.lens.cube.query.cost.StaticCostCalculator; import org.apache.lens.server.api.driver.*; import org.apache.lens.server.api.driver.DriverQueryStatus.DriverQueryState; import org.apache.lens.server.api.error.LensDriverErrorCode; import org.apache.lens.server.api.error.LensException; import org.apache.lens.server.api.events.LensEventListener; import org.apache.lens.server.api.metrics.MethodMetricsContext; 
import org.apache.lens.server.api.metrics.MethodMetricsFactory; import org.apache.lens.server.api.query.AbstractQueryContext; import org.apache.lens.server.api.query.PreparedQueryContext; import org.apache.lens.server.api.query.QueryContext; import org.apache.lens.server.api.query.constraint.MaxConcurrentDriverQueriesConstraintFactory; import org.apache.lens.server.api.query.cost.*; import org.apache.lens.server.api.query.rewrite.QueryRewriter; import org.apache.lens.server.api.util.LensUtil; import org.apache.lens.server.model.LogSegregationContext; import org.apache.lens.server.model.MappedDiagnosticLogSegregationContext; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.parse.ASTNode; import org.apache.hadoop.hive.ql.parse.HiveParser; import lombok.*; import lombok.extern.slf4j.Slf4j; /** * This driver is responsible for running queries against databases which can be queried using the JDBC API. */ @Slf4j public class JDBCDriver extends AbstractLensDriver { /** The Constant THID. */ public static final AtomicInteger THID = new AtomicInteger(); /** The connection provider. */ private ConnectionProvider connectionProvider; /** The configured. */ boolean configured = false; /** The async query pool. */ private ExecutorService asyncQueryPool; /** The query context map. */ @Getter private ConcurrentHashMap<QueryHandle, JdbcQueryContext> queryContextMap; /** Configuration for estimate connection pool */ private Configuration estimateConf; /** Estimate connection provider */ private ConnectionProvider estimateConnectionProvider; private LogSegregationContext logSegregationContext; private boolean isStatementCancelSupported; QueryCostCalculator queryCostCalculator; /** * Data related to a query submitted to JDBCDriver. */ @Data protected class JdbcQueryContext { /** The lens context. */ private final QueryContext lensContext; /** The result future. 
*/ private Future<QueryResult> resultFuture; /** The rewritten query. */ private String rewrittenQuery; /** The is prepared. */ private boolean isPrepared; /** The is closed. */ private boolean isClosed; /** The query result. */ private QueryResult queryResult; /** * Close result. */ public void closeResult() { if (queryResult != null) { queryResult.close(); } isClosed = true; } public boolean cancel() { boolean ret; log.debug("Canceling resultFuture object"); ret = resultFuture.cancel(true); log.debug("Done resultFuture cancel!"); // queryResult object would be null if query is not yet launched - since we did future.cancel, no other cancel is // required. if (queryResult != null && queryResult.stmt != null && isStatementCancelSupported) { log.debug("Cancelling query through statement cancel"); try { queryResult.stmt.cancel(); log.debug("Done statement cancel!"); ret = true; } catch (SQLFeatureNotSupportedException se) { log.warn("Statement cancel not supported", se); } catch(SQLException e) { log.warn("Statement cancel failed", e); ret = false; } } return ret; } public String getQueryHandleString() { return this.lensContext.getQueryHandleString(); } } /** * Result of a query and associated resources like statement and connection. After the results are consumed, close() * should be called to close the statement and connection */ protected class QueryResult { /** The result set. */ private ResultSet resultSet; /** The error. */ private Throwable error; /** The conn. */ private Connection conn; /** The stmt. */ private Statement stmt; /** The is closed. */ private boolean isClosed; /** The lens result set. */ private InMemoryResultSet lensResultSet; /** * Close. 
*/ protected synchronized void close() { if (isClosed) { return; } try { if (stmt != null) { try { stmt.close(); } catch (SQLException e) { log.error("Error closing SQL statement", e); } } } finally { if (conn != null) { try { conn.close(); } catch (SQLException e) { log.error("Error closing SQL Connection", e); } } } isClosed = true; } /** * Gets the lens result set. * * @param closeAfterFetch the close after fetch * @return the lens result set * @throws LensException the lens exception */ protected synchronized LensResultSet getLensResultSet(boolean closeAfterFetch) throws LensException { if (error != null) { throw new LensException("Query failed!", error); } if (lensResultSet == null) { lensResultSet = new JDBCResultSet(this, resultSet, closeAfterFetch); } return lensResultSet; } } /** * Callabled that returns query result after running the query. This is used for async queries. */ protected class QueryCallable implements Callable<QueryResult> { /** The query context. */ private final JdbcQueryContext queryContext; private final LogSegregationContext logSegregationContext; /** * Instantiates a new query callable. 
* * @param queryContext the query context */ public QueryCallable(JdbcQueryContext queryContext, @NonNull LogSegregationContext logSegregationContext) { this.queryContext = queryContext; this.logSegregationContext = logSegregationContext; queryContext.getLensContext().setDriverStatus(DriverQueryState.INITIALIZED); } /* * (non-Javadoc) * * @see java.util.concurrent.Callable#call() */ @Override public QueryResult call() { logSegregationContext.setLogSegragationAndQueryId(this.queryContext.getQueryHandleString()); queryContext.getLensContext().setDriverStatus(DriverQueryState.RUNNING); Statement stmt; Connection conn = null; QueryResult result = new QueryResult(); try { queryContext.setQueryResult(result); try { conn = getConnection(); result.conn = conn; } catch (LensException e) { log.error("Error obtaining connection: ", e); result.error = e; } if (conn != null) { try { stmt = createStatement(conn); result.stmt = stmt; Boolean isResultAvailable = stmt.execute(queryContext.getRewrittenQuery()); if (queryContext.getLensContext().getDriverStatus().isCanceled()) { return result; } if (isResultAvailable) { result.resultSet = stmt.getResultSet(); } queryContext.getLensContext().getDriverStatus().setResultSetAvailable(isResultAvailable); queryContext.getLensContext().setDriverStatus(DriverQueryState.SUCCESSFUL); } catch (Exception e) { if (queryContext.getLensContext().getDriverStatus().isCanceled()) { return result; } if (queryContext.isClosed()) { log.info("Ignored exception on already closed query : {} - {}", queryContext.getLensContext().getQueryHandle(), e.getMessage(), e); } else { log.error("Error executing SQL query: {} reason: {}", queryContext.getLensContext().getQueryHandle(), e.getMessage(), e); result.error = e; queryContext.getLensContext().setDriverStatus(DriverQueryState.FAILED, e.getMessage()); // Close connection in case of failed queries. 
For successful queries, connection is closed // When result set is closed or driver.closeQuery is called result.close(); } } } } finally { Long endTime = queryContext.getLensContext().getDriverStatus().getDriverFinishTime(); if (endTime == null || endTime <= 0) { queryContext.getLensContext().getDriverStatus().setDriverFinishTime(System.currentTimeMillis()); } } return result; } /** * Create statement used to issue the query * * @param conn pre created SQL Connection object * @return statement * @throws SQLException the SQL exception */ public Statement createStatement(Connection conn) throws SQLException { Statement stmt; boolean enabledRowRetrieval = queryContext.getLensContext().getSelectedDriverConf().getBoolean( JDBCDriverConfConstants.JDBC_ENABLE_RESULTSET_STREAMING_RETRIEVAL, JDBCDriverConfConstants.DEFAULT_JDBC_ENABLE_RESULTSET_STREAMING_RETRIEVAL); if (enabledRowRetrieval) { log.info("JDBC streaming retrieval is enabled for {}", queryContext.getLensContext().getQueryHandle()); if (queryContext.isPrepared()) { stmt = conn.prepareStatement(queryContext.getRewrittenQuery(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } else { stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } stmt.setFetchSize(Integer.MIN_VALUE); } else { stmt = queryContext.isPrepared() ? conn.prepareStatement(queryContext.getRewrittenQuery()) : conn.createStatement(); // Get default fetch size from conf if not overridden in query conf int fetchSize = queryContext.getLensContext().getSelectedDriverConf().getInt( JDBCDriverConfConstants.JDBC_FETCH_SIZE, JDBCDriverConfConstants.DEFAULT_JDBC_FETCH_SIZE); stmt.setFetchSize(fetchSize); } stmt.setFetchDirection(ResultSet.FETCH_FORWARD); return stmt; } } /** * The Class DummyQueryRewriter. 
*/ public static class DummyQueryRewriter implements QueryRewriter { /* * (non-Javadoc) * * @see org.apache.lens.server.api.query.QueryRewriter#rewrite * (java.lang.String, org.apache.hadoop.conf.Configuration) */ @Override public String rewrite(String query, Configuration queryConf, HiveConf metastoreConf) throws LensException { return query; } @Override public void init(Configuration rewriteConf) { } } /* * (non-Javadoc) * * @see org.apache.lens.server.api.driver.LensDriver#configure(org.apache.hadoop.conf.Configuration) */ @Override public void configure(Configuration conf, String driverType, String driverName) throws LensException { super.configure(conf, driverType, driverName); init(); configured = true; Class<? extends QueryCostCalculator> queryCostCalculatorClass = getConf().getClass(JDBC_COST_CALCULATOR, StaticCostCalculator.class, QueryCostCalculator.class); try { queryCostCalculator = queryCostCalculatorClass.newInstance(); } catch (InstantiationException | IllegalAccessException e) { throw new LensException("Can't instantiate query cost calculator of class: " + queryCostCalculatorClass, e); } //For initializing the decider class instance queryCostCalculator.init(this); log.info("JDBC Driver {} configured", getFullyQualifiedName()); } /** * Inits the. 
* * @throws LensException the lens exception */ public void init() throws LensException { final int maxPoolSize = parseInt(getConf().get(JDBC_POOL_MAX_SIZE.getConfigKey())); final int maxConcurrentQueries = parseInt(getConf().get(MaxConcurrentDriverQueriesConstraintFactory.MAX_CONCURRENT_QUERIES_KEY)); checkState(maxPoolSize >= maxConcurrentQueries, "maxPoolSize:" + maxPoolSize + " maxConcurrentQueries:" + maxConcurrentQueries); queryContextMap = new ConcurrentHashMap<>(); asyncQueryPool = Executors.newCachedThreadPool(new ThreadFactory() { @Override public Thread newThread(Runnable runnable) { Thread th = new Thread(runnable); th.setName("lens-driver-jdbc-" + THID.incrementAndGet()); return th; } }); Class<? extends ConnectionProvider> cpClass = getConf().getClass(JDBC_CONNECTION_PROVIDER, DataSourceConnectionProvider.class, ConnectionProvider.class); try { connectionProvider = cpClass.newInstance(); estimateConnectionProvider = cpClass.newInstance(); } catch (Exception e) { log.error("Error initializing connection provider: ", e); throw new LensException(e); } this.logSegregationContext = new MappedDiagnosticLogSegregationContext(); this.isStatementCancelSupported = getConf().getBoolean(STATEMENT_CANCEL_SUPPORTED, DEFAULT_STATEMENT_CANCEL_SUPPORTED); } public QueryCost calculateQueryCost(AbstractQueryContext qctx) throws LensException { return queryCostCalculator.calculateCost(qctx, this); } /** * Check configured. * * @throws IllegalStateException the illegal state exception */ protected void checkConfigured() throws IllegalStateException { if (!configured) { throw new IllegalStateException("JDBC Driver is not configured!"); } } protected synchronized Connection getConnection() throws LensException { try { // Add here to cover the path when the queries are executed it does not // use the driver conf return connectionProvider.getConnection(getConf()); } catch (SQLException e) { throw new LensException(e); } } /** * Gets the query rewriter. 
* * @return the query rewriter * @throws LensException the lens exception */ protected QueryRewriter getQueryRewriter() throws LensException { QueryRewriter rewriter; Class<? extends QueryRewriter> queryRewriterClass = getConf().getClass(JDBC_QUERY_REWRITER_CLASS, DummyQueryRewriter.class, QueryRewriter.class); try { rewriter = queryRewriterClass.newInstance(); log.info("{} Initialized :{}", getFullyQualifiedName(), queryRewriterClass); } catch (Exception e) { log.error("{} Unable to create rewriter object", getFullyQualifiedName(), e); throw new LensException(e); } rewriter.init(getConf()); return rewriter; } /** * Gets the query context. * * @param handle the handle * @return the query context * @throws LensException the lens exception */ protected JdbcQueryContext getQueryContext(QueryHandle handle) throws LensException { JdbcQueryContext ctx = queryContextMap.get(handle); if (ctx == null) { throw new LensException("Query not found:" + handle.getHandleId()); } return ctx; } /** * Rewrite query. 
* * @return the string * @throws LensException the lens exception */ protected String rewriteQuery(AbstractQueryContext ctx) throws LensException { if (ctx.getFinalDriverQuery(this) != null) { return ctx.getFinalDriverQuery(this); } String query = ctx.getDriverQuery(this); Configuration driverQueryConf = ctx.getDriverConf(this); MethodMetricsContext checkForAllowedQuery = MethodMetricsFactory.createMethodGauge(driverQueryConf, true, CHECK_ALLOWED_QUERY); // check if it is select query ASTNode ast = HQLParser.parseHQL(query, ctx.getHiveConf()); if (ast.getToken().getType() != HiveParser.TOK_QUERY) { throw new LensException("Not allowed statement:" + query); } else { // check for insert clause ASTNode dest = HQLParser.findNodeByPath(ast, HiveParser.TOK_INSERT); if (dest != null && ((ASTNode) (dest.getChild(0).getChild(0).getChild(0))).getToken().getType() != HiveParser.TOK_TMP_FILE) { throw new LensException("Not allowed statement:" + query); } } checkForAllowedQuery.markSuccess(); QueryRewriter rewriter = getQueryRewriter(); String rewrittenQuery = rewriter.rewrite(query, driverQueryConf, ctx.getHiveConf()); ctx.setFinalDriverQuery(this, rewrittenQuery); return rewrittenQuery; } /** * Dummy JDBC query Plan class to get min cost selector working. 
*/ private static class JDBCQueryPlan extends DriverQueryPlan { @Getter private final QueryCost cost; JDBCQueryPlan(QueryCost cost){ this.cost = cost; } @Override public String getPlan() { return ""; } } private static final String VALIDATE_GAUGE = "validate-thru-prepare"; private static final String COLUMNAR_SQL_REWRITE_GAUGE = "columnar-sql-rewrite"; private static final String JDBC_PREPARE_GAUGE = "jdbc-prepare-statement"; private static final String CHECK_ALLOWED_QUERY = "jdbc-check-allowed-query"; @Override public QueryCost estimate(AbstractQueryContext qctx) throws LensException { MethodMetricsContext validateGauge = MethodMetricsFactory.createMethodGauge(qctx.getDriverConf(this), true, VALIDATE_GAUGE); validate(qctx); validateGauge.markSuccess(); return calculateQueryCost(qctx); } /** * Explain the given query. * * @param explainCtx The explain context * @return The query plan object; * @throws LensException the lens exception */ @Override public DriverQueryPlan explain(AbstractQueryContext explainCtx) throws LensException { if (explainCtx.getDriverQuery(this) == null) { throw new NullPointerException("Null driver query for " + explainCtx.getUserQuery()); } if (explainCtx.getDriverContext().getDriverQueryPlan(this) != null) { // explain called again and again return explainCtx.getDriverContext().getDriverQueryPlan(this); } checkConfigured(); String explainQuery; String rewrittenQuery = rewriteQuery(explainCtx); Configuration explainConf = explainCtx.getDriverConf(this); String explainKeyword = explainConf.get(JDBC_EXPLAIN_KEYWORD_PARAM, DEFAULT_JDBC_EXPLAIN_KEYWORD); boolean explainBeforeSelect = explainConf.getBoolean(JDBC_EXPLAIN_KEYWORD_BEFORE_SELECT, DEFAULT_JDBC_EXPLAIN_KEYWORD_BEFORE_SELECT); if (explainBeforeSelect) { explainQuery = explainKeyword + " " + rewrittenQuery; } else { explainQuery = rewrittenQuery.replaceAll("select ", "select " + explainKeyword + " "); } log.info("{} Explain Query : {}", getFullyQualifiedName(), explainQuery); boolean 
validateThroughPrepare = explainCtx.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN, DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN); if (validateThroughPrepare) { QueryContext explainQueryCtx = QueryContext.createContextWithSingleDriver(explainQuery, null, new LensConf(), explainConf, this, explainCtx.getLensSessionIdentifier(), false); QueryResult result = null; try { result = executeInternal(explainQueryCtx, explainQuery); if (result.error != null) { throw new LensException("Query explain failed!", result.error); } } finally { if (result != null) { result.close(); } } } JDBCQueryPlan jqp = new JDBCQueryPlan(calculateQueryCost(explainCtx)); explainCtx.getDriverContext().setDriverQueryPlan(this, jqp); return jqp; } /** * Validate query using prepare * * @param pContext context to validate * @throws LensException */ public void validate(AbstractQueryContext pContext) throws LensException { if (pContext.getDriverQuery(this) == null) { throw new NullPointerException("Null driver query for " + pContext.getUserQuery()); } boolean validateThroughPrepare = pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN, DEFAULT_JDBC_VALIDATE_THROUGH_PREPARE_OR_EXPLAIN); if (validateThroughPrepare) { PreparedStatement stmt; // Estimate queries need to get connection from estimate pool to make sure // we are not blocked by data queries. stmt = prepareInternal(pContext, true, true, "validate-"); if (stmt != null) { try { stmt.close(); } catch (SQLException e) { throw new LensException(); } } } } // Get key used for estimate key config protected String getEstimateKey(String jdbcKey) { return JDBC_DRIVER_PFX + "estimate." + jdbcKey.substring(JDBC_DRIVER_PFX.length()); } // If any 'key' in 'keys' is set in conf, return its value. private static String getKeyOrFallBack(Configuration conf, String... 
keys) { for (String key : keys) { String val = conf.get(key); if (StringUtils.isNotBlank(val)) { return val; } } return null; } // Get connection config used by estimate pool. protected final Configuration getEstimateConnectionConf() { if (estimateConf == null) { Configuration tmpConf = new Configuration(getConf()); // Override JDBC settings in estimate conf, if set by user explicitly. Otherwise fall back to default JDBC pool // config for (String key : asList(JDBC_CONNECTION_PROPERTIES, JDBC_DB_URI, JDBC_DRIVER_CLASS, JDBC_USER, JDBC_PASSWORD, JDBC_POOL_MAX_SIZE.getConfigKey(), JDBC_POOL_IDLE_TIME.getConfigKey(), JDBC_MAX_IDLE_TIME_EXCESS_CONNECTIONS.getConfigKey(), JDBC_MAX_STATEMENTS_PER_CONNECTION.getConfigKey(), JDBC_GET_CONNECTION_TIMEOUT.getConfigKey())) { String val = getKeyOrFallBack(tmpConf, getEstimateKey(key), key); if (val != null) { tmpConf.set(key, val); } } /* We need to set password as empty string if it is not provided. Setting null on conf is not allowed */ if (tmpConf.get(JDBC_PASSWORD) == null) { tmpConf.set(JDBC_PASSWORD, ""); } estimateConf = tmpConf; } return estimateConf; } protected final Connection getEstimateConnection() throws SQLException { return estimateConnectionProvider.getConnection(getEstimateConnectionConf()); } // For tests protected final ConnectionProvider getEstimateConnectionProvider() { return estimateConnectionProvider; } // For tests protected final ConnectionProvider getConnectionProvider() { return connectionProvider; } private final Map<QueryPrepareHandle, PreparedStatement> preparedQueries = new HashMap<>(); /** * Internally prepare the query * * @param pContext prepare context * @return prepared statement of the query * @throws LensException */ private PreparedStatement prepareInternal(AbstractQueryContext pContext) throws LensException { if (pContext.getDriverQuery(this) == null) { throw new NullPointerException("Null driver query for " + pContext.getUserQuery()); } checkConfigured(); return 
prepareInternal(pContext, false, false, "prepare-"); } /** * Prepare statment on the database server * @param pContext query context * @param calledForEstimate set this to true if this call will use the estimate connection pool * @param checkConfigured set this to true if this call needs to check whether JDBC driver is configured * @param metricCallStack stack for metrics API * @return prepared statement * @throws LensException */ private PreparedStatement prepareInternal(AbstractQueryContext pContext, boolean calledForEstimate, boolean checkConfigured, String metricCallStack) throws LensException { // Caller might have already verified configured status and driver query, so we don't have // to do this check twice. Caller must set checkConfigured to false in that case. if (checkConfigured) { if (pContext.getDriverQuery(this) == null) { throw new NullPointerException("Null driver query for " + pContext.getUserQuery()); } checkConfigured(); } // Only create a prepared statement and then close it MethodMetricsContext sqlRewriteGauge = MethodMetricsFactory.createMethodGauge(pContext.getDriverConf(this), true, metricCallStack + COLUMNAR_SQL_REWRITE_GAUGE); String rewrittenQuery = rewriteQuery(pContext); sqlRewriteGauge.markSuccess(); MethodMetricsContext jdbcPrepareGauge = MethodMetricsFactory.createMethodGauge(pContext.getDriverConf(this), true, metricCallStack + JDBC_PREPARE_GAUGE); PreparedStatement stmt = null; Connection conn = null; try { conn = calledForEstimate ? 
getEstimateConnection() : getConnection(); stmt = conn.prepareStatement(rewrittenQuery); if (!pContext.getDriverConf(this).getBoolean(JDBC_VALIDATE_SKIP_WARNINGS, DEFAULT_JDBC_VALIDATE_SKIP_WARNINGS) && stmt.getWarnings() != null) { throw new LensException(stmt.getWarnings()); } } catch (SQLException sql) { handleJDBCSQLException(sql); } finally { if (conn != null) { try { conn.close(); } catch (SQLException e) { log.error("Error closing connection: {}", rewrittenQuery, e); } } jdbcPrepareGauge.markSuccess(); } log.info("Prepared: {}", rewrittenQuery); return stmt; } /** * Handle sql exception * * @param sqlex SQLException * @throws LensException */ private LensException handleJDBCSQLException(SQLException sqlex) throws LensException { String cause = LensUtil.getCauseMessage(sqlex); if (getSqlSynataxExceptions(sqlex).contains("SyntaxError")) { throw new LensException(LensDriverErrorCode.SEMANTIC_ERROR.getLensErrorInfo(), sqlex, cause); } throw new LensException(LensDriverErrorCode.DRIVER_ERROR.getLensErrorInfo(), sqlex, cause); } private String getSqlSynataxExceptions(Throwable e) { String exp = null; if (e.getCause() != null) { exp = e.getClass() + getSqlSynataxExceptions(e.getCause()); } return exp; } /** * Prepare the given query. * * @param pContext * the context * @throws LensException * the lens exception */ @Override public void prepare(PreparedQueryContext pContext) throws LensException { if (preparedQueries.containsKey(pContext.getPrepareHandle())) { // already prepared return; } PreparedStatement stmt = prepareInternal(pContext); if (stmt != null) { preparedQueries.put(pContext.getPrepareHandle(), stmt); } } /** * Explain and prepare the given query. 
* * @param pContext the context * @return The query plan object; * @throws LensException the lens exception */ @Override public DriverQueryPlan explainAndPrepare(PreparedQueryContext pContext) throws LensException { checkConfigured(); prepare(pContext); return new JDBCQueryPlan(calculateQueryCost(pContext)); } /** * Close the prepare query specified by the prepared handle, releases all the resources held by the prepared query. * * @param handle The query handle * @throws LensException the lens exception */ @Override public void closePreparedQuery(QueryPrepareHandle handle) throws LensException { checkConfigured(); try { if (preparedQueries.get(handle) != null) { preparedQueries.get(handle).close(); } } catch (SQLException e) { throw new LensException(e); } } /** * Blocking execute of the query. * * @param context the context * @return returns the result set * @throws LensException the lens exception */ @Override public LensResultSet execute(QueryContext context) throws LensException { checkConfigured(); String rewrittenQuery = rewriteQuery(context); log.info("{} Execute {}", getFullyQualifiedName(), context.getQueryHandle()); QueryResult result = executeInternal(context, rewrittenQuery); return result.getLensResultSet(true); } /** * Internally executing query. * * @param context the context * @param rewrittenQuery the rewritten query * @return returns the result set * @throws LensException the lens exception */ private QueryResult executeInternal(QueryContext context, String rewrittenQuery) throws LensException { JdbcQueryContext queryContext = new JdbcQueryContext(context); queryContext.setPrepared(false); queryContext.setRewrittenQuery(rewrittenQuery); return new QueryCallable(queryContext, logSegregationContext).call(); } /** * Asynchronously execute the query. 
* * @param context The query context * @throws LensException the lens exception */ @Override public void executeAsync(QueryContext context) throws LensException { checkConfigured(); // Always use the driver rewritten query not user query. Since the // conf we are passing here is query context conf, we need to add jdbc xml in resource path String rewrittenQuery = rewriteQuery(context); JdbcQueryContext jdbcCtx = new JdbcQueryContext(context); jdbcCtx.setRewrittenQuery(rewrittenQuery); try { Future<QueryResult> future = asyncQueryPool.submit(new QueryCallable(jdbcCtx, logSegregationContext)); jdbcCtx.setResultFuture(future); } catch (RejectedExecutionException e) { log.error("Query execution rejected: {} reason:{}", context.getQueryHandle(), e.getMessage(), e); throw new LensException("Query execution rejected: " + context.getQueryHandle() + " reason:" + e.getMessage(), e); } queryContextMap.put(context.getQueryHandle(), jdbcCtx); log.info("{} ExecuteAsync: {}", getFullyQualifiedName(), context.getQueryHandle()); } /** * Get status of the query, specified by the handle. * * @param context The query handle * @throws LensException the lens exception */ @Override public void updateStatus(QueryContext context) throws LensException { checkConfigured(); JdbcQueryContext ctx = getQueryContext(context.getQueryHandle()); if (ctx.getLensContext().getDriverStatus().isFinished()) { // terminal state. No updates can be done. 
return; } if (ctx.getResultFuture().isCancelled()) { if (!context.getDriverStatus().isCanceled()) { context.getDriverStatus().setProgress(1.0); context.getDriverStatus().setState(DriverQueryState.CANCELED); context.getDriverStatus().setStatusMessage("Query Canceled"); } } else if (ctx.getResultFuture().isDone()) { context.getDriverStatus().setProgress(1.0); // Since future is already done, this call should not block if (ctx.getQueryResult() != null && ctx.getQueryResult().error != null) { if (!context.getDriverStatus().isFailed()) { context.getDriverStatus().setState(DriverQueryState.FAILED); context.getDriverStatus().setStatusMessage("Query execution failed!"); context.getDriverStatus().setErrorMessage(ctx.getQueryResult().error.getMessage()); } } else { if (!context.getDriverStatus().isFinished()) { // assuming successful context.getDriverStatus().setState(DriverQueryState.SUCCESSFUL); context.getDriverStatus().setStatusMessage(context.getQueryHandle() + " successful"); context.getDriverStatus().setResultSetAvailable(true); } } } else { if (!context.getDriverStatus().isRunning()) { context.getDriverStatus().setState(DriverQueryState.RUNNING); context.getDriverStatus().setStatusMessage(context.getQueryHandle() + " is running"); } } } @Override protected LensResultSet createResultSet(QueryContext ctx) throws LensException { checkConfigured(); return getQueryContext(ctx.getQueryHandle()).getQueryResult().getLensResultSet(true); } /** * Close the resultset for the query. * * @param handle The query handle * @throws LensException the lens exception */ @Override public void closeResultSet(QueryHandle handle) throws LensException { checkConfigured(); getQueryContext(handle).closeResult(); } /** * Cancel the execution of the query, specified by the handle. * * @param handle The query handle. 
* @return true if cancel was successful, false otherwise * @throws LensException the lens exception */ @Override public boolean cancelQuery(QueryHandle handle) throws LensException { checkConfigured(); JdbcQueryContext context = getQueryContext(handle); log.info("{} cancel request on query {}", getFullyQualifiedName(), handle); boolean cancelResult = context.cancel(); if (cancelResult) { context.getLensContext().setDriverStatus(DriverQueryState.CANCELED); context.closeResult(); log.info("{} Canceled query : {}", getFullyQualifiedName(), handle); } return cancelResult; } /** * Close the query specified by the handle, releases all the resources held by the query. * * @param handle The query handle * @throws LensException the lens exception */ @Override public void closeQuery(QueryHandle handle) throws LensException { checkConfigured(); try { JdbcQueryContext ctx = getQueryContext(handle); if (ctx != null) { ctx.getResultFuture().cancel(true); ctx.closeResult(); } } catch (LensException exc) { log.error("{} Failed to close query {}", getFullyQualifiedName(), handle.getHandleId()); } finally { queryContextMap.remove(handle); } log.info("{} Closed query {}", getFullyQualifiedName(), handle.getHandleId()); } /** * Close the driver, releasing all resouces used up by the driver. * * @throws LensException the lens exception */ @Override public void close() throws LensException { checkConfigured(); try { for (QueryHandle query : new ArrayList<>(queryContextMap.keySet())) { try { closeQuery(query); } catch (LensException e) { log.warn("{} Error closing query : {}", getFullyQualifiedName(), query.getHandleId(), e); } } for (QueryPrepareHandle query : preparedQueries.keySet()) { try { try { preparedQueries.get(query).close(); } catch (SQLException e) { throw new LensException(); } } catch (LensException e) { log.warn("{} Error closing prapared query : {}", getFullyQualifiedName(), query, e); } } } finally { queryContextMap.clear(); } } /** * Add a listener for driver events. 
* * @param driverEventListener the driver event listener */ @Override public void registerDriverEventListener(LensEventListener<DriverEvent> driverEventListener) { } /* * (non-Javadoc) * * @see java.io.Externalizable#readExternal(java.io.ObjectInput) */ @Override public void readExternal(ObjectInput arg0) throws IOException, ClassNotFoundException { // TODO Auto-generated method stub } /* * (non-Javadoc) * * @see java.io.Externalizable#writeExternal(java.io.ObjectOutput) */ @Override public void writeExternal(ObjectOutput arg0) throws IOException { // TODO Auto-generated method stub } @Override public StatusUpdateMethod getStatusUpdateMethod() { return StatusUpdateMethod.PUSH; } }
googleapis/google-cloud-java
37,603
java-accesscontextmanager/google-identity-accesscontextmanager/src/main/java/com/google/identity/accesscontextmanager/v1/AccessContextManagerSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.identity.accesscontextmanager.v1; import static com.google.identity.accesscontextmanager.v1.AccessContextManagerClient.ListAccessLevelsPagedResponse; import static com.google.identity.accesscontextmanager.v1.AccessContextManagerClient.ListAccessPoliciesPagedResponse; import static com.google.identity.accesscontextmanager.v1.AccessContextManagerClient.ListGcpUserAccessBindingsPagedResponse; import static com.google.identity.accesscontextmanager.v1.AccessContextManagerClient.ListServicePerimetersPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.BetaApi; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.ClientSettings; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import 
com.google.iam.v1.TestIamPermissionsResponse; import com.google.identity.accesscontextmanager.v1.stub.AccessContextManagerStubSettings; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link AccessContextManagerClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (accesscontextmanager.googleapis.com) and default port (443) * are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getAccessPolicy: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * AccessContextManagerSettings.Builder accessContextManagerSettingsBuilder = * AccessContextManagerSettings.newBuilder(); * accessContextManagerSettingsBuilder * .getAccessPolicySettings() * .setRetrySettings( * accessContextManagerSettingsBuilder * .getAccessPolicySettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * AccessContextManagerSettings accessContextManagerSettings = * accessContextManagerSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for createAccessPolicy: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * AccessContextManagerSettings.Builder accessContextManagerSettingsBuilder = * AccessContextManagerSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * accessContextManagerSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @Generated("by gapic-generator-java") public class AccessContextManagerSettings extends ClientSettings<AccessContextManagerSettings> { /** Returns the object with the settings used for calls to listAccessPolicies. */ public PagedCallSettings< ListAccessPoliciesRequest, ListAccessPoliciesResponse, ListAccessPoliciesPagedResponse> listAccessPoliciesSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).listAccessPoliciesSettings(); } /** Returns the object with the settings used for calls to getAccessPolicy. */ public UnaryCallSettings<GetAccessPolicyRequest, AccessPolicy> getAccessPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).getAccessPolicySettings(); } /** Returns the object with the settings used for calls to createAccessPolicy. */ public UnaryCallSettings<AccessPolicy, Operation> createAccessPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).createAccessPolicySettings(); } /** Returns the object with the settings used for calls to createAccessPolicy. 
*/ public OperationCallSettings<AccessPolicy, AccessPolicy, AccessContextManagerOperationMetadata> createAccessPolicyOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .createAccessPolicyOperationSettings(); } /** Returns the object with the settings used for calls to updateAccessPolicy. */ public UnaryCallSettings<UpdateAccessPolicyRequest, Operation> updateAccessPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).updateAccessPolicySettings(); } /** Returns the object with the settings used for calls to updateAccessPolicy. */ public OperationCallSettings< UpdateAccessPolicyRequest, AccessPolicy, AccessContextManagerOperationMetadata> updateAccessPolicyOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .updateAccessPolicyOperationSettings(); } /** Returns the object with the settings used for calls to deleteAccessPolicy. */ public UnaryCallSettings<DeleteAccessPolicyRequest, Operation> deleteAccessPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).deleteAccessPolicySettings(); } /** Returns the object with the settings used for calls to deleteAccessPolicy. */ public OperationCallSettings< DeleteAccessPolicyRequest, Empty, AccessContextManagerOperationMetadata> deleteAccessPolicyOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .deleteAccessPolicyOperationSettings(); } /** Returns the object with the settings used for calls to listAccessLevels. */ public PagedCallSettings< ListAccessLevelsRequest, ListAccessLevelsResponse, ListAccessLevelsPagedResponse> listAccessLevelsSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).listAccessLevelsSettings(); } /** Returns the object with the settings used for calls to getAccessLevel. 
*/ public UnaryCallSettings<GetAccessLevelRequest, AccessLevel> getAccessLevelSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).getAccessLevelSettings(); } /** Returns the object with the settings used for calls to createAccessLevel. */ public UnaryCallSettings<CreateAccessLevelRequest, Operation> createAccessLevelSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).createAccessLevelSettings(); } /** Returns the object with the settings used for calls to createAccessLevel. */ public OperationCallSettings< CreateAccessLevelRequest, AccessLevel, AccessContextManagerOperationMetadata> createAccessLevelOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .createAccessLevelOperationSettings(); } /** Returns the object with the settings used for calls to updateAccessLevel. */ public UnaryCallSettings<UpdateAccessLevelRequest, Operation> updateAccessLevelSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).updateAccessLevelSettings(); } /** Returns the object with the settings used for calls to updateAccessLevel. */ public OperationCallSettings< UpdateAccessLevelRequest, AccessLevel, AccessContextManagerOperationMetadata> updateAccessLevelOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .updateAccessLevelOperationSettings(); } /** Returns the object with the settings used for calls to deleteAccessLevel. */ public UnaryCallSettings<DeleteAccessLevelRequest, Operation> deleteAccessLevelSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).deleteAccessLevelSettings(); } /** Returns the object with the settings used for calls to deleteAccessLevel. 
*/ public OperationCallSettings< DeleteAccessLevelRequest, Empty, AccessContextManagerOperationMetadata> deleteAccessLevelOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .deleteAccessLevelOperationSettings(); } /** Returns the object with the settings used for calls to replaceAccessLevels. */ public UnaryCallSettings<ReplaceAccessLevelsRequest, Operation> replaceAccessLevelsSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).replaceAccessLevelsSettings(); } /** Returns the object with the settings used for calls to replaceAccessLevels. */ public OperationCallSettings< ReplaceAccessLevelsRequest, ReplaceAccessLevelsResponse, AccessContextManagerOperationMetadata> replaceAccessLevelsOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .replaceAccessLevelsOperationSettings(); } /** Returns the object with the settings used for calls to listServicePerimeters. */ public PagedCallSettings< ListServicePerimetersRequest, ListServicePerimetersResponse, ListServicePerimetersPagedResponse> listServicePerimetersSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).listServicePerimetersSettings(); } /** Returns the object with the settings used for calls to getServicePerimeter. */ public UnaryCallSettings<GetServicePerimeterRequest, ServicePerimeter> getServicePerimeterSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).getServicePerimeterSettings(); } /** Returns the object with the settings used for calls to createServicePerimeter. */ public UnaryCallSettings<CreateServicePerimeterRequest, Operation> createServicePerimeterSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).createServicePerimeterSettings(); } /** Returns the object with the settings used for calls to createServicePerimeter. 
*/ public OperationCallSettings< CreateServicePerimeterRequest, ServicePerimeter, AccessContextManagerOperationMetadata> createServicePerimeterOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .createServicePerimeterOperationSettings(); } /** Returns the object with the settings used for calls to updateServicePerimeter. */ public UnaryCallSettings<UpdateServicePerimeterRequest, Operation> updateServicePerimeterSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).updateServicePerimeterSettings(); } /** Returns the object with the settings used for calls to updateServicePerimeter. */ public OperationCallSettings< UpdateServicePerimeterRequest, ServicePerimeter, AccessContextManagerOperationMetadata> updateServicePerimeterOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .updateServicePerimeterOperationSettings(); } /** Returns the object with the settings used for calls to deleteServicePerimeter. */ public UnaryCallSettings<DeleteServicePerimeterRequest, Operation> deleteServicePerimeterSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).deleteServicePerimeterSettings(); } /** Returns the object with the settings used for calls to deleteServicePerimeter. */ public OperationCallSettings< DeleteServicePerimeterRequest, Empty, AccessContextManagerOperationMetadata> deleteServicePerimeterOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .deleteServicePerimeterOperationSettings(); } /** Returns the object with the settings used for calls to replaceServicePerimeters. */ public UnaryCallSettings<ReplaceServicePerimetersRequest, Operation> replaceServicePerimetersSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .replaceServicePerimetersSettings(); } /** Returns the object with the settings used for calls to replaceServicePerimeters. 
*/ public OperationCallSettings< ReplaceServicePerimetersRequest, ReplaceServicePerimetersResponse, AccessContextManagerOperationMetadata> replaceServicePerimetersOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .replaceServicePerimetersOperationSettings(); } /** Returns the object with the settings used for calls to commitServicePerimeters. */ public UnaryCallSettings<CommitServicePerimetersRequest, Operation> commitServicePerimetersSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).commitServicePerimetersSettings(); } /** Returns the object with the settings used for calls to commitServicePerimeters. */ public OperationCallSettings< CommitServicePerimetersRequest, CommitServicePerimetersResponse, AccessContextManagerOperationMetadata> commitServicePerimetersOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .commitServicePerimetersOperationSettings(); } /** Returns the object with the settings used for calls to listGcpUserAccessBindings. */ public PagedCallSettings< ListGcpUserAccessBindingsRequest, ListGcpUserAccessBindingsResponse, ListGcpUserAccessBindingsPagedResponse> listGcpUserAccessBindingsSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .listGcpUserAccessBindingsSettings(); } /** Returns the object with the settings used for calls to getGcpUserAccessBinding. */ public UnaryCallSettings<GetGcpUserAccessBindingRequest, GcpUserAccessBinding> getGcpUserAccessBindingSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).getGcpUserAccessBindingSettings(); } /** Returns the object with the settings used for calls to createGcpUserAccessBinding. 
*/ public UnaryCallSettings<CreateGcpUserAccessBindingRequest, Operation> createGcpUserAccessBindingSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .createGcpUserAccessBindingSettings(); } /** Returns the object with the settings used for calls to createGcpUserAccessBinding. */ public OperationCallSettings< CreateGcpUserAccessBindingRequest, GcpUserAccessBinding, GcpUserAccessBindingOperationMetadata> createGcpUserAccessBindingOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .createGcpUserAccessBindingOperationSettings(); } /** Returns the object with the settings used for calls to updateGcpUserAccessBinding. */ public UnaryCallSettings<UpdateGcpUserAccessBindingRequest, Operation> updateGcpUserAccessBindingSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .updateGcpUserAccessBindingSettings(); } /** Returns the object with the settings used for calls to updateGcpUserAccessBinding. */ public OperationCallSettings< UpdateGcpUserAccessBindingRequest, GcpUserAccessBinding, GcpUserAccessBindingOperationMetadata> updateGcpUserAccessBindingOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .updateGcpUserAccessBindingOperationSettings(); } /** Returns the object with the settings used for calls to deleteGcpUserAccessBinding. */ public UnaryCallSettings<DeleteGcpUserAccessBindingRequest, Operation> deleteGcpUserAccessBindingSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .deleteGcpUserAccessBindingSettings(); } /** Returns the object with the settings used for calls to deleteGcpUserAccessBinding. 
*/ public OperationCallSettings< DeleteGcpUserAccessBindingRequest, Empty, GcpUserAccessBindingOperationMetadata> deleteGcpUserAccessBindingOperationSettings() { return ((AccessContextManagerStubSettings) getStubSettings()) .deleteGcpUserAccessBindingOperationSettings(); } /** Returns the object with the settings used for calls to setIamPolicy. */ public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).setIamPolicySettings(); } /** Returns the object with the settings used for calls to getIamPolicy. */ public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() { return ((AccessContextManagerStubSettings) getStubSettings()).getIamPolicySettings(); } /** Returns the object with the settings used for calls to testIamPermissions. */ public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return ((AccessContextManagerStubSettings) getStubSettings()).testIamPermissionsSettings(); } public static final AccessContextManagerSettings create(AccessContextManagerStubSettings stub) throws IOException { return new AccessContextManagerSettings.Builder(stub.toBuilder()).build(); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return AccessContextManagerStubSettings.defaultExecutorProviderBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return AccessContextManagerStubSettings.getDefaultEndpoint(); } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return AccessContextManagerStubSettings.getDefaultServiceScopes(); } /** Returns a builder for the default credentials for this service. 
*/ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return AccessContextManagerStubSettings.defaultCredentialsProviderBuilder(); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return AccessContextManagerStubSettings.defaultGrpcTransportProviderBuilder(); } /** Returns a builder for the default REST ChannelProvider for this service. */ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return AccessContextManagerStubSettings.defaultHttpJsonTransportProviderBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return AccessContextManagerStubSettings.defaultTransportChannelProvider(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return AccessContextManagerStubSettings.defaultApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. */ public Builder toBuilder() { return new Builder(this); } protected AccessContextManagerSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); } /** Builder for AccessContextManagerSettings. 
*/ public static class Builder extends ClientSettings.Builder<AccessContextManagerSettings, Builder> { protected Builder() throws IOException { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(AccessContextManagerStubSettings.newBuilder(clientContext)); } protected Builder(AccessContextManagerSettings settings) { super(settings.getStubSettings().toBuilder()); } protected Builder(AccessContextManagerStubSettings.Builder stubSettings) { super(stubSettings); } private static Builder createDefault() { return new Builder(AccessContextManagerStubSettings.newBuilder()); } private static Builder createHttpJsonDefault() { return new Builder(AccessContextManagerStubSettings.newHttpJsonBuilder()); } public AccessContextManagerStubSettings.Builder getStubSettingsBuilder() { return ((AccessContextManagerStubSettings.Builder) getStubSettings()); } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods( getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater); return this; } /** Returns the builder for the settings used for calls to listAccessPolicies. */ public PagedCallSettings.Builder< ListAccessPoliciesRequest, ListAccessPoliciesResponse, ListAccessPoliciesPagedResponse> listAccessPoliciesSettings() { return getStubSettingsBuilder().listAccessPoliciesSettings(); } /** Returns the builder for the settings used for calls to getAccessPolicy. */ public UnaryCallSettings.Builder<GetAccessPolicyRequest, AccessPolicy> getAccessPolicySettings() { return getStubSettingsBuilder().getAccessPolicySettings(); } /** Returns the builder for the settings used for calls to createAccessPolicy. 
*/ public UnaryCallSettings.Builder<AccessPolicy, Operation> createAccessPolicySettings() { return getStubSettingsBuilder().createAccessPolicySettings(); } /** Returns the builder for the settings used for calls to createAccessPolicy. */ public OperationCallSettings.Builder< AccessPolicy, AccessPolicy, AccessContextManagerOperationMetadata> createAccessPolicyOperationSettings() { return getStubSettingsBuilder().createAccessPolicyOperationSettings(); } /** Returns the builder for the settings used for calls to updateAccessPolicy. */ public UnaryCallSettings.Builder<UpdateAccessPolicyRequest, Operation> updateAccessPolicySettings() { return getStubSettingsBuilder().updateAccessPolicySettings(); } /** Returns the builder for the settings used for calls to updateAccessPolicy. */ public OperationCallSettings.Builder< UpdateAccessPolicyRequest, AccessPolicy, AccessContextManagerOperationMetadata> updateAccessPolicyOperationSettings() { return getStubSettingsBuilder().updateAccessPolicyOperationSettings(); } /** Returns the builder for the settings used for calls to deleteAccessPolicy. */ public UnaryCallSettings.Builder<DeleteAccessPolicyRequest, Operation> deleteAccessPolicySettings() { return getStubSettingsBuilder().deleteAccessPolicySettings(); } /** Returns the builder for the settings used for calls to deleteAccessPolicy. */ public OperationCallSettings.Builder< DeleteAccessPolicyRequest, Empty, AccessContextManagerOperationMetadata> deleteAccessPolicyOperationSettings() { return getStubSettingsBuilder().deleteAccessPolicyOperationSettings(); } /** Returns the builder for the settings used for calls to listAccessLevels. */ public PagedCallSettings.Builder< ListAccessLevelsRequest, ListAccessLevelsResponse, ListAccessLevelsPagedResponse> listAccessLevelsSettings() { return getStubSettingsBuilder().listAccessLevelsSettings(); } /** Returns the builder for the settings used for calls to getAccessLevel. 
*/ public UnaryCallSettings.Builder<GetAccessLevelRequest, AccessLevel> getAccessLevelSettings() { return getStubSettingsBuilder().getAccessLevelSettings(); } /** Returns the builder for the settings used for calls to createAccessLevel. */ public UnaryCallSettings.Builder<CreateAccessLevelRequest, Operation> createAccessLevelSettings() { return getStubSettingsBuilder().createAccessLevelSettings(); } /** Returns the builder for the settings used for calls to createAccessLevel. */ public OperationCallSettings.Builder< CreateAccessLevelRequest, AccessLevel, AccessContextManagerOperationMetadata> createAccessLevelOperationSettings() { return getStubSettingsBuilder().createAccessLevelOperationSettings(); } /** Returns the builder for the settings used for calls to updateAccessLevel. */ public UnaryCallSettings.Builder<UpdateAccessLevelRequest, Operation> updateAccessLevelSettings() { return getStubSettingsBuilder().updateAccessLevelSettings(); } /** Returns the builder for the settings used for calls to updateAccessLevel. */ public OperationCallSettings.Builder< UpdateAccessLevelRequest, AccessLevel, AccessContextManagerOperationMetadata> updateAccessLevelOperationSettings() { return getStubSettingsBuilder().updateAccessLevelOperationSettings(); } /** Returns the builder for the settings used for calls to deleteAccessLevel. */ public UnaryCallSettings.Builder<DeleteAccessLevelRequest, Operation> deleteAccessLevelSettings() { return getStubSettingsBuilder().deleteAccessLevelSettings(); } /** Returns the builder for the settings used for calls to deleteAccessLevel. */ public OperationCallSettings.Builder< DeleteAccessLevelRequest, Empty, AccessContextManagerOperationMetadata> deleteAccessLevelOperationSettings() { return getStubSettingsBuilder().deleteAccessLevelOperationSettings(); } /** Returns the builder for the settings used for calls to replaceAccessLevels. 
*/ public UnaryCallSettings.Builder<ReplaceAccessLevelsRequest, Operation> replaceAccessLevelsSettings() { return getStubSettingsBuilder().replaceAccessLevelsSettings(); } /** Returns the builder for the settings used for calls to replaceAccessLevels. */ public OperationCallSettings.Builder< ReplaceAccessLevelsRequest, ReplaceAccessLevelsResponse, AccessContextManagerOperationMetadata> replaceAccessLevelsOperationSettings() { return getStubSettingsBuilder().replaceAccessLevelsOperationSettings(); } /** Returns the builder for the settings used for calls to listServicePerimeters. */ public PagedCallSettings.Builder< ListServicePerimetersRequest, ListServicePerimetersResponse, ListServicePerimetersPagedResponse> listServicePerimetersSettings() { return getStubSettingsBuilder().listServicePerimetersSettings(); } /** Returns the builder for the settings used for calls to getServicePerimeter. */ public UnaryCallSettings.Builder<GetServicePerimeterRequest, ServicePerimeter> getServicePerimeterSettings() { return getStubSettingsBuilder().getServicePerimeterSettings(); } /** Returns the builder for the settings used for calls to createServicePerimeter. */ public UnaryCallSettings.Builder<CreateServicePerimeterRequest, Operation> createServicePerimeterSettings() { return getStubSettingsBuilder().createServicePerimeterSettings(); } /** Returns the builder for the settings used for calls to createServicePerimeter. */ public OperationCallSettings.Builder< CreateServicePerimeterRequest, ServicePerimeter, AccessContextManagerOperationMetadata> createServicePerimeterOperationSettings() { return getStubSettingsBuilder().createServicePerimeterOperationSettings(); } /** Returns the builder for the settings used for calls to updateServicePerimeter. 
*/ public UnaryCallSettings.Builder<UpdateServicePerimeterRequest, Operation> updateServicePerimeterSettings() { return getStubSettingsBuilder().updateServicePerimeterSettings(); } /** Returns the builder for the settings used for calls to updateServicePerimeter. */ public OperationCallSettings.Builder< UpdateServicePerimeterRequest, ServicePerimeter, AccessContextManagerOperationMetadata> updateServicePerimeterOperationSettings() { return getStubSettingsBuilder().updateServicePerimeterOperationSettings(); } /** Returns the builder for the settings used for calls to deleteServicePerimeter. */ public UnaryCallSettings.Builder<DeleteServicePerimeterRequest, Operation> deleteServicePerimeterSettings() { return getStubSettingsBuilder().deleteServicePerimeterSettings(); } /** Returns the builder for the settings used for calls to deleteServicePerimeter. */ public OperationCallSettings.Builder< DeleteServicePerimeterRequest, Empty, AccessContextManagerOperationMetadata> deleteServicePerimeterOperationSettings() { return getStubSettingsBuilder().deleteServicePerimeterOperationSettings(); } /** Returns the builder for the settings used for calls to replaceServicePerimeters. */ public UnaryCallSettings.Builder<ReplaceServicePerimetersRequest, Operation> replaceServicePerimetersSettings() { return getStubSettingsBuilder().replaceServicePerimetersSettings(); } /** Returns the builder for the settings used for calls to replaceServicePerimeters. */ public OperationCallSettings.Builder< ReplaceServicePerimetersRequest, ReplaceServicePerimetersResponse, AccessContextManagerOperationMetadata> replaceServicePerimetersOperationSettings() { return getStubSettingsBuilder().replaceServicePerimetersOperationSettings(); } /** Returns the builder for the settings used for calls to commitServicePerimeters. 
*/ public UnaryCallSettings.Builder<CommitServicePerimetersRequest, Operation> commitServicePerimetersSettings() { return getStubSettingsBuilder().commitServicePerimetersSettings(); } /** Returns the builder for the settings used for calls to commitServicePerimeters. */ public OperationCallSettings.Builder< CommitServicePerimetersRequest, CommitServicePerimetersResponse, AccessContextManagerOperationMetadata> commitServicePerimetersOperationSettings() { return getStubSettingsBuilder().commitServicePerimetersOperationSettings(); } /** Returns the builder for the settings used for calls to listGcpUserAccessBindings. */ public PagedCallSettings.Builder< ListGcpUserAccessBindingsRequest, ListGcpUserAccessBindingsResponse, ListGcpUserAccessBindingsPagedResponse> listGcpUserAccessBindingsSettings() { return getStubSettingsBuilder().listGcpUserAccessBindingsSettings(); } /** Returns the builder for the settings used for calls to getGcpUserAccessBinding. */ public UnaryCallSettings.Builder<GetGcpUserAccessBindingRequest, GcpUserAccessBinding> getGcpUserAccessBindingSettings() { return getStubSettingsBuilder().getGcpUserAccessBindingSettings(); } /** Returns the builder for the settings used for calls to createGcpUserAccessBinding. */ public UnaryCallSettings.Builder<CreateGcpUserAccessBindingRequest, Operation> createGcpUserAccessBindingSettings() { return getStubSettingsBuilder().createGcpUserAccessBindingSettings(); } /** Returns the builder for the settings used for calls to createGcpUserAccessBinding. */ public OperationCallSettings.Builder< CreateGcpUserAccessBindingRequest, GcpUserAccessBinding, GcpUserAccessBindingOperationMetadata> createGcpUserAccessBindingOperationSettings() { return getStubSettingsBuilder().createGcpUserAccessBindingOperationSettings(); } /** Returns the builder for the settings used for calls to updateGcpUserAccessBinding. 
*/ public UnaryCallSettings.Builder<UpdateGcpUserAccessBindingRequest, Operation> updateGcpUserAccessBindingSettings() { return getStubSettingsBuilder().updateGcpUserAccessBindingSettings(); } /** Returns the builder for the settings used for calls to updateGcpUserAccessBinding. */ public OperationCallSettings.Builder< UpdateGcpUserAccessBindingRequest, GcpUserAccessBinding, GcpUserAccessBindingOperationMetadata> updateGcpUserAccessBindingOperationSettings() { return getStubSettingsBuilder().updateGcpUserAccessBindingOperationSettings(); } /** Returns the builder for the settings used for calls to deleteGcpUserAccessBinding. */ public UnaryCallSettings.Builder<DeleteGcpUserAccessBindingRequest, Operation> deleteGcpUserAccessBindingSettings() { return getStubSettingsBuilder().deleteGcpUserAccessBindingSettings(); } /** Returns the builder for the settings used for calls to deleteGcpUserAccessBinding. */ public OperationCallSettings.Builder< DeleteGcpUserAccessBindingRequest, Empty, GcpUserAccessBindingOperationMetadata> deleteGcpUserAccessBindingOperationSettings() { return getStubSettingsBuilder().deleteGcpUserAccessBindingOperationSettings(); } /** Returns the builder for the settings used for calls to setIamPolicy. */ public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() { return getStubSettingsBuilder().setIamPolicySettings(); } /** Returns the builder for the settings used for calls to getIamPolicy. */ public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getStubSettingsBuilder().getIamPolicySettings(); } /** Returns the builder for the settings used for calls to testIamPermissions. */ public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return getStubSettingsBuilder().testIamPermissionsSettings(); } @Override public AccessContextManagerSettings build() throws IOException { return new AccessContextManagerSettings(this); } } }
openjdk/jdk8
37,597
jdk/src/share/classes/jdk/internal/org/objectweb/asm/MethodVisitor.java
/* * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * This file is available under and governed by the GNU General Public * License version 2 only, as published by the Free Software Foundation. * However, the following notice accompanied the original version of this * file: * * ASM: a very small and fast Java bytecode manipulation framework * Copyright (c) 2000-2011 INRIA, France Telecom * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. 
Neither the name of the copyright holders nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF * THE POSSIBILITY OF SUCH DAMAGE. */ package jdk.internal.org.objectweb.asm; /** * A visitor to visit a Java method. The methods of this class must be called in * the following order: ( <tt>visitParameter</tt> )* [ * <tt>visitAnnotationDefault</tt> ] ( <tt>visitAnnotation</tt> | * <tt>visitTypeAnnotation</tt> | <tt>visitAttribute</tt> )* [ * <tt>visitCode</tt> ( <tt>visitFrame</tt> | <tt>visit<i>X</i>Insn</tt> | * <tt>visitLabel</tt> | <tt>visitInsnAnnotation</tt> | * <tt>visitTryCatchBlock</tt> | <tt>visitTryCatchBlockAnnotation</tt> | * <tt>visitLocalVariable</tt> | <tt>visitLocalVariableAnnotation</tt> | * <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. 
In * addition, the <tt>visit<i>X</i>Insn</tt> and <tt>visitLabel</tt> methods must * be called in the sequential order of the bytecode instructions of the visited * code, <tt>visitInsnAnnotation</tt> must be called <i>after</i> the annotated * instruction, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the * labels passed as arguments have been visited, * <tt>visitTryCatchBlockAnnotation</tt> must be called <i>after</i> the * corresponding try catch block has been visited, and the * <tt>visitLocalVariable</tt>, <tt>visitLocalVariableAnnotation</tt> and * <tt>visitLineNumber</tt> methods must be called <i>after</i> the labels * passed as arguments have been visited. * * @author Eric Bruneton */ public abstract class MethodVisitor { /** * The ASM API version implemented by this visitor. The value of this field * must be one of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ protected final int api; /** * The method visitor to which this visitor must delegate method calls. May * be null. */ protected MethodVisitor mv; /** * Constructs a new {@link MethodVisitor}. * * @param api * the ASM API version implemented by this visitor. Must be one * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. */ public MethodVisitor(final int api) { this(api, null); } /** * Constructs a new {@link MethodVisitor}. * * @param api * the ASM API version implemented by this visitor. Must be one * of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}. * @param mv * the method visitor to which this visitor must delegate method * calls. May be null. */ public MethodVisitor(final int api, final MethodVisitor mv) { if (api != Opcodes.ASM4 && api != Opcodes.ASM5) { throw new IllegalArgumentException(); } this.api = api; this.mv = mv; } // ------------------------------------------------------------------------- // Parameters, annotations and non standard attributes // ------------------------------------------------------------------------- /** * Visits a parameter of this method. 
* * @param name * parameter name or null if none is provided. * @param access * the parameter's access flags, only <tt>ACC_FINAL</tt>, * <tt>ACC_SYNTHETIC</tt> or/and <tt>ACC_MANDATED</tt> are * allowed (see {@link Opcodes}). */ public void visitParameter(String name, int access) { if (api < Opcodes.ASM5) { throw new RuntimeException(); } if (mv != null) { mv.visitParameter(name, access); } } /** * Visits the default value of this annotation interface method. * * @return a visitor to the visit the actual default value of this * annotation interface method, or <tt>null</tt> if this visitor is * not interested in visiting this default value. The 'name' * parameters passed to the methods of this annotation visitor are * ignored. Moreover, exacly one visit method must be called on this * annotation visitor, followed by visitEnd. */ public AnnotationVisitor visitAnnotationDefault() { if (mv != null) { return mv.visitAnnotationDefault(); } return null; } /** * Visits an annotation of this method. * * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. */ public AnnotationVisitor visitAnnotation(String desc, boolean visible) { if (mv != null) { return mv.visitAnnotation(desc, visible); } return null; } /** * Visits an annotation on a type in the method signature. * * @param typeRef * a reference to the annotated type. The sort of this type * reference must be {@link TypeReference#METHOD_TYPE_PARAMETER * METHOD_TYPE_PARAMETER}, * {@link TypeReference#METHOD_TYPE_PARAMETER_BOUND * METHOD_TYPE_PARAMETER_BOUND}, * {@link TypeReference#METHOD_RETURN METHOD_RETURN}, * {@link TypeReference#METHOD_RECEIVER METHOD_RECEIVER}, * {@link TypeReference#METHOD_FORMAL_PARAMETER * METHOD_FORMAL_PARAMETER} or {@link TypeReference#THROWS * THROWS}. See {@link TypeReference}. 
* @param typePath * the path to the annotated type argument, wildcard bound, array * element type, or static inner type within 'typeRef'. May be * <tt>null</tt> if the annotation targets 'typeRef' as a whole. * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. */ public AnnotationVisitor visitTypeAnnotation(int typeRef, TypePath typePath, String desc, boolean visible) { if (api < Opcodes.ASM5) { throw new RuntimeException(); } if (mv != null) { return mv.visitTypeAnnotation(typeRef, typePath, desc, visible); } return null; } /** * Visits an annotation of a parameter this method. * * @param parameter * the parameter index. * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. */ public AnnotationVisitor visitParameterAnnotation(int parameter, String desc, boolean visible) { if (mv != null) { return mv.visitParameterAnnotation(parameter, desc, visible); } return null; } /** * Visits a non standard attribute of this method. * * @param attr * an attribute. */ public void visitAttribute(Attribute attr) { if (mv != null) { mv.visitAttribute(attr); } } /** * Starts the visit of the method's code, if any (i.e. non abstract method). */ public void visitCode() { if (mv != null) { mv.visitCode(); } } /** * Visits the current state of the local variables and operand stack * elements. This method must(*) be called <i>just before</i> any * instruction <b>i</b> that follows an unconditional branch instruction * such as GOTO or THROW, that is the target of a jump instruction, or that * starts an exception handler block. 
The visited types must describe the * values of the local variables and of the operand stack elements <i>just * before</i> <b>i</b> is executed.<br> * <br> * (*) this is mandatory only for classes whose version is greater than or * equal to {@link Opcodes#V1_6 V1_6}. <br> * <br> * The frames of a method must be given either in expanded form, or in * compressed form (all frames must use the same format, i.e. you must not * mix expanded and compressed frames within a single method): * <ul> * <li>In expanded form, all frames must have the F_NEW type.</li> * <li>In compressed form, frames are basically "deltas" from the state of * the previous frame: * <ul> * <li>{@link Opcodes#F_SAME} representing frame with exactly the same * locals as the previous frame and with the empty stack.</li> * <li>{@link Opcodes#F_SAME1} representing frame with exactly the same * locals as the previous frame and with single value on the stack ( * <code>nStack</code> is 1 and <code>stack[0]</code> contains value for the * type of the stack item).</li> * <li>{@link Opcodes#F_APPEND} representing frame with current locals are * the same as the locals in the previous frame, except that additional * locals are defined (<code>nLocal</code> is 1, 2 or 3 and * <code>local</code> elements contains values representing added types).</li> * <li>{@link Opcodes#F_CHOP} representing frame with current locals are the * same as the locals in the previous frame, except that the last 1-3 locals * are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li> * <li>{@link Opcodes#F_FULL} representing complete frame data.</li> * </ul> * </li> * </ul> * <br> * In both cases the first frame, corresponding to the method's parameters * and access flags, is implicit and must not be visited. Also, it is * illegal to visit two or more frames for the same code location (i.e., at * least one instruction must be visited between two calls to visitFrame). * * @param type * the type of this stack map frame. 
Must be * {@link Opcodes#F_NEW} for expanded frames, or * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND}, * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for * compressed frames. * @param nLocal * the number of local variables in the visited frame. * @param local * the local variable types in this frame. This array must not be * modified. Primitive types are represented by * {@link Opcodes#TOP}, {@link Opcodes#INTEGER}, * {@link Opcodes#FLOAT}, {@link Opcodes#LONG}, * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or * {@link Opcodes#UNINITIALIZED_THIS} (long and double are * represented by a single element). Reference types are * represented by String objects (representing internal names), * and uninitialized types by Label objects (this label * designates the NEW instruction that created this uninitialized * value). * @param nStack * the number of operand stack elements in the visited frame. * @param stack * the operand stack types in this frame. This array must not be * modified. Its content has the same format as the "local" * array. * @throws IllegalStateException * if a frame is visited just after another one, without any * instruction between the two (unless this frame is a * Opcodes#F_SAME frame, in which case it is silently ignored). */ public void visitFrame(int type, int nLocal, Object[] local, int nStack, Object[] stack) { if (mv != null) { mv.visitFrame(type, nLocal, local, nStack, stack); } } // ------------------------------------------------------------------------- // Normal instructions // ------------------------------------------------------------------------- /** * Visits a zero operand instruction. * * @param opcode * the opcode of the instruction to be visited. 
This opcode is * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB, * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM, * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR, * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D, * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S, * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN, * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER, * or MONITOREXIT. */ public void visitInsn(int opcode) { if (mv != null) { mv.visitInsn(opcode); } } /** * Visits an instruction with a single int operand. * * @param opcode * the opcode of the instruction to be visited. This opcode is * either BIPUSH, SIPUSH or NEWARRAY. * @param operand * the operand of the instruction to be visited.<br> * When opcode is BIPUSH, operand value should be between * Byte.MIN_VALUE and Byte.MAX_VALUE.<br> * When opcode is SIPUSH, operand value should be between * Short.MIN_VALUE and Short.MAX_VALUE.<br> * When opcode is NEWARRAY, operand value should be one of * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR}, * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE}, * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT}, * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}. */ public void visitIntInsn(int opcode, int operand) { if (mv != null) { mv.visitIntInsn(opcode, operand); } } /** * Visits a local variable instruction. A local variable instruction is an * instruction that loads or stores the value of a local variable. * * @param opcode * the opcode of the local variable instruction to be visited. 
* This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, * ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET. * @param var * the operand of the instruction to be visited. This operand is * the index of a local variable. */ public void visitVarInsn(int opcode, int var) { if (mv != null) { mv.visitVarInsn(opcode, var); } } /** * Visits a type instruction. A type instruction is an instruction that * takes the internal name of a class as parameter. * * @param opcode * the opcode of the type instruction to be visited. This opcode * is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF. * @param type * the operand of the instruction to be visited. This operand * must be the internal name of an object or array class (see * {@link Type#getInternalName() getInternalName}). */ public void visitTypeInsn(int opcode, String type) { if (mv != null) { mv.visitTypeInsn(opcode, type); } } /** * Visits a field instruction. A field instruction is an instruction that * loads or stores the value of a field of an object. * * @param opcode * the opcode of the type instruction to be visited. This opcode * is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD. * @param owner * the internal name of the field's owner class (see * {@link Type#getInternalName() getInternalName}). * @param name * the field's name. * @param desc * the field's descriptor (see {@link Type Type}). */ public void visitFieldInsn(int opcode, String owner, String name, String desc) { if (mv != null) { mv.visitFieldInsn(opcode, owner, name, desc); } } /** * Visits a method instruction. A method instruction is an instruction that * invokes a method. * * @param opcode * the opcode of the type instruction to be visited. This opcode * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or * INVOKEINTERFACE. * @param owner * the internal name of the method's owner class (see * {@link Type#getInternalName() getInternalName}). * @param name * the method's name. * @param desc * the method's descriptor (see {@link Type Type}). 
*/ @Deprecated public void visitMethodInsn(int opcode, String owner, String name, String desc) { if (api >= Opcodes.ASM5) { boolean itf = opcode == Opcodes.INVOKEINTERFACE; visitMethodInsn(opcode, owner, name, desc, itf); return; } if (mv != null) { mv.visitMethodInsn(opcode, owner, name, desc); } } /** * Visits a method instruction. A method instruction is an instruction that * invokes a method. * * @param opcode * the opcode of the type instruction to be visited. This opcode * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or * INVOKEINTERFACE. * @param owner * the internal name of the method's owner class (see * {@link Type#getInternalName() getInternalName}). * @param name * the method's name. * @param desc * the method's descriptor (see {@link Type Type}). * @param itf * if the method's owner class is an interface. */ public void visitMethodInsn(int opcode, String owner, String name, String desc, boolean itf) { if (api < Opcodes.ASM5) { if (itf != (opcode == Opcodes.INVOKEINTERFACE)) { throw new IllegalArgumentException( "INVOKESPECIAL/STATIC on interfaces require ASM 5"); } visitMethodInsn(opcode, owner, name, desc); return; } if (mv != null) { mv.visitMethodInsn(opcode, owner, name, desc, itf); } } /** * Visits an invokedynamic instruction. * * @param name * the method's name. * @param desc * the method's descriptor (see {@link Type Type}). * @param bsm * the bootstrap method. * @param bsmArgs * the bootstrap method constant arguments. Each argument must be * an {@link Integer}, {@link Float}, {@link Long}, * {@link Double}, {@link String}, {@link Type} or {@link Handle} * value. This method is allowed to modify the content of the * array so a caller should expect that this array may change. */ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) { if (mv != null) { mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs); } } /** * Visits a jump instruction. 
A jump instruction is an instruction that may * jump to another instruction. * * @param opcode * the opcode of the type instruction to be visited. This opcode * is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL. * @param label * the operand of the instruction to be visited. This operand is * a label that designates the instruction to which the jump * instruction may jump. */ public void visitJumpInsn(int opcode, Label label) { if (mv != null) { mv.visitJumpInsn(opcode, label); } } /** * Visits a label. A label designates the instruction that will be visited * just after it. * * @param label * a {@link Label Label} object. */ public void visitLabel(Label label) { if (mv != null) { mv.visitLabel(label); } } // ------------------------------------------------------------------------- // Special instructions // ------------------------------------------------------------------------- /** * Visits a LDC instruction. Note that new constant types may be added in * future versions of the Java Virtual Machine. To easily detect new * constant types, implementations of this method should check for * unexpected constant types, like this: * * <pre> * if (cst instanceof Integer) { * // ... * } else if (cst instanceof Float) { * // ... * } else if (cst instanceof Long) { * // ... * } else if (cst instanceof Double) { * // ... * } else if (cst instanceof String) { * // ... * } else if (cst instanceof Type) { * int sort = ((Type) cst).getSort(); * if (sort == Type.OBJECT) { * // ... * } else if (sort == Type.ARRAY) { * // ... * } else if (sort == Type.METHOD) { * // ... * } else { * // throw an exception * } * } else if (cst instanceof Handle) { * // ... * } else { * // throw an exception * } * </pre> * * @param cst * the constant to be loaded on the stack. 
This parameter must be * a non null {@link Integer}, a {@link Float}, a {@link Long}, a * {@link Double}, a {@link String}, a {@link Type} of OBJECT or * ARRAY sort for <tt>.class</tt> constants, for classes whose * version is 49.0, a {@link Type} of METHOD sort or a * {@link Handle} for MethodType and MethodHandle constants, for * classes whose version is 51.0. */ public void visitLdcInsn(Object cst) { if (mv != null) { mv.visitLdcInsn(cst); } } /** * Visits an IINC instruction. * * @param var * index of the local variable to be incremented. * @param increment * amount to increment the local variable by. */ public void visitIincInsn(int var, int increment) { if (mv != null) { mv.visitIincInsn(var, increment); } } /** * Visits a TABLESWITCH instruction. * * @param min * the minimum key value. * @param max * the maximum key value. * @param dflt * beginning of the default handler block. * @param labels * beginnings of the handler blocks. <tt>labels[i]</tt> is the * beginning of the handler block for the <tt>min + i</tt> key. */ public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) { if (mv != null) { mv.visitTableSwitchInsn(min, max, dflt, labels); } } /** * Visits a LOOKUPSWITCH instruction. * * @param dflt * beginning of the default handler block. * @param keys * the values of the keys. * @param labels * beginnings of the handler blocks. <tt>labels[i]</tt> is the * beginning of the handler block for the <tt>keys[i]</tt> key. */ public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) { if (mv != null) { mv.visitLookupSwitchInsn(dflt, keys, labels); } } /** * Visits a MULTIANEWARRAY instruction. * * @param desc * an array type descriptor (see {@link Type Type}). * @param dims * number of dimensions of the array to allocate. */ public void visitMultiANewArrayInsn(String desc, int dims) { if (mv != null) { mv.visitMultiANewArrayInsn(desc, dims); } } /** * Visits an annotation on an instruction. 
This method must be called just * <i>after</i> the annotated instruction. It can be called several times * for the same instruction. * * @param typeRef * a reference to the annotated type. The sort of this type * reference must be {@link TypeReference#INSTANCEOF INSTANCEOF}, * {@link TypeReference#NEW NEW}, * {@link TypeReference#CONSTRUCTOR_REFERENCE * CONSTRUCTOR_REFERENCE}, {@link TypeReference#METHOD_REFERENCE * METHOD_REFERENCE}, {@link TypeReference#CAST CAST}, * {@link TypeReference#CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT * CONSTRUCTOR_INVOCATION_TYPE_ARGUMENT}, * {@link TypeReference#METHOD_INVOCATION_TYPE_ARGUMENT * METHOD_INVOCATION_TYPE_ARGUMENT}, * {@link TypeReference#CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT * CONSTRUCTOR_REFERENCE_TYPE_ARGUMENT}, or * {@link TypeReference#METHOD_REFERENCE_TYPE_ARGUMENT * METHOD_REFERENCE_TYPE_ARGUMENT}. See {@link TypeReference}. * @param typePath * the path to the annotated type argument, wildcard bound, array * element type, or static inner type within 'typeRef'. May be * <tt>null</tt> if the annotation targets 'typeRef' as a whole. * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. */ public AnnotationVisitor visitInsnAnnotation(int typeRef, TypePath typePath, String desc, boolean visible) { if (api < Opcodes.ASM5) { throw new RuntimeException(); } if (mv != null) { return mv.visitInsnAnnotation(typeRef, typePath, desc, visible); } return null; } // ------------------------------------------------------------------------- // Exceptions table entries, debug information, max stack and max locals // ------------------------------------------------------------------------- /** * Visits a try catch block. * * @param start * beginning of the exception handler's scope (inclusive). 
* @param end * end of the exception handler's scope (exclusive). * @param handler * beginning of the exception handler's code. * @param type * internal name of the type of exceptions handled by the * handler, or <tt>null</tt> to catch any exceptions (for * "finally" blocks). * @throws IllegalArgumentException * if one of the labels has already been visited by this visitor * (by the {@link #visitLabel visitLabel} method). */ public void visitTryCatchBlock(Label start, Label end, Label handler, String type) { if (mv != null) { mv.visitTryCatchBlock(start, end, handler, type); } } /** * Visits an annotation on an exception handler type. This method must be * called <i>after</i> the {@link #visitTryCatchBlock} for the annotated * exception handler. It can be called several times for the same exception * handler. * * @param typeRef * a reference to the annotated type. The sort of this type * reference must be {@link TypeReference#EXCEPTION_PARAMETER * EXCEPTION_PARAMETER}. See {@link TypeReference}. * @param typePath * the path to the annotated type argument, wildcard bound, array * element type, or static inner type within 'typeRef'. May be * <tt>null</tt> if the annotation targets 'typeRef' as a whole. * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. */ public AnnotationVisitor visitTryCatchAnnotation(int typeRef, TypePath typePath, String desc, boolean visible) { if (api < Opcodes.ASM5) { throw new RuntimeException(); } if (mv != null) { return mv.visitTryCatchAnnotation(typeRef, typePath, desc, visible); } return null; } /** * Visits a local variable declaration. * * @param name * the name of a local variable. * @param desc * the type descriptor of this local variable. * @param signature * the type signature of this local variable. 
May be * <tt>null</tt> if the local variable type does not use generic * types. * @param start * the first instruction corresponding to the scope of this local * variable (inclusive). * @param end * the last instruction corresponding to the scope of this local * variable (exclusive). * @param index * the local variable's index. * @throws IllegalArgumentException * if one of the labels has not already been visited by this * visitor (by the {@link #visitLabel visitLabel} method). */ public void visitLocalVariable(String name, String desc, String signature, Label start, Label end, int index) { if (mv != null) { mv.visitLocalVariable(name, desc, signature, start, end, index); } } /** * Visits an annotation on a local variable type. * * @param typeRef * a reference to the annotated type. The sort of this type * reference must be {@link TypeReference#LOCAL_VARIABLE * LOCAL_VARIABLE} or {@link TypeReference#RESOURCE_VARIABLE * RESOURCE_VARIABLE}. See {@link TypeReference}. * @param typePath * the path to the annotated type argument, wildcard bound, array * element type, or static inner type within 'typeRef'. May be * <tt>null</tt> if the annotation targets 'typeRef' as a whole. * @param start * the fist instructions corresponding to the continuous ranges * that make the scope of this local variable (inclusive). * @param end * the last instructions corresponding to the continuous ranges * that make the scope of this local variable (exclusive). This * array must have the same size as the 'start' array. * @param index * the local variable's index in each range. This array must have * the same size as the 'start' array. * @param desc * the class descriptor of the annotation class. * @param visible * <tt>true</tt> if the annotation is visible at runtime. * @return a visitor to visit the annotation values, or <tt>null</tt> if * this visitor is not interested in visiting this annotation. 
*/ public AnnotationVisitor visitLocalVariableAnnotation(int typeRef, TypePath typePath, Label[] start, Label[] end, int[] index, String desc, boolean visible) { if (api < Opcodes.ASM5) { throw new RuntimeException(); } if (mv != null) { return mv.visitLocalVariableAnnotation(typeRef, typePath, start, end, index, desc, visible); } return null; } /** * Visits a line number declaration. * * @param line * a line number. This number refers to the source file from * which the class was compiled. * @param start * the first instruction corresponding to this line number. * @throws IllegalArgumentException * if <tt>start</tt> has not already been visited by this * visitor (by the {@link #visitLabel visitLabel} method). */ public void visitLineNumber(int line, Label start) { if (mv != null) { mv.visitLineNumber(line, start); } } /** * Visits the maximum stack size and the maximum number of local variables * of the method. * * @param maxStack * maximum stack size of the method. * @param maxLocals * maximum number of local variables for the method. */ public void visitMaxs(int maxStack, int maxLocals) { if (mv != null) { mv.visitMaxs(maxStack, maxLocals); } } /** * Visits the end of the method. This method, which is the last one to be * called, is used to inform the visitor that all the annotations and * attributes of the method have been visited. */ public void visitEnd() { if (mv != null) { mv.visitEnd(); } } }
apache/derby
37,249
java/org.apache.derby.tests/org/apache/derbyTesting/functionTests/tests/jdbcapi/StatementPoolingTest.java
/* Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.StatementPoolingTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import javax.sql.ConnectionPoolDataSource; import javax.sql.PooledConnection; import junit.framework.Test; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.BaseJDBCTestSetup; import org.apache.derbyTesting.junit.BaseTestSuite; import org.apache.derbyTesting.junit.J2EEDataSource; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.TestConfiguration; /** * A set of tests specifically targeted at connections that support statement * pooling. */ public class StatementPoolingTest extends BaseJDBCTestCase { private LogicalPooledConnectionFactory lpcf; public StatementPoolingTest(String name) { super(name); } public void tearDown() throws Exception { closePooledConnectionFactory(); super.tearDown(); } /** Closes the connection factory associated with this test. 
*/ private void closePooledConnectionFactory() throws SQLException { if (lpcf != null) { try { lpcf.close(); } finally { lpcf = null; } } } /** * Returns a logical connection from a pooled connection obtained from a * data source configured with a default statement cache size. * * @return A logical connection. * @throws SQLException if obtaining the connection fails */ private Connection getCachingConnection() throws SQLException { return getCachingConnection(7); } /** * Returns a logical connection from a pooled connection obtained from a * data source configured to have the specified statement cache size. * * @param cacheSize statement cache size * @return A logical connection. * @throws SQLException if obtaining the connection fails */ private Connection getCachingConnection(int cacheSize) throws SQLException { if (lpcf == null) { lpcf = new LogicalPooledConnectionFactory(); } return lpcf.getConnection(cacheSize); } /** * Tests that the statement cache is able to throw out prepared statements * when it reaches maximum capacity. * * @throws SQLException if something goes wrong... */ public void testCacheOverflow() throws SQLException { final int stmtCount = 150; Connection con = getCachingConnection(11); for (int i=0; i < stmtCount; i++) { // Yes, the "values + i" is intended here. PreparedStatement pStmt = con.prepareStatement("values " + i); ResultSet rs = pStmt.executeQuery(); JDBC.assertSingleValueResultSet(rs, Integer.toString(i)); pStmt.close(); } con.close(); } /** * Verifies that statement pooling is enabled by checking the names of the * implementation classes in Derby. 
* * @throws SQLException if creating the JDBC objects fail */ public void testPoolingEnabledByCheckingImplementationDetails() throws SQLException { final String conClass = "CachingLogicalConnection"; final String psClass = "LogicalPreparedStatement"; final String csClass = "LogicalCallableStatement"; final String dmdClass = "LogicalDatabaseMetaData"; Connection con = getCachingConnection(); assertClassName(con, conClass); assertClassName(con.prepareStatement("values 1"), psClass); assertClassName(con.prepareStatement("values 1", Statement.RETURN_GENERATED_KEYS), psClass); assertClassName(con.prepareStatement("values 1", Statement.NO_GENERATED_KEYS), psClass); assertClassName(con.prepareStatement("values 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY), psClass); assertClassName(con.prepareStatement("values 1", ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE), psClass); assertClassName(con.prepareStatement("values 1", (String[])null), psClass); assertClassName(con.prepareStatement("values 1", new String[] {}), psClass); assertClassName(con.prepareCall("values 1"), csClass); assertClassName(con.prepareCall("values 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY), csClass); assertClassName(con.prepareCall("values 1", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT), csClass); assertClassName(con.getMetaData(), dmdClass); } /** * Assert that the name of the class of the object is what is expected. * <p> * The assert does not consider package names, only the name passed in as * {@code expectedName} and the passed in name concatenated with "40". * <p> * <b>WARNING</b>: This method is not a general utility method. Please look * at the implementation to determine if you can use it. 
* * @param obj object to check * @param expectedName the expected name of the class * @throws AssertionFailedError if the class name is not as expected */ private static void assertClassName(Object obj, String expectedName) { assertNotNull("The expected name cannot be <null>", expectedName); assertNotNull("The object cannot be <null>", obj); String[] names = obj.getClass().getName().split("\\."); final String simpleName = names[names.length -1]; if (JDBC.vmSupportsJDBC4() && !expectedName.endsWith("40")) { if (JDBC.vmSupportsJDBC42() && expectedName.contains("Statement")) { expectedName += "42"; } else if (usingEmbedded()) { expectedName += "40"; } } assertEquals(expectedName, simpleName); } /** * This test merely checks that creating a logical prepared statement does * not fail. * * @throws SQLException if creating the prepared statement fails */ public void testPrepareStatementPath() throws SQLException { PreparedStatement ps = prepareStatement("values 9708"); ps.close(); } /** * This test merely checks that creating a logical callable statement does * not fail. * * @throws SQLException if creating the callable statement fails */ public void testPrepareCallPath() throws SQLException { CallableStatement cs = prepareCall( "CALL SYSCS_UTIL.SYSCS_SET_RUNTIMESTATISTICS(0)"); cs.close(); } /** * This test merely checks that creating a logical callable statement, which * is not really a call, does not fail. * * @throws SQLException if creating the callable statement fails */ public void testPrepareCallWithNoCallPath() throws SQLException { CallableStatement cs = prepareCall("values 1"); cs.close(); } /** * Tests that closing the prepared statement also closes the result set. * * @throws SQLException if something goes wrong... 
*/ public void testClosingPSClosesRS() throws SQLException { PreparedStatement ps = prepareStatement("values 99"); ResultSet rs = ps.executeQuery(); ps.close(); try { rs.next(); fail("Result set should have been closed"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } } /** * Tests that the connection holdability is reset, when it is first * modified, the connection closed and a new logical connection obtained. * * @throws SQLException if something goes wrong... */ public void testHoldabilityIsResetExplicitClose() throws SQLException { doTestHoldabilityIsReset(true); } /** * Tests that the connection holdability is reset, when it is first * modified, and a new logical connection obtained without first explicitly * closing the previous one. * * @throws SQLException if something goes wrong... */ public void testHoldabilityIsResetNoExplicitClose() throws SQLException { doTestHoldabilityIsReset(false); } /** * Test sequence for testing if the connection holdability is reset. * * @param closeConnection determines if the logical connection is * explicitly closed before a new one is obtained * @throws SQLException if something goes wrong... */ private void doTestHoldabilityIsReset(final boolean closeConnection) throws SQLException { // Keep track of our own connection, the framework currently creates // a new pooled connection and then obtains a connection from that. // Statement pooling only works within a single pooled connection. 
Connection con = getCachingConnection(); assertEquals("Unexpected default holdability", ResultSet.HOLD_CURSORS_OVER_COMMIT, con.getHoldability()); con.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); assertEquals("Holdability not updated", ResultSet.CLOSE_CURSORS_AT_COMMIT, con.getHoldability()); if (closeConnection) { con.close(); } con = getCachingConnection(); assertEquals("Holdability not reset", ResultSet.HOLD_CURSORS_OVER_COMMIT, con.getHoldability()); } public void testIsolationLevelIsResetExplicitCloseQuery() throws SQLException { doTestIsolationLevelIsReset(true, true); } public void testIsolationLevelIsResetExplicitCloseNoQuery() throws SQLException { doTestIsolationLevelIsReset(true, false); } public void testIsolationLevelIsResetNoExplicitCloseNoQuery() throws SQLException { doTestIsolationLevelIsReset(false, false); } public void testIsolationLevelIsResetNoExplicitCloseQuery() throws SQLException { doTestIsolationLevelIsReset(false, true); } /** * Tests if the connection isolation level is reset when a new connection * is obtained. * <p> * The two arguments are introduced to test different scenarios; explicit * and implicit connection closing, and session data caching (piggybacked * information). * * @param closeConnection tells if the connection is explicitly closed * before a new one is obtained * @param executeQuery tells if a query is executed on the connection before * a new connection is obtained. * @throws SQLException if something goes wrong... */ private void doTestIsolationLevelIsReset(final boolean closeConnection, final boolean executeQuery) throws SQLException { // Keep track of our own connection, the framework currently creates // a new pooled connection and then obtains a connection from that. // Statement pooling only works within a single pooled connection. 
Connection con = getCachingConnection(); assertEquals("Unexpected default isolation level", Connection.TRANSACTION_READ_COMMITTED, con.getTransactionIsolation()); con.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); assertEquals("Isolation level not updated", Connection.TRANSACTION_REPEATABLE_READ, con.getTransactionIsolation()); if (executeQuery) { PreparedStatement ps = con.prepareStatement("values 2"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "2"); ps.close(); } if (closeConnection) { con.close(); } con = getCachingConnection(); assertEquals("Isolation level not reset", Connection.TRANSACTION_READ_COMMITTED, con.getTransactionIsolation()); } /** * Tests that closing the caching logical connection closes the logical * prepared statement, but not the physical prepared statement. * <p> * Since there are no public interface methods to test this, the approach * taken will be as follows: * <ol> <li>Create a new table.</li> * <li>Prepare a statement selecting from the table.</li> * <li>Close the statement, putting it into the cache.</li> * <li>Delete the table.</li> * <li>Prepare the selecting statement again.</li> * <li>Execute the statement.</li> * </ol> * If the physical statement was closed when closing the caching logical * connection, the prepare will fail. If it was left open, the prepare will * succeed because the statement is fetched from the cache, but the * execution will fail because the table no longer exists. * * @throws SQLException if something goes wrong... */ public void testCachingLogicalConnectionCloseLeavesPhysicalStatementsOpen() throws SQLException { final String SELECT_SQL = "select * from clcclso"; // Keep track of our own connection, the framework currently creates // a new pooled connection and then obtains a connection from that. // Statement pooling only works within a single pooled connection. 
Connection con = getCachingConnection(); con.setAutoCommit(false); Statement stmt = createStatement(); stmt.executeUpdate("create table clcclso (id int)"); PreparedStatement ps = con.prepareStatement(SELECT_SQL); con.commit(); con.close(); try { // Should fail because the logical statement has been closed. ps.execute(); fail("Logical connection close did not close logical statement."); } catch (SQLException sqle) { // Already closed. assertSQLState("XJ012", sqle); } stmt = createStatement(); stmt.executeUpdate("drop table clcclso"); commit(); // If an exception is thrown here, statement pooling is disabled or not // working correctly. con = getCachingConnection(); ps = con.prepareStatement(SELECT_SQL); // From cache. try { // Should fail here because the referenced table has been deleted. ps.execute(); fail("Execution should have failed"); } catch (SQLException sqle) { assertSQLState("42X05", sqle); } ps.close(); // Make sure the connection is still valid. ps = con.prepareStatement("values 976"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "976"); ps.close(); con.close(); } /** * Checks if a reset of one statement affects other open statement on the * connection. * * @throws SQLException if something goes wrong... */ public void resTestCloseDoesNotAffectOtherStatement() throws SQLException { final String sql = "select * from stmtpooltest where val > 0 and val " + "<= 7 order by val"; PreparedStatement psForward = prepareStatement(sql); ResultSet rsForward = psForward.executeQuery(); assertTrue(rsForward.next()); assertEquals("1", rsForward.getString(1)); assertTrue(rsForward.next()); assertEquals("2", rsForward.getString(1)); PreparedStatement psScrollable = prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY); ResultSet rsScrollable = psScrollable.executeQuery(); // Read seven rows from the scrollable rs, position at last row. 
for (int val=1; val <= 7; val++) { assertTrue(rsScrollable.next()); assertEquals(val, rsScrollable.getInt(1)); } // Create a statement, then close it. PreparedStatement psToClose = prepareStatement( "select val from stmtpooltest where val = 5"); JDBC.assertSingleValueResultSet(psToClose.executeQuery(), "5"); psToClose.close(); assertTrue(rsForward.next()); assertEquals("3", rsForward.getString(1)); assertTrue(rsScrollable.first()); assertEquals("1", rsScrollable.getString(1)); // Should fetch a cached statement. psToClose = prepareStatement( "select val from stmtpooltest where val = 5"); JDBC.assertSingleValueResultSet(psToClose.executeQuery(), "5"); psToClose.close(); assertTrue(rsScrollable.last()); assertEquals("7", rsScrollable.getString(1)); assertFalse(rsScrollable.next()); rsScrollable.close(); assertTrue(rsForward.next()); assertEquals("4", rsForward.getString(1)); rsForward.close(); } /** * Tests that closing a logical prepared statement referring a non-existing * table works. * <p> * In this test, the prepared statement that will be made invalid by the * delete is held open by the user. * * @throws SQLException if something goes wrong... */ public void testDeleteReferringTableWhenOpen() throws SQLException { getConnection().setAutoCommit(false); // Create a table, insert a row, then create a statement selecting it. Statement stmt = createStatement(); stmt.executeUpdate("create table testdeletewhenopen (id int)"); stmt.executeUpdate("insert into testdeletewhenopen values 1"); PreparedStatement ps = prepareStatement( "select * from testdeletewhenopen"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "1"); // Now delete the table and logically close the prepared statement. stmt.executeUpdate("drop table testdeletewhenopen"); stmt.close(); ps.close(); // If running without statement pooling, you will get exception here. 
ps = prepareStatement("select * from testdeletewhenopen"); // If we get this far, there is a big change we have fetched an // invalid statement from the cache, but we won't get the exception // until we try to execute it. try { ps.executeQuery(); fail("Prepared statement not valid, referring non-existing table"); } catch (SQLException sqle) { assertSQLState("42X05", sqle); } } /** * Tests that closing a logical prepared statement referring a non-existing * table works. * <p> * In this test, the prepared statement that will be made invalid by the * delete is in the statement cache when the delete happens. * * @throws SQLException if something goes wrong... */ public void testDeleteReferringTableWhenInCache() throws SQLException { getConnection().setAutoCommit(false); // Create a table, insert a row, then create a statement selecting it. Statement stmt = createStatement(); stmt.executeUpdate("create table testdeletewhenopen (id int)"); stmt.executeUpdate("insert into testdeletewhenopen values 1"); PreparedStatement ps = prepareStatement( "select * from testdeletewhenopen"); JDBC.assertSingleValueResultSet(ps.executeQuery(), "1"); // Put the statement into the cache. ps.close(); // Now delete the table and fetch the cached prepared statement. stmt.executeUpdate("drop table testdeletewhenopen"); stmt.close(); // If running without statement pooling, you will get exception here. ps = prepareStatement("select * from testdeletewhenopen"); // If we get this far, there is a big change we have fetched an // invalid statement from the cache, but we won't get the exception // until we try to execute it. 
try { ps.executeQuery(); fail("Prepared statement not valid, referring non-existing table"); } catch (SQLException sqle) { assertSQLState("42X05", sqle); } } public void resTestCloseCursorsAtCommit() throws SQLException { doTestResultSetCloseForHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); } public void resTestHoldCursorsOverCommit() throws SQLException { doTestResultSetCloseForHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); } /** * Tests that a temporary table created in one logical connection is gone * in the next logical connection. * * @throws SQLException if the test fails for some reason */ public void testTemporaryTablesAreDeletedInNewLogicalConnection() throws SQLException { Connection lcOne = getCachingConnection(); // Create the first logical connection and the temporary table. Statement stmt = lcOne.createStatement(); stmt.executeUpdate("DECLARE GLOBAL TEMPORARY TABLE cpds_temp_table " + "(id int) ON COMMIT PRESERVE ROWS NOT LOGGED"); // The temporary table is created in SESSION. JDBC.assertEmpty( stmt.executeQuery("select * from SESSION.cpds_temp_table")); stmt.executeUpdate("insert into SESSION.cpds_temp_table values 1"); lcOne.commit(); lcOne.close(); // Create the second logical connection and try to query the temp table. Connection lcTwo = getCachingConnection(); stmt = lcTwo.createStatement(); try { stmt.executeQuery("select * from SESSION.cpds_temp_table"); fail("Temporary table still existing in new logical connection."); } catch (SQLException sqle) { // Expect syntax error. assertSQLState("42X05", sqle); } lcTwo.rollback(); lcTwo.close(); } /** * Tests if the holdability settings is taking effect, and also that the * result set is closed when the connection is closed. * * @param holdability result set holdability as specified by * {@link java.sql.ResultSet} * @throws SQLException if something goes wrong... 
*/ private void doTestResultSetCloseForHoldability(int holdability) throws SQLException { getConnection().setAutoCommit(false); PreparedStatement ps = prepareStatement( "select * from stmtpooltest order by val", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, holdability); ResultSet rs = ps.executeQuery(); assertTrue(rs.next()); assertEquals(1, rs.getInt(1)); commit(); if (holdability == ResultSet.HOLD_CURSORS_OVER_COMMIT) { assertTrue(rs.next()); assertEquals(2, rs.getInt(1)); rollback(); } getConnection().close(); try { rs.next(); fail("Should have thrown exception"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } } /** * Make sure {@link ResultSet#getStatement} returns the same object as the * one that created the result set. */ public void testGetStatementCallable() throws SQLException { doTestGetStatement(prepareCall("values 7653")); } /** * Make sure {@link ResultSet#getStatement} returns the same object as the * one that created the result set. */ public void testGetStatementPrepared() throws SQLException { doTestGetStatement(prepareStatement("values 7652")); } /** * Make sure {@link ResultSet#getStatement} returns the same object as the * one that created the result set. * * @param ps prepared or callable statement to test with * @throws SQLException if something goes wrong... */ private void doTestGetStatement(PreparedStatement ps) throws SQLException { ResultSet psRs = ps.executeQuery(); assertSame(ps, psRs.getStatement()); psRs.close(); // Try another way. ps.execute(); psRs = ps.getResultSet(); assertSame(ps, psRs.getStatement()); assertFalse(ps.getMoreResults()); assertNull(ps.getResultSet()); // This one should fail. try { psRs = ps.executeQuery("values 99"); fail("executeQuery(String) should be disallowed"); } catch (SQLException sqle) { assertSQLState("XJ016", sqle); } } /** * Checks if closing the logical connection closes the logical statement. * * @throws SQLException if something goes wrong... 
*/ public void resTestLogicalConnectionCloseInvalidatesLogicalStatement() throws SQLException { Connection con = getConnection(); PreparedStatement ps = con.prepareStatement("select * from stmtpooltest"); // Don't execute the statement. assertNotNull(ps.getMetaData()); con.close(); try { ps.getMetaData(); fail("Logical statement should be closed and throw exception"); } catch (SQLException sqle) { assertSQLState("XJ012", sqle); } con = getConnection(); ps = con.prepareStatement("select * from stmtpooltest order by val"); // Execute the statement this time. ResultSet rs = ps.executeQuery(); assertTrue(rs.next()); assertNotNull(ps.getMetaData()); rs.close(); con.close(); try { ps.getMetaData(); fail("Logical statement should be closed and throw exception"); } catch (SQLException sqle) { assertSQLState("XJ012", sqle); } } /** * Tests that nothing is committed on the connection when autocommit is * disabled. * * @throws SQLException if something goes wrong... */ public void resTestNoCommitOnReuse() throws SQLException { // Make sure the table is empty. cleanTableExceptedToBeEmpty(); // Start test final String sql = "insert into stmtpooldata (val) values ?"; getConnection().setAutoCommit(false); PreparedStatement ps = prepareStatement(sql); ps.setInt(1, 68); assertEquals(1, ps.executeUpdate()); ps.close(); ps = prepareStatement(sql); ps.setInt(1, 77); assertEquals(1, ps.executeUpdate()); Statement stmt = createStatement(); ResultSet rs =stmt.executeQuery( "select val from stmtpooldata order by val"); JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}}); rollback(); rs = stmt.executeQuery("select val from stmtpooldata order by val"); JDBC.assertEmpty(rs); } /** * Tests that autocommit is working. * * @throws SQLException if something goes wrong... */ public void resTestCommitOnReuse() throws SQLException { // Make sure the table is empty. 
cleanTableExceptedToBeEmpty(); // Start test final String sql = "insert into stmtpooldata (val) values ?"; getConnection().setAutoCommit(true); PreparedStatement ps = prepareStatement(sql); ps.setInt(1, 68); assertEquals(1, ps.executeUpdate()); ps.close(); ps = prepareStatement(sql); ps.setInt(1, 77); assertEquals(1, ps.executeUpdate()); Statement stmt = createStatement(); ResultSet rs =stmt.executeQuery( "select val from stmtpooldata order by val"); JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}}); rollback(); rs = stmt.executeQuery("select val from stmtpooldata order by val"); JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}}); // Clean up assertEquals(2, stmt.executeUpdate("delete from stmtpooldata")); } /** * Tests that nothing is committed on the connection when autocommit is * disabled. * * @throws SQLException if something goes wrong... */ public void resTestNoDataCommittedOnInvalidTransactionState() throws SQLException { // Make sure the table is empty. cleanTableExceptedToBeEmpty(); // Start test final String sql = "insert into stmtpooldata (val) values ?"; getConnection().setAutoCommit(false); PreparedStatement ps = prepareStatement(sql); ps.setInt(1, 68); assertEquals(1, ps.executeUpdate()); ps.close(); ps = prepareStatement(sql); ps.setInt(1, 77); assertEquals(1, ps.executeUpdate()); Statement stmt = createStatement(); ResultSet rs = stmt.executeQuery( "select val from stmtpooldata order by val"); JDBC.assertFullResultSet(rs, new String[][] {{"68"},{"77"}}); try { getConnection().close(); // We should not get here, but let's see what has happened. // Possible symptoms: // - lock timeout: connection resources has not been freed. // - no rows: rollback was issued. // - two rows: commit was issued. stmt = createStatement(); rs = stmt.executeQuery("select val from stmtpooldata order by val"); int rows = 0; while (rs.next()) { rows++; } fail("Connection should not be allowed to close. 
Rows in table: " + rows); } catch (SQLException sqle) { assertSQLState("25001", sqle); rollback(); } stmt = createStatement(); rs = stmt.executeQuery("select val from stmtpooldata order by val"); JDBC.assertEmpty(rs); } /** * Deletes row from a test table that is expected to be empty. * * @throws SQLException if a database operation fails */ private void cleanTableExceptedToBeEmpty() throws SQLException { Statement stmt = createStatement(); ResultSet rs = stmt.executeQuery("select * from stmtpooldata"); int rowCount = 0; while (rs.next()) { rowCount++; } rs.close(); // Delete rows if any, and print a warning if verbosity is on. if (rowCount > 0) { println("Expected empty table, got " + rowCount + " rows."); assertEquals(rowCount, stmt.executeUpdate("delete from stmtpooldata")); } } public static Test suite() { BaseTestSuite suite = new BaseTestSuite("StatementPoolingTest suite"); BaseTestSuite baseSuite = new BaseTestSuite(StatementPoolingTest.class); // Statement pooling is not yet enabled for XA. //suite.addTest(TestConfiguration.connectionXADecorator(baseSuite)); suite.addTest(TestConfiguration.connectionCPDecorator(baseSuite)); // Add tests that require data from the database. 
BaseTestSuite reqDataSuite = new BaseTestSuite("Requires data suite"); reqDataSuite.addTest(new StatementPoolingTest( "resTestCloseDoesNotAffectOtherStatement")); reqDataSuite.addTest(new StatementPoolingTest( "resTestLogicalConnectionCloseInvalidatesLogicalStatement")); reqDataSuite.addTest(new StatementPoolingTest( "resTestHoldCursorsOverCommit")); reqDataSuite.addTest(new StatementPoolingTest( "resTestCloseCursorsAtCommit")); reqDataSuite.addTest(new StatementPoolingTest( "resTestNoCommitOnReuse")); reqDataSuite.addTest(new StatementPoolingTest( "resTestCommitOnReuse")); reqDataSuite.addTest(new StatementPoolingTest( "resTestNoDataCommittedOnInvalidTransactionState")); suite.addTest(TestConfiguration.connectionCPDecorator( new BaseJDBCTestSetup(reqDataSuite) { public void setUp() throws Exception { // Generate some data we can use in the tests. Statement stmt = getConnection().createStatement(); try { stmt.executeUpdate("drop table stmtpooltest"); } catch (SQLException sqle) { assertSQLState("42Y55", sqle); } stmt.executeUpdate("create table stmtpooltest (" + "id int generated always as identity," + "val int)"); PreparedStatement ps = getConnection().prepareStatement( "insert into stmtpooltest values (DEFAULT, ?)"); // Insert data with val in range [1,7]. for (int val=1; val <= 7; val++) { ps.setInt(1, val); ps.addBatch(); } ps.executeBatch(); try { stmt.executeUpdate("drop table stmtpooldata"); } catch (SQLException sqle) { assertSQLState("42Y55", sqle); } stmt.executeUpdate("create table stmtpooldata (" + "id int generated always as identity," + "val int)"); // Leave this table empty. } })); return TestConfiguration.clientServerDecorator(suite); } /** * A simple factory for obtaining logical connections from a pooled * connection created from a data source configured with statement caching. 
* <p> * For now we only support holding one pooled connection open, but the * factory can easily be extended to hold several pooled (physical) * connection open if a test requires it. */ //@NotThreadSafe private static class LogicalPooledConnectionFactory { private int curCacheSize; private PooledConnection pooledConnection; public Connection getConnection(int cacheSize) throws SQLException { if (pooledConnection == null || curCacheSize != cacheSize) { close(); ConnectionPoolDataSource cpDs = J2EEDataSource.getConnectionPoolDataSource(); J2EEDataSource.setBeanProperty( cpDs, "maxStatements", cacheSize); J2EEDataSource.setBeanProperty( cpDs, "createDatabase", "create"); pooledConnection = cpDs.getPooledConnection(); curCacheSize = cacheSize; } return pooledConnection.getConnection(); } public void close() throws SQLException { if (pooledConnection != null) { try { pooledConnection.close(); } finally { pooledConnection = null; curCacheSize = -1; } } } } }
google/guava
37,053
guava-testlib/test/com/google/common/testing/ClassSanityTesterTest.java
/* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.testing; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.assertThrows; import com.google.common.base.Functions; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.testing.ClassSanityTester.FactoryMethodReturnsNullException; import com.google.common.testing.ClassSanityTester.ParameterHasNoDistinctValueException; import com.google.common.testing.ClassSanityTester.ParameterNotInstantiableException; import com.google.common.testing.NullPointerTester.Visibility; import com.google.errorprone.annotations.Keep; import java.io.Serializable; import java.lang.reflect.InvocationTargetException; import java.util.AbstractList; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import junit.framework.TestCase; import org.jspecify.annotations.NullUnmarked; import org.jspecify.annotations.Nullable; /** * Unit tests for {@link ClassSanityTester}. 
* * @author Ben Yu */ @NullUnmarked public class ClassSanityTesterTest extends TestCase { private final ClassSanityTester tester = new ClassSanityTester(); public void testEqualsOnReturnValues_good() throws Exception { tester.forAllPublicStaticMethods(GoodEqualsFactory.class).testEquals(); } public static class GoodEqualsFactory { public static Object good( String a, int b, // oneConstantOnly doesn't matter since it's not nullable and can be only 1 value. @SuppressWarnings("unused") OneConstantEnum oneConstantOnly, // noConstant doesn't matter since it can only be null @SuppressWarnings("unused") @Nullable NoConstantEnum noConstant) { return new GoodEquals(a, b); } // instance method ignored public Object badIgnored() { return new BadEquals(); } // primitive ignored public int returnsInt() { throw new UnsupportedOperationException(); } // void ignored public void voidMethod() { throw new UnsupportedOperationException(); } // non-public method ignored static Object badButNotPublic() { return new BadEquals(); } } public void testForAllPublicStaticMethods_noPublicStaticMethods() throws Exception { try { tester.forAllPublicStaticMethods(NoPublicStaticMethods.class).testEquals(); } catch (AssertionError expected) { assertThat(expected) .hasMessageThat() .isEqualTo( "No public static methods that return java.lang.Object or subtype are found in " + NoPublicStaticMethods.class + "."); return; } fail(); } public void testEqualsOnReturnValues_bad() throws Exception { try { tester.forAllPublicStaticMethods(BadEqualsFactory.class).testEquals(); } catch (AssertionError expected) { return; } fail(); } private static class BadEqualsFactory { /** oneConstantOnly matters now since it can be either null or the constant. 
*/ @Keep public static Object bad(String a, int b, @Nullable OneConstantEnum oneConstantOnly) { return new GoodEquals(a, b); } } public void testNullsOnReturnValues_good() throws Exception { tester.forAllPublicStaticMethods(GoodNullsFactory.class).testNulls(); } private static class GoodNullsFactory { @Keep public static Object good(String s) { return new GoodNulls(s); } } public void testNullsOnReturnValues_bad() throws Exception { try { tester.forAllPublicStaticMethods(BadNullsFactory.class).thatReturn(Object.class).testNulls(); } catch (AssertionError expected) { return; } fail(); } public void testNullsOnReturnValues_returnTypeFiltered() throws Exception { try { tester .forAllPublicStaticMethods(BadNullsFactory.class) .thatReturn(Iterable.class) .testNulls(); } catch (AssertionError expected) { assertThat(expected) .hasMessageThat() .isEqualTo( "No public static methods that return java.lang.Iterable or subtype are found in " + BadNullsFactory.class + "."); return; } fail(); } public static final class BadNullsFactory { public static Object bad(@SuppressWarnings("unused") String a) { return new BadNulls(); } private BadNullsFactory() {} } @AndroidIncompatible // TODO(cpovirk): ClassNotFoundException... 
ClassSanityTesterTest$AnInterface public void testSerializableOnReturnValues_good() throws Exception { tester.forAllPublicStaticMethods(GoodSerializableFactory.class).testSerializable(); } public static final class GoodSerializableFactory { public static Object good(Runnable r) { return r; } public static Object good(AnInterface i) { return i; } private GoodSerializableFactory() {} } public void testSerializableOnReturnValues_bad() throws Exception { try { tester.forAllPublicStaticMethods(BadSerializableFactory.class).testSerializable(); } catch (AssertionError expected) { return; } fail(); } public static final class BadSerializableFactory { public static Object bad() { return new Serializable() { @Keep private final Object notSerializable = new Object(); }; } private BadSerializableFactory() {} } public void testEqualsAndSerializableOnReturnValues_equalsIsGoodButNotSerializable() throws Exception { try { tester.forAllPublicStaticMethods(GoodEqualsFactory.class).testEqualsAndSerializable(); } catch (AssertionError expected) { return; } fail("should have failed"); } public void testEqualsAndSerializableOnReturnValues_serializableButNotEquals() throws Exception { try { tester.forAllPublicStaticMethods(GoodSerializableFactory.class).testEqualsAndSerializable(); } catch (AssertionError expected) { return; } fail("should have failed"); } @AndroidIncompatible // TODO(cpovirk): ClassNotFoundException... 
ClassSanityTesterTest$AnInterface public void testEqualsAndSerializableOnReturnValues_good() throws Exception { tester .forAllPublicStaticMethods(GoodEqualsAndSerializableFactory.class) .testEqualsAndSerializable(); } public static final class GoodEqualsAndSerializableFactory { public static Object good(AnInterface s) { return Functions.constant(s); } private GoodEqualsAndSerializableFactory() {} } public void testEqualsForReturnValues_factoryReturnsNullButNotAnnotated() throws Exception { try { tester.forAllPublicStaticMethods(FactoryThatReturnsNullButNotAnnotated.class).testEquals(); } catch (AssertionError expected) { return; } fail(); } public void testNullsForReturnValues_factoryReturnsNullButNotAnnotated() throws Exception { try { tester.forAllPublicStaticMethods(FactoryThatReturnsNullButNotAnnotated.class).testNulls(); } catch (AssertionError expected) { return; } fail(); } public void testSerializableForReturnValues_factoryReturnsNullButNotAnnotated() throws Exception { try { tester .forAllPublicStaticMethods(FactoryThatReturnsNullButNotAnnotated.class) .testSerializable(); } catch (AssertionError expected) { return; } fail(); } public void testEqualsAndSerializableForReturnValues_factoryReturnsNullButNotAnnotated() throws Exception { try { tester .forAllPublicStaticMethods(FactoryThatReturnsNullButNotAnnotated.class) .testEqualsAndSerializable(); } catch (AssertionError expected) { return; } fail(); } public static final class FactoryThatReturnsNullButNotAnnotated { public static Object bad() { return null; } private FactoryThatReturnsNullButNotAnnotated() {} } public void testEqualsForReturnValues_factoryReturnsNullAndAnnotated() throws Exception { tester.forAllPublicStaticMethods(FactoryThatReturnsNullAndAnnotated.class).testEquals(); } public void testNullsForReturnValues_factoryReturnsNullAndAnnotated() throws Exception { tester.forAllPublicStaticMethods(FactoryThatReturnsNullAndAnnotated.class).testNulls(); } public void 
testSerializableForReturnValues_factoryReturnsNullAndAnnotated() throws Exception { tester.forAllPublicStaticMethods(FactoryThatReturnsNullAndAnnotated.class).testSerializable(); } public void testEqualsAndSerializableForReturnValues_factoryReturnsNullAndAnnotated() throws Exception { tester .forAllPublicStaticMethods(FactoryThatReturnsNullAndAnnotated.class) .testEqualsAndSerializable(); } public static final class FactoryThatReturnsNullAndAnnotated { public static @Nullable Object bad() { return null; } private FactoryThatReturnsNullAndAnnotated() {} } public void testGoodEquals() throws Exception { tester.testEquals(GoodEquals.class); } public void testEquals_interface() { tester.testEquals(AnInterface.class); } public void testEquals_abstractClass() { tester.testEquals(AnAbstractClass.class); } public void testEquals_enum() { tester.testEquals(OneConstantEnum.class); } public void testBadEquals() throws Exception { try { tester.testEquals(BadEquals.class); } catch (AssertionError expected) { assertThat(expected).hasMessageThat().contains("create(null)"); return; } fail("should have failed"); } public void testBadEquals_withParameterizedType() throws Exception { try { tester.testEquals(BadEqualsWithParameterizedType.class); } catch (AssertionError expected) { assertThat(expected).hasMessageThat().contains("create([[1]])"); return; } fail("should have failed"); } public void testBadEquals_withSingleParameterValue() throws Exception { assertThrows( ParameterHasNoDistinctValueException.class, () -> tester.doTestEquals(ConstructorParameterWithOptionalNotInstantiable.class)); } public void testGoodReferentialEqualityComparison() throws Exception { tester.testEquals(UsesEnum.class); tester.testEquals(UsesReferentialEquality.class); tester.testEquals(SameListInstance.class); } public void testStreamParameterSkippedForNullTesting() throws Exception { tester.testNulls(WithStreamParameter.class); } @AndroidIncompatible // problem with equality of Type objects? 
public void testEqualsUsingReferentialEquality() throws Exception { assertBadUseOfReferentialEquality(SameIntegerInstance.class); assertBadUseOfReferentialEquality(SameLongInstance.class); assertBadUseOfReferentialEquality(SameFloatInstance.class); assertBadUseOfReferentialEquality(SameDoubleInstance.class); assertBadUseOfReferentialEquality(SameShortInstance.class); assertBadUseOfReferentialEquality(SameByteInstance.class); assertBadUseOfReferentialEquality(SameCharacterInstance.class); assertBadUseOfReferentialEquality(SameBooleanInstance.class); assertBadUseOfReferentialEquality(SameObjectInstance.class); assertBadUseOfReferentialEquality(SameStringInstance.class); assertBadUseOfReferentialEquality(SameInterfaceInstance.class); } private void assertBadUseOfReferentialEquality(Class<?> cls) throws Exception { try { tester.testEquals(cls); } catch (AssertionError expected) { assertThat(expected).hasMessageThat().contains(cls.getSimpleName() + "("); return; } fail("should have failed for " + cls); } public void testParameterNotInstantiableForEqualsTest() throws Exception { assertThrows( ParameterNotInstantiableException.class, () -> tester.doTestEquals(ConstructorParameterNotInstantiable.class)); } public void testNoDistinctValueForEqualsTest() throws Exception { assertThrows( ParameterHasNoDistinctValueException.class, () -> tester.doTestEquals(ConstructorParameterSingleValue.class)); } public void testConstructorThrowsForEqualsTest() throws Exception { assertThrows( InvocationTargetException.class, () -> tester.doTestEquals(ConstructorThrows.class)); } public void testFactoryMethodReturnsNullForEqualsTest() throws Exception { assertThrows( FactoryMethodReturnsNullException.class, () -> tester.doTestEquals(FactoryMethodReturnsNullAndAnnotated.class)); } public void testFactoryMethodReturnsNullButNotAnnotatedInEqualsTest() throws Exception { try { tester.testEquals(FactoryMethodReturnsNullButNotAnnotated.class); } catch (AssertionError expected) { return; } 
fail("should have failed"); } public void testNoEqualsChecksOnEnum() throws Exception { tester.testEquals(OneConstantEnum.class); tester.testEquals(NoConstantEnum.class); tester.testEquals(TimeUnit.class); } public void testNoEqualsChecksOnInterface() throws Exception { tester.testEquals(Runnable.class); } public void testNoEqualsChecksOnAnnotation() throws Exception { tester.testEquals(MyAnnotation.class); } public void testGoodNulls() throws Exception { tester.testNulls(GoodNulls.class); } public void testNoNullCheckNeededDespiteNotInstantiable() throws Exception { tester.doTestNulls(NoNullCheckNeededDespiteNotInstantiable.class, Visibility.PACKAGE); } public void testNulls_interface() { tester.testNulls(AnInterface.class); } public void testNulls_abstractClass() { tester.testNulls(AnAbstractClass.class); } public void testNulls_enum() throws Exception { tester.testNulls(OneConstantEnum.class); tester.testNulls(NoConstantEnum.class); tester.testNulls(TimeUnit.class); } public void testNulls_parameterOptionalNotInstantiable() throws Exception { tester.testNulls(ConstructorParameterWithOptionalNotInstantiable.class); } public void testEnumFailsToCheckNull() throws Exception { try { tester.testNulls(EnumFailsToCheckNull.class); } catch (AssertionError expected) { return; } fail("should have failed"); } public void testNoNullChecksOnInterface() throws Exception { tester.testNulls(Runnable.class); } public void testNoNullChecksOnAnnotation() throws Exception { tester.testNulls(MyAnnotation.class); } public void testBadNulls() throws Exception { try { tester.testNulls(BadNulls.class); } catch (AssertionError expected) { return; } fail("should have failed"); } public void testInstantiate_factoryMethodReturnsNullButNotAnnotated() throws Exception { try { FactoryMethodReturnsNullButNotAnnotated unused = tester.instantiate(FactoryMethodReturnsNullButNotAnnotated.class); } catch (AssertionError expected) { assertThat(expected).hasMessageThat().contains("@Nullable"); return; 
} fail("should have failed"); } public void testInstantiate_factoryMethodReturnsNullAndAnnotated() throws Exception { assertThrows( FactoryMethodReturnsNullException.class, () -> tester.instantiate(FactoryMethodReturnsNullAndAnnotated.class)); } public void testInstantiate_factoryMethodAcceptsNull() throws Exception { assertNull(tester.instantiate(FactoryMethodAcceptsNull.class).name); } public void testInstantiate_factoryMethodDoesNotAcceptNull() throws Exception { assertNotNull(tester.instantiate(FactoryMethodDoesNotAcceptNull.class).name); } public void testInstantiate_constructorAcceptsNull() throws Exception { assertNull(tester.instantiate(ConstructorAcceptsNull.class).name); } public void testInstantiate_constructorDoesNotAcceptNull() throws Exception { assertNotNull(tester.instantiate(ConstructorDoesNotAcceptNull.class).name); } public void testInstantiate_notInstantiable() throws Exception { assertNull(tester.instantiate(NotInstantiable.class)); } public void testInstantiate_noConstantEnum() throws Exception { assertNull(tester.instantiate(NoConstantEnum.class)); } public void testInstantiate_oneConstantEnum() throws Exception { assertEquals(OneConstantEnum.A, tester.instantiate(OneConstantEnum.class)); } public void testInstantiate_interface() throws Exception { assertNull(tester.instantiate(Runnable.class)); } public void testInstantiate_abstractClass() throws Exception { assertNull(tester.instantiate(AbstractList.class)); } public void testInstantiate_annotation() throws Exception { assertNull(tester.instantiate(MyAnnotation.class)); } public void testInstantiate_setDefault() throws Exception { NotInstantiable x = new NotInstantiable(); tester.setDefault(NotInstantiable.class, x); assertNotNull(tester.instantiate(ConstructorParameterNotInstantiable.class)); } public void testSetDistinctValues_equalInstances() { assertThrows( IllegalArgumentException.class, () -> tester.setDistinctValues(String.class, "", "")); } public void 
testInstantiate_setDistinctValues() throws Exception { NotInstantiable x = new NotInstantiable(); NotInstantiable y = new NotInstantiable(); tester.setDistinctValues(NotInstantiable.class, x, y); assertNotNull(tester.instantiate(ConstructorParameterNotInstantiable.class)); tester.testEquals(ConstructorParameterMapOfNotInstantiable.class); } public void testInstantiate_constructorThrows() throws Exception { assertThrows( InvocationTargetException.class, () -> tester.instantiate(ConstructorThrows.class)); } public void testInstantiate_factoryMethodThrows() throws Exception { assertThrows( InvocationTargetException.class, () -> tester.instantiate(FactoryMethodThrows.class)); } public void testInstantiate_constructorParameterNotInstantiable() throws Exception { assertThrows( ParameterNotInstantiableException.class, () -> tester.instantiate(ConstructorParameterNotInstantiable.class)); } public void testInstantiate_factoryMethodParameterNotInstantiable() throws Exception { assertThrows( ParameterNotInstantiableException.class, () -> tester.instantiate(FactoryMethodParameterNotInstantiable.class)); } public void testInstantiate_instantiableFactoryMethodChosen() throws Exception { assertEquals("good", tester.instantiate(InstantiableFactoryMethodChosen.class).name); } @AndroidIncompatible // TODO(cpovirk): ClassNotFoundException... ClassSanityTesterTest$AnInterface public void testInterfaceProxySerializable() throws Exception { SerializableTester.reserializeAndAssert(tester.instantiate(HasAnInterface.class)); } public void testReturnValuesFromAnotherPackageIgnoredForNullTests() throws Exception { new ClassSanityTester().forAllPublicStaticMethods(JdkObjectFactory.class).testNulls(); } /** String doesn't check nulls as we expect. But the framework should ignore. 
*/ private static class JdkObjectFactory { @Keep public static Object create() { return new ArrayList<>(); } } static class HasAnInterface implements Serializable { private final AnInterface i; public HasAnInterface(AnInterface i) { this.i = i; } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof HasAnInterface) { HasAnInterface that = (HasAnInterface) obj; return i.equals(that.i); } else { return false; } } @Override public int hashCode() { return i.hashCode(); } } static class InstantiableFactoryMethodChosen { final String name; private InstantiableFactoryMethodChosen(String name) { this.name = name; } public InstantiableFactoryMethodChosen(NotInstantiable x) { checkNotNull(x); this.name = "x1"; } public static InstantiableFactoryMethodChosen create(NotInstantiable x) { return new InstantiableFactoryMethodChosen(x); } public static InstantiableFactoryMethodChosen create(String s) { checkNotNull(s); return new InstantiableFactoryMethodChosen("good"); } } public void testInstantiate_instantiableConstructorChosen() throws Exception { assertEquals("good", tester.instantiate(InstantiableConstructorChosen.class).name); } public void testEquals_setOfNonInstantiable() throws Exception { assertThrows( ParameterNotInstantiableException.class, () -> new ClassSanityTester().doTestEquals(SetWrapper.class)); } private abstract static class Wrapper { private final Object wrapped; Wrapper(Object wrapped) { this.wrapped = checkNotNull(wrapped); } @Override public boolean equals(@Nullable Object obj) { // In general getClass().isInstance() is bad for equals. // But here we fully control the subclasses to ensure symmetry. 
if (getClass().isInstance(obj)) { Wrapper that = (Wrapper) obj; return wrapped.equals(that.wrapped); } return false; } @Override public int hashCode() { return wrapped.hashCode(); } @Override public String toString() { return wrapped.toString(); } } private static class SetWrapper extends Wrapper { @Keep public SetWrapper(Set<NotInstantiable> wrapped) { super(wrapped); } } static class InstantiableConstructorChosen { final String name; public InstantiableConstructorChosen(String name) { checkNotNull(name); this.name = "good"; } public InstantiableConstructorChosen(NotInstantiable x) { checkNotNull(x); this.name = "x1"; } public static InstantiableFactoryMethodChosen create(NotInstantiable x) { return new InstantiableFactoryMethodChosen(x); } } static class GoodEquals { private final String a; private final int b; private GoodEquals(String a, int b) { this.a = checkNotNull(a); this.b = b; } // ignored by testEquals() GoodEquals(@SuppressWarnings("unused") NotInstantiable x) { this.a = "x"; this.b = -1; } // will keep trying public GoodEquals(@SuppressWarnings("unused") NotInstantiable x, int b) { this.a = "x"; this.b = b; } // keep trying @Keep static GoodEquals create(int a, int b) { throw new RuntimeException(); } // Good! static GoodEquals create(String a, int b) { return new GoodEquals(a, b); } // keep trying @Keep public static @Nullable GoodEquals createMayReturnNull(int a, int b) { return null; } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof GoodEquals) { GoodEquals that = (GoodEquals) obj; return a.equals(that.a) && b == that.b; } else { return false; } } @Override public int hashCode() { return 0; } } static class BadEquals { public BadEquals() {} // ignored by testEquals() since it has less parameters. 
public static BadEquals create(@SuppressWarnings("unused") @Nullable String s) { return new BadEquals(); } @Override public boolean equals(@Nullable Object obj) { return obj instanceof BadEquals; } @Override public int hashCode() { return 0; } } static class SameIntegerInstance { private final Integer i; public SameIntegerInstance(Integer i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameIntegerInstance) { SameIntegerInstance that = (SameIntegerInstance) obj; return i == that.i; } return false; } } static class SameLongInstance { private final Long i; public SameLongInstance(Long i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameLongInstance) { SameLongInstance that = (SameLongInstance) obj; return i == that.i; } return false; } } static class SameFloatInstance { private final Float i; public SameFloatInstance(Float i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameFloatInstance) { SameFloatInstance that = (SameFloatInstance) obj; return i == that.i; } return false; } } static class SameDoubleInstance { private final Double i; public SameDoubleInstance(Double i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameDoubleInstance) { SameDoubleInstance that = (SameDoubleInstance) obj; return i == that.i; } return false; } } static class 
SameShortInstance { private final Short i; public SameShortInstance(Short i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameShortInstance) { SameShortInstance that = (SameShortInstance) obj; return i == that.i; } return false; } } static class SameByteInstance { private final Byte i; public SameByteInstance(Byte i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings({"BoxedPrimitiveEquality", "NumericEquality"}) public boolean equals(@Nullable Object obj) { if (obj instanceof SameByteInstance) { SameByteInstance that = (SameByteInstance) obj; return i == that.i; } return false; } } static class SameCharacterInstance { private final Character i; public SameCharacterInstance(Character i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings("BoxedPrimitiveEquality") public boolean equals(@Nullable Object obj) { if (obj instanceof SameCharacterInstance) { SameCharacterInstance that = (SameCharacterInstance) obj; return i == that.i; } return false; } } static class SameBooleanInstance { private final Boolean i; public SameBooleanInstance(Boolean i) { this.i = checkNotNull(i); } @Override public int hashCode() { return i.hashCode(); } @Override @SuppressWarnings("BoxedPrimitiveEquality") public boolean equals(@Nullable Object obj) { if (obj instanceof SameBooleanInstance) { SameBooleanInstance that = (SameBooleanInstance) obj; return i == that.i; } return false; } } static class SameStringInstance { private final String s; public SameStringInstance(String s) { this.s = checkNotNull(s); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof SameStringInstance) { SameStringInstance that = 
(SameStringInstance) obj; return s == that.s; } return false; } } static class SameObjectInstance { private final Object s; public SameObjectInstance(Object s) { this.s = checkNotNull(s); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof SameObjectInstance) { SameObjectInstance that = (SameObjectInstance) obj; return s == that.s; } return false; } } static class SameInterfaceInstance { private final Runnable s; public SameInterfaceInstance(Runnable s) { this.s = checkNotNull(s); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof SameInterfaceInstance) { SameInterfaceInstance that = (SameInterfaceInstance) obj; return s == that.s; } return false; } } static class SameListInstance { private final List<?> s; public SameListInstance(List<?> s) { this.s = checkNotNull(s); } @Override public int hashCode() { return System.identityHashCode(s); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof SameListInstance) { SameListInstance that = (SameListInstance) obj; return s == that.s; } return false; } } static class WithStreamParameter { private final List<?> list; // This should be ignored. 
public WithStreamParameter(Stream<?> s, String str) { this.list = s.collect(Collectors.toList()); checkNotNull(str); } } static class UsesReferentialEquality { private final ReferentialEquality s; public UsesReferentialEquality(ReferentialEquality s) { this.s = checkNotNull(s); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof UsesReferentialEquality) { UsesReferentialEquality that = (UsesReferentialEquality) obj; return s == that.s; } return false; } } static class UsesEnum { private final TimeUnit s; public UsesEnum(TimeUnit s) { this.s = checkNotNull(s); } @Override public int hashCode() { return s.hashCode(); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof UsesEnum) { UsesEnum that = (UsesEnum) obj; return s == that.s; } return false; } } public static class ReferentialEquality { public ReferentialEquality() {} } static class BadEqualsWithParameterizedType { // ignored by testEquals() since it has less parameters. public BadEqualsWithParameterizedType() {} public static BadEqualsWithParameterizedType create( @SuppressWarnings("unused") ImmutableList<Iterable<? 
extends String>> s) { return new BadEqualsWithParameterizedType(); } @Override public boolean equals(@Nullable Object obj) { return obj instanceof BadEqualsWithParameterizedType; } @Override public int hashCode() { return 0; } } static class GoodNulls { public GoodNulls(String s) { checkNotNull(s); } public void rejectNull(String s) { checkNotNull(s); } } public static class BadNulls { public void failsToRejectNull(@SuppressWarnings("unused") String s) {} } public static class NoNullCheckNeededDespiteNotInstantiable { public NoNullCheckNeededDespiteNotInstantiable(NotInstantiable x) { checkNotNull(x); } @Keep void primitiveOnly(int i) {} @Keep void nullableOnly(@Nullable String s) {} public void noParameter() {} @Keep void primitiveAndNullable(@Nullable String s, int i) {} } static class FactoryMethodReturnsNullButNotAnnotated { private FactoryMethodReturnsNullButNotAnnotated() {} static FactoryMethodReturnsNullButNotAnnotated returnsNull() { return null; } } static class FactoryMethodReturnsNullAndAnnotated { private FactoryMethodReturnsNullAndAnnotated() {} public static @Nullable FactoryMethodReturnsNullAndAnnotated returnsNull() { return null; } } static class FactoryMethodAcceptsNull { final String name; private FactoryMethodAcceptsNull(String name) { this.name = name; } static FactoryMethodAcceptsNull create(@Nullable String name) { return new FactoryMethodAcceptsNull(name); } } static class FactoryMethodDoesNotAcceptNull { final String name; private FactoryMethodDoesNotAcceptNull(String name) { this.name = checkNotNull(name); } public static FactoryMethodDoesNotAcceptNull create(String name) { return new FactoryMethodDoesNotAcceptNull(name); } } static class ConstructorAcceptsNull { final String name; public ConstructorAcceptsNull(@Nullable String name) { this.name = name; } } static class ConstructorDoesNotAcceptNull { final String name; ConstructorDoesNotAcceptNull(String name) { this.name = checkNotNull(name); } } static class 
ConstructorParameterNotInstantiable { public ConstructorParameterNotInstantiable(@SuppressWarnings("unused") NotInstantiable x) {} } static class ConstructorParameterMapOfNotInstantiable { private final Map<NotInstantiable, NotInstantiable> m; public ConstructorParameterMapOfNotInstantiable(Map<NotInstantiable, NotInstantiable> m) { this.m = checkNotNull(m); } @Override public boolean equals(@Nullable Object obj) { if (obj instanceof ConstructorParameterMapOfNotInstantiable) { return m.equals(((ConstructorParameterMapOfNotInstantiable) obj).m); } else { return false; } } @Override public int hashCode() { return m.hashCode(); } } // Test that we should get a distinct parameter error when doing equals test. static class ConstructorParameterWithOptionalNotInstantiable { public ConstructorParameterWithOptionalNotInstantiable(Optional<NotInstantiable> x) { checkNotNull(x); } @Override public boolean equals(@Nullable Object obj) { throw new UnsupportedOperationException(); } @Override public int hashCode() { throw new UnsupportedOperationException(); } } static class ConstructorParameterSingleValue { public ConstructorParameterSingleValue(@SuppressWarnings("unused") Singleton s) {} @Override public boolean equals(@Nullable Object obj) { return obj instanceof ConstructorParameterSingleValue; } @Override public int hashCode() { return 1; } public static class Singleton { public static final Singleton INSTANCE = new Singleton(); private Singleton() {} } } static class FactoryMethodParameterNotInstantiable { private FactoryMethodParameterNotInstantiable() {} static FactoryMethodParameterNotInstantiable create( @SuppressWarnings("unused") NotInstantiable x) { return new FactoryMethodParameterNotInstantiable(); } } static class ConstructorThrows { public ConstructorThrows() { throw new RuntimeException(); } } static class FactoryMethodThrows { private FactoryMethodThrows() {} public static FactoryMethodThrows create() { throw new RuntimeException(); } } static class 
NotInstantiable { private NotInstantiable() {} } private enum NoConstantEnum {} private enum OneConstantEnum { A } private enum EnumFailsToCheckNull { A; @Keep public void failToCheckNull(String s) {} } private interface AnInterface {} private abstract static class AnAbstractClass { @Keep public AnAbstractClass(String s) {} @Keep public void failsToCheckNull(String s) {} } private static class NoPublicStaticMethods { @Keep // To test non-public factory isn't used. static String notPublic() { return ""; } } @interface MyAnnotation {} }
googleapis/google-cloud-java
37,297
java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/UpdateConversationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/conversational_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Request for UpdateConversation method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.UpdateConversationRequest} */ public final class UpdateConversationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) UpdateConversationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateConversationRequest.newBuilder() to construct. 
private UpdateConversationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateConversationRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateConversationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateConversationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateConversationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest.class, com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest.Builder.class); } private int bitField0_; public static final int CONVERSATION_FIELD_NUMBER = 1; private com.google.cloud.discoveryengine.v1alpha.Conversation conversation_; /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversation field is set. */ @java.lang.Override public boolean hasConversation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversation. */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.Conversation getConversation() { return conversation_ == null ? 
com.google.cloud.discoveryengine.v1alpha.Conversation.getDefaultInstance() : conversation_; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ConversationOrBuilder getConversationOrBuilder() { return conversation_ == null ? com.google.cloud.discoveryengine.v1alpha.Conversation.getDefaultInstance() : conversation_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. 
The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getConversation()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversation()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest other = (com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) obj; if (hasConversation() != other.hasConversation()) return false; if (hasConversation()) { if (!getConversation().equals(other.getConversation())) 
return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasConversation()) { hash = (37 * hash) + CONVERSATION_FIELD_NUMBER; hash = (53 * hash) + getConversation().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateConversation method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.UpdateConversationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateConversationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateConversationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest.class, com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getConversationFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; conversation_ = null; if (conversationBuilder_ != null) { conversationBuilder_.dispose(); conversationBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateConversationRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest build() { com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest buildPartial() { com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest result = new com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.conversation_ = 
conversationBuilder_ == null ? conversation_ : conversationBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) { return mergeFrom( (com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest other) { if (other == com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest .getDefaultInstance()) return this; if (other.hasConversation()) { mergeConversation(other.getConversation()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final 
boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getConversationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.discoveryengine.v1alpha.Conversation conversation_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Conversation, com.google.cloud.discoveryengine.v1alpha.Conversation.Builder, com.google.cloud.discoveryengine.v1alpha.ConversationOrBuilder> conversationBuilder_; /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversation field is set. */ public boolean hasConversation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversation. 
*/ public com.google.cloud.discoveryengine.v1alpha.Conversation getConversation() { if (conversationBuilder_ == null) { return conversation_ == null ? com.google.cloud.discoveryengine.v1alpha.Conversation.getDefaultInstance() : conversation_; } else { return conversationBuilder_.getMessage(); } } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversation(com.google.cloud.discoveryengine.v1alpha.Conversation value) { if (conversationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } conversation_ = value; } else { conversationBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversation( com.google.cloud.discoveryengine.v1alpha.Conversation.Builder builderForValue) { if (conversationBuilder_ == null) { conversation_ = builderForValue.build(); } else { conversationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeConversation(com.google.cloud.discoveryengine.v1alpha.Conversation value) { if (conversationBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && conversation_ != null && conversation_ != com.google.cloud.discoveryengine.v1alpha.Conversation.getDefaultInstance()) { getConversationBuilder().mergeFrom(value); } else { conversation_ = value; } } else { conversationBuilder_.mergeFrom(value); } if (conversation_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearConversation() { bitField0_ = (bitField0_ & ~0x00000001); conversation_ = null; if (conversationBuilder_ != null) { conversationBuilder_.dispose(); conversationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1alpha.Conversation.Builder getConversationBuilder() { bitField0_ |= 0x00000001; onChanged(); return getConversationFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1alpha.ConversationOrBuilder getConversationOrBuilder() { if (conversationBuilder_ != null) { return conversationBuilder_.getMessageOrBuilder(); } else { return conversation_ == null ? 
com.google.cloud.discoveryengine.v1alpha.Conversation.getDefaultInstance() : conversation_; } } /** * * * <pre> * Required. The Conversation to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Conversation conversation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Conversation, com.google.cloud.discoveryengine.v1alpha.Conversation.Builder, com.google.cloud.discoveryengine.v1alpha.ConversationOrBuilder> getConversationFieldBuilder() { if (conversationBuilder_ == null) { conversationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Conversation, com.google.cloud.discoveryengine.v1alpha.Conversation.Builder, com.google.cloud.discoveryengine.v1alpha.ConversationOrBuilder>( getConversation(), getParentForChildren(), isClean()); conversation_ = null; } return conversationBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. 
The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. 
The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. 
The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Indicates which fields in the provided * [Conversation][google.cloud.discoveryengine.v1alpha.Conversation] to * update. The following are NOT supported: * * * [Conversation.name][google.cloud.discoveryengine.v1alpha.Conversation.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.UpdateConversationRequest) private static final 
com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest(); } public static com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateConversationRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateConversationRequest>() { @java.lang.Override public UpdateConversationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateConversationRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateConversationRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateConversationRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,243
java-securitycentermanagement/proto-google-cloud-securitycentermanagement-v1/src/main/java/com/google/cloud/securitycentermanagement/v1/ListSecurityCenterServicesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycentermanagement/v1/security_center_management.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycentermanagement.v1; /** * * * <pre> * Request message for * [SecurityCenterManagement.ListSecurityCenterServices][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityCenterServices]. * </pre> * * Protobuf type {@code google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest} */ public final class ListSecurityCenterServicesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) ListSecurityCenterServicesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListSecurityCenterServicesRequest.newBuilder() to construct. 
private ListSecurityCenterServicesRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSecurityCenterServicesRequest() { parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSecurityCenterServicesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityCenterServicesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityCenterServicesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest.class, com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest.Builder .class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. The maximum number of results to return in a single response. * Default is 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SHOW_ELIGIBLE_MODULES_ONLY_FIELD_NUMBER = 4; private boolean showEligibleModulesOnly_ = false; /** * * * <pre> * Flag that, when set, is used to filter the module settings that are shown. * The default setting is that all modules are shown. * </pre> * * <code>bool show_eligible_modules_only = 4;</code> * * @return The showEligibleModulesOnly. 
*/ @java.lang.Override public boolean getShowEligibleModulesOnly() { return showEligibleModulesOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (showEligibleModulesOnly_ != false) { output.writeBool(4, showEligibleModulesOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (showEligibleModulesOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, showEligibleModulesOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest)) { return super.equals(obj); } 
com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest other = (com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (getShowEligibleModulesOnly() != other.getShowEligibleModulesOnly()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + SHOW_ELIGIBLE_MODULES_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getShowEligibleModulesOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [SecurityCenterManagement.ListSecurityCenterServices][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListSecurityCenterServices]. 
* </pre> * * Protobuf type {@code * google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityCenterServicesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityCenterServicesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest.class, com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest.Builder .class); } // Construct using // com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; showEligibleModulesOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto .internal_static_google_cloud_securitycentermanagement_v1_ListSecurityCenterServicesRequest_descriptor; } @java.lang.Override public 
com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest getDefaultInstanceForType() { return com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest build() { com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest buildPartial() { com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest result = new com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.showEligibleModulesOnly_ = showEligibleModulesOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) { return mergeFrom( (com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest other) { if (other == com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.getShowEligibleModulesOnly() != false) { setShowEligibleModulesOnly(other.getShowEligibleModulesOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 
0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { showEligibleModulesOnly_ = input.readBool(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the parent to list Security Command Center services, * in one of the following formats: * * * `organizations/{organization}/locations/{location}` * * `folders/{folder}/locations/{location}` * * `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of results to return in a single response. * Default is 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of results to return in a single response. * Default is 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of results to return in a single response. * Default is 10, minimum is 1, maximum is 1000. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A pagination token returned from a previous request. Provide this * token to retrieve the next page of results. * * When paginating, the rest of the request must match the request that * generated the page token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private boolean showEligibleModulesOnly_; /** * * * <pre> * Flag that, when set, is used to filter the module settings that are shown. * The default setting is that all modules are shown. * </pre> * * <code>bool show_eligible_modules_only = 4;</code> * * @return The showEligibleModulesOnly. */ @java.lang.Override public boolean getShowEligibleModulesOnly() { return showEligibleModulesOnly_; } /** * * * <pre> * Flag that, when set, is used to filter the module settings that are shown. * The default setting is that all modules are shown. * </pre> * * <code>bool show_eligible_modules_only = 4;</code> * * @param value The showEligibleModulesOnly to set. * @return This builder for chaining. */ public Builder setShowEligibleModulesOnly(boolean value) { showEligibleModulesOnly_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Flag that, when set, is used to filter the module settings that are shown. * The default setting is that all modules are shown. * </pre> * * <code>bool show_eligible_modules_only = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearShowEligibleModulesOnly() { bitField0_ = (bitField0_ & ~0x00000008); showEligibleModulesOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest) private static final com.google.cloud.securitycentermanagement.v1 .ListSecurityCenterServicesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest(); } public static com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSecurityCenterServicesRequest> PARSER = new com.google.protobuf.AbstractParser<ListSecurityCenterServicesRequest>() { @java.lang.Override public ListSecurityCenterServicesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListSecurityCenterServicesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSecurityCenterServicesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycentermanagement.v1.ListSecurityCenterServicesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/openjpa
37,179
openjpa-persistence-jdbc/src/test/java/org/apache/openjpa/persistence/jdbc/order/TestOrderColumn.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.openjpa.persistence.jdbc.order; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.TreeMap; import jakarta.persistence.Query; import org.apache.openjpa.jdbc.conf.JDBCConfiguration; import org.apache.openjpa.jdbc.meta.ClassMapping; import org.apache.openjpa.jdbc.meta.FieldMapping; import org.apache.openjpa.jdbc.schema.Column; import org.apache.openjpa.jdbc.sql.DBDictionary; import org.apache.openjpa.persistence.OpenJPAEntityManagerFactorySPI; import org.apache.openjpa.persistence.OpenJPAEntityManagerSPI; import org.apache.openjpa.persistence.test.SingleEMFTestCase; public class TestOrderColumn extends SingleEMFTestCase { private Student[] students = new Student[12]; @Override public void setUp() { super.setUp( CLEAR_TABLES, Person.class, Player.class, BattingOrder.class, Trainer.class, Game.class, Inning.class, Course.class, Student.class, Owner.class, Bicycle.class, Car.class, Home.class, Widget.class,BiOrderMappedByEntity.class, BiOrderEntity.class ); try { createQueryData(); } catch 
(Exception e) { throw new RuntimeException(e); } } /* * Verifies that a collection remains contiguous and element * indexes are reordered if an element is removed for a * OneToMany relationship */ public void testOneToManyElementRemoval() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "batters", "batters_ORDER");// "batters_ORDER"); // Create some data Player[] players = new Player[10]; ArrayList<Player> playersArr = new ArrayList<>(); em.getTransaction().begin(); for (int i = 0; i < 10 ; i++) { players[i] = new Player("Player" + i, i+100); em.persist(players[i]); playersArr.add(players[i]); } em.getTransaction().commitAndResume(); // Persist the related entities BattingOrder order = new BattingOrder(); order.setBatters(playersArr); em.persist(order); em.getTransaction().commit(); em.refresh(order); em.clear(); // Verify order is correct. BattingOrder newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); for (int i = 0; i < 10 ; i++) { assertEquals(newOrder.getBatters().get(i), (players[i])); } // Remove some items em.getTransaction().begin(); newOrder.getBatters().remove(1); playersArr.remove(1); newOrder.getBatters().remove(5); playersArr.remove(5); em.getTransaction().commit(); em.clear(); // Simple assertion via find newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); assertNotNull(newOrder.getBatters()); assertEquals(playersArr.size(), newOrder.getBatters().size()); for (int i = 0; i < playersArr.size() ; i++) { assertEquals(newOrder.getBatters().get(i), (playersArr.get(i))); } // Stronger assertion via INDEX value validateIndexAndValues(em, "BattingOrder", "batters", 0, playersArr.toArray(), "id", order.id); em.close(); } /* * Verifies that a collection remains contiguous and element * indexes are reordered if an element is removed for a * OneToMany relationship */ public void testOneToManyBiDirElementRemoval() { 
OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BiOrderMappedByEntity.class, "bo2mEntities", "bo2mEntities_ORDER"); // Create some data BiOrderMappedByEntity bome = new BiOrderMappedByEntity(); bome.setId(1); List<BiOrderEntity> boea = new ArrayList<>(); for (int i = 0; i < 5; i++) { BiOrderEntity boe = new BiOrderEntity(); boe.setId(i+1); boe.setName("Entity" + i); boe.setEntity(bome); boea.add(boe); bome.addBo2mEntity(boe); } // Persist em.getTransaction().begin(); em.persist(bome); for (BiOrderEntity boe : boea) { em.persist(boe); } em.getTransaction().commit(); em.refresh(bome); em.clear(); // Verify order is correct. BiOrderMappedByEntity newBome = em.find(BiOrderMappedByEntity.class, bome.getId()); assertNotNull(newBome); for (int i = 0; i < 5 ; i++) { assertEquals(newBome.getBo2mEntities().get(i), boea.get(i)); } // Remove an item em.getTransaction().begin(); newBome.getBo2mEntities().get(2).setEntity(null); newBome.removeBo2mEntity(2); boea.remove(2); em.getTransaction().commit(); em.clear(); // Simple assertion via find newBome = em.find(BiOrderMappedByEntity.class, bome.getId()); assertNotNull(newBome); assertNotNull(newBome.getBo2mEntities()); assertEquals(boea.size(), newBome.getBo2mEntities().size()); for (int i = 0; i < boea.size() ; i++) { assertEquals(newBome.getBo2mEntities().get(i), (boea.get(i))); } // Stronger assertion via INDEX value validateIndexAndValues(em, "BiOrderMappedByEntity", "bo2mEntities", 0, boea.toArray(), "id", bome.getId()); em.close(); } /* * Verifies that a collection remains contiguous and element * indexes are reordered if an element is removed for an * ElementCollection */ public void testElementCollectionElementRemoval() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); Game game = new Game(); // Verify field name is the default via fm validateOrderColumnName(Game.class, "rainDates", "dateOrder"); // Create a list of basic types 
java.sql.Date dates[] = new java.sql.Date[10]; ArrayList<java.sql.Date> rainDates = new ArrayList<>(10); Calendar today = Calendar.getInstance(); for (int i = 0; i < 10; i++) { today.set(2009, 1, i+1); dates[i] = new java.sql.Date(today.getTimeInMillis()); } // Add in reverse order for (int i = 9; i >= 0; i--) { rainDates.add(dates[i]); } game.setRainDates(rainDates); em.getTransaction().begin(); em.persist(game); em.getTransaction().commit(); em.clear(); Game newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); // Verify the order for (int i = 0; i < 10; i++) { assertEquals(game.getRainDates().get(i), rainDates.get(i)); } // Remove some dates em.getTransaction().begin(); newGame.getRainDates().remove(4); rainDates.remove(4); newGame.getRainDates().remove(2); rainDates.remove(2); em.getTransaction().commit(); em.clear(); newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); assertNotNull(newGame.getRainDates()); assertEquals(8, newGame.getRainDates().size()); // Verify the order for (int i = 0; i < newGame.getRainDates().size(); i++) { assertEquals(newGame.getRainDates().get(i).toString(), rainDates.get(i).toString()); } // Stronger assertion via INDEX value validateCollIndexAndValues(em, "Game", "rainDates", 0, newGame.getRainDates().toArray(), "id", newGame.getId()); em.close(); } /* * Verifies that a collection remains contiguous and element * indexes are reordered if an element is inserted into the collection. 
*/ public void testOneToManyElementInsert() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "batters", "batters_ORDER");// "batters_ORDER"); // Create some data Player[] players = new Player[10]; ArrayList<Player> playersArr = new ArrayList<>(); em.getTransaction().begin(); for (int i = 0; i < 10 ; i++) { players[i] = new Player("Player" + i, i+100); em.persist(players[i]); playersArr.add(players[i]); } em.getTransaction().commitAndResume(); // Persist the related entities BattingOrder order = new BattingOrder(); order.setBatters(playersArr); em.persist(order); em.getTransaction().commitAndResume(); em.refresh(order); em.getTransaction().commit(); em.clear(); // Verify order is correct. BattingOrder newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); for (int i = 0; i < 10 ; i++) { assertEquals(newOrder.getBatters().get(i), (players[i])); } Player p = new Player("PlayerNew", 150); playersArr.add(2, p); Player p2 = new Player("PlayerNew2", 151); playersArr.add(p2); // Add an item at index 2 and at the end of the list em.getTransaction().begin(); newOrder.getBatters().add(2, p); newOrder.getBatters().add(p2); em.getTransaction().commit(); em.clear(); // Simple assertion via find newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); assertNotNull(newOrder.getBatters()); assertEquals(playersArr.size(), newOrder.getBatters().size()); for (int i = 0; i < playersArr.size() ; i++) { assertEquals(newOrder.getBatters().get(i), (playersArr.get(i))); } // Stronger assertion via INDEX value validateIndexAndValues(em, "BattingOrder", "batters", 0, playersArr.toArray(), "id", order.id); em.close(); } /* * Verifies that a collection remains contiguous and element * indexes are reordered if an element is inserted into an * ElementCollection */ public void testElementCollectionElementInsert() { OpenJPAEntityManagerSPI em = 
emf.createEntityManager(); Game game = new Game(); // Verify field name is the default via fm validateOrderColumnName(Game.class, "rainDates", "dateOrder"); // Create a list of basic types java.sql.Date dates[] = new java.sql.Date[10]; ArrayList<java.sql.Date> rainDates = new ArrayList<>(10); Calendar today = Calendar.getInstance(); for (int i = 0; i < 10; i++) { today.set(2009, 1, i+1); dates[i] = new java.sql.Date(today.getTimeInMillis()); } // Add in reverse order for (int i = 9; i >= 0; i--) { rainDates.add(dates[i]); } game.setRainDates(rainDates); em.getTransaction().begin(); em.persist(game); em.getTransaction().commit(); em.clear(); Game newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); // Verify the order for (int i = 0; i < 10; i++) { assertEquals(game.getRainDates().get(i), rainDates.get(i)); } // Add some dates today.set(2009, 1, 15); rainDates.add(1, new java.sql.Date(today.getTimeInMillis())); today.set(2009, 1, 20); rainDates.add(6, new java.sql.Date(today.getTimeInMillis())); em.getTransaction().begin(); game.getRainDates().add(1, rainDates.get(1)); game.getRainDates().add(6, rainDates.get(6)); em.getTransaction().commit(); em.clear(); newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); assertNotNull(game.getRainDates()); assertEquals(12, game.getRainDates().size()); // Verify the order for (int i = 0; i < game.getRainDates().size(); i++) { assertEquals(game.getRainDates().get(i), rainDates.get(i)); } em.close(); } /* * Validates use of OrderColumn with OneToMany using the default * order column name */ public void testOneToManyDefault() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "batters", "batters_ORDER");// "batters_ORDER"); // Create some data Player[] players = new Player[10]; em.getTransaction().begin(); for (int i = 0; i < 10 ; i++) { players[i] = new Player("Player" + i, i); em.persist(players[i]); } 
em.getTransaction().commitAndResume(); // Add it to the persistent list in reverse order ArrayList<Player> playersArr = new ArrayList<>(); for (int i = 0; i < 10 ; i++) { playersArr.add(players[9 - i]); } // Persist the related entities BattingOrder order = new BattingOrder(); order.setBatters(playersArr); em.persist(order); em.getTransaction().commit(); em.refresh(order); em.clear(); // Verify order is correct. BattingOrder newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); for (int i = 0; i < 10 ; i++) { assertEquals(newOrder.getBatters().get(i), (players[9 - i])); } // Add another entity and check order Player newPlayer = new Player("New Player", 99); em.getTransaction().begin(); newOrder.getBatters().add(9, newPlayer); em.getTransaction().commit(); em.clear(); newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); for (int i = 0; i <= 10 ; i++) { if (i < 9) assertEquals(newOrder.getBatters().get(i), (players[9 - i])); else if (i == 9) assertEquals(newOrder.getBatters().get(i), newPlayer); else if (i == 10) assertEquals(newOrder.getBatters().get(i), players[0]); } em.close(); } /* * Validates use of OrderColumn with OneToMany using a specified * order column name */ public void testOneToManyNamed() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "pinch_hitters", "pinch_order"); // Create some data Player[] players = new Player[4]; em.getTransaction().begin(); for (int i = 0; i < 4 ; i++) { players[i] = new Player("PinchHitter" + i, i); em.persist(players[i]); } em.getTransaction().commitAndResume(); // Add it to the persistent list in reverse order ArrayList<Player> pinchArr = new ArrayList<>(); for (int i = 0; i < players.length ; i++) { pinchArr.add(players[players.length - 1 - i]); } // Persist the related entities BattingOrder order = new BattingOrder(); order.setPinchHitters(pinchArr); em.persist(order); 
em.getTransaction().commit(); em.clear(); // Verify order is correct. BattingOrder newOrder = em.find(BattingOrder.class, order.id); assertNotNull(newOrder); for (int i = 0; i < players.length ; i++) { assertEquals(newOrder.getPinchHitters().get(i), (players[players.length - 1 - i])); } em.close(); } /* * Validates use of OrderColumn with ManyToMany relationship */ public void testManyToMany() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(Trainer.class, "playersTrained", "trainingOrder"); // Create some data Player[] players = new Player[25]; em.getTransaction().begin(); for (int i = 0; i < players.length ; i++) { players[i] = new Player("TrainedPlayer" + i, i); em.persist(players[i]); } em.getTransaction().commitAndResume(); // Create M2M, add players in reverse insert order to // validate column order Trainer[] trainers = new Trainer[5]; for (int i = 0; i < trainers.length; i++) { trainers[i] = new Trainer("Trainer" + i); ArrayList<Player> trained = new ArrayList<>(); for (int j = ((i * 5) + 4); j >= (i * 5); j--) { trained.add(players[j]); if (players[j].getTrainers() == null) players[j].setTrainers(new ArrayList<>()); players[j].getTrainers().add(trainers[i]); } trainers[i].setPlayersTrained(trained); em.persist(trainers[i]); } em.getTransaction().commit(); em.clear(); // Verify order is correct. for (int i = 0; i < trainers.length; i++) { Trainer trainer = em.find(Trainer.class, trainers[i].getId()); assertNotNull(trainer); List<Player> trainedPlayers = trainer.getPlayersTrained(); assertNotNull(trainedPlayers); assertEquals(trainedPlayers.size(), 5); for (int j = trainedPlayers.size() - 1; j >=0 ; j--) { assertEquals(trainedPlayers.get(j), (players[(i * 5) + (4 - j)])); } } em.close(); } /* * Validates use of OrderColumn with ManyToMany bi-directional with * both sides of the relationship maintaining order. 
This test is not * currently run since work is underway to determine the feasiblity of * bi-directional ordering. */ public void validateBiOrderedManyToMany() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(Game.class, "playedIn", "playerOrder"); validateOrderColumnName(Player.class, "gamesPlayedIn", "playedInOrder"); // Create some data Player[] players = new Player[25]; em.getTransaction().begin(); for (int i = 0; i < players.length ; i++) { players[i] = new Player("GamePlayer" + i, i); em.persist(players[i]); } em.getTransaction().commitAndResume(); // Create M2M, add players in reverse insert order to // validate column order Game[] games = new Game[5]; for (int i = 0; i < games.length; i++) { games[i] = new Game(); ArrayList<Player> playedIn = new ArrayList<>(); for (int j = ((i * 5) + 4); j >= (i * 5); j--) { playedIn.add(players[j]); if (players[j].getGamesPlayedIn() == null) players[j].setGamesPlayedIn(new ArrayList<>()); players[j].getGamesPlayedIn().add(games[i]); } games[i].setPlayedIn(playedIn); em.persist(games[i]); } em.getTransaction().commit(); em.clear(); // Verify order is correct. 
for (int i = 0; i < games.length; i++) { Game game = em.find(Game.class, games[i].getId()); assertNotNull(game); List<Player> playedIn = game.getPlayedIn(); assertNotNull(playedIn); assertEquals(playedIn.size(), 5); for (int j = playedIn.size() - 1; j >=0 ; j--) { Player p = playedIn.get(j); assertEquals(p, (players[(i * 5) + (4 - j)])); for (int k = 0; k < p.getGamesPlayedIn().size(); k++) { assertNotNull(p.getGamesPlayedIn()); assertEquals(p.getGamesPlayedIn().get(k), games[k]); } } } em.close(); } /* * Validates use of OrderColumn with ElementCollection of basic * elements */ public void testElementCollectionBasic() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); Game game = new Game(); // Verify field name is the default via fm validateOrderColumnName(Game.class, "rainDates", "dateOrder"); // Create a list of basic types java.sql.Date dates[] = new java.sql.Date[10]; ArrayList<java.sql.Date> rainDates = new ArrayList<>(10); Calendar today = Calendar.getInstance(); for (int i = 0; i < 10; i++) { today.set(2009, 1, i+1); dates[i] = new java.sql.Date(today.getTimeInMillis()); } // Add in reverse order for (int i = 9; i >= 0; i--) { rainDates.add(dates[i]); } game.setRainDates(rainDates); em.getTransaction().begin(); em.persist(game); em.getTransaction().commit(); em.clear(); Game newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); // Verify the order for (int i = 0; i < 10; i++) { assertEquals(game.getRainDates().get(i), dates[9-i]); } em.close(); } /* * Validates use of OrderColumn with ElementCollection of Embeddables */ public void testElementCollectionEmbeddables() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); Game game = new Game(); // Verify field name is the default via fm validateOrderColumnName(Game.class, "innings", "inningOrder"); // Create a list of basic types Inning innings[] = new Inning[9]; Collection<Inning> inningCol = new ArrayList<>(); Random rnd = new Random(); for (int i = 8; i >= 0; i--) { innings[i] = 
new Inning(i, Math.abs(rnd.nextInt()) % 10, Math.abs(rnd.nextInt()) % 10); } // Add in reverse (correct) order for (int i = 8; i >= 0; i--) { inningCol.add(innings[i]); } game.setInnings(inningCol); em.getTransaction().begin(); em.persist(game); em.getTransaction().commit(); em.clear(); Game newGame = em.find(Game.class, game.getId()); assertNotNull(newGame); // Verify the order Inning[] inningArr = (Inning[])game.getInnings(). toArray(new Inning[9]); for (int i = 0; i < 9; i++) { assertEquals(inningArr[i], innings[8-i]); } em.close(); } /* * Validates the use of the updatable on OrderColumn. insertable=false * simply means the order column is omitted from the sql. Having the * appropriate field mapping will enforce that. */ public void testOrderColumnInsertable() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Create a collection using secondary entities // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "titles", "titles_ORDER"); validateOrderColumnInsertable(emf, BattingOrder.class, "fixedBatters", false); em.close(); } /* * Validates the use of the updatable on OrderColumn. updatable=false * simply means the order column is omitted from the sql. Having the * appropriate field mapping will enforce that. */ public void testOrderColumnUpdateable() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Verify field name is the default via fm validateOrderColumnName(BattingOrder.class, "titles", "titles_ORDER"); validateOrderColumnUpdatable(emf, BattingOrder.class, "titles", false); em.close(); } /* * Validates the use of the OrderColumn with o2o, o2m, m2m relationships * and collection table - with and without join tables. 
*/ public void testOrderColumnTable() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); validateOrderColumnTable(emf, Owner.class, "cars", "OC_CAR", "car_o2m_order"); validateOrderColumnTable(emf, Owner.class, "homes", "home_o2m_table", "homes_ORDER"); validateOrderColumnTable(emf, Owner.class, "bikeColl", "bike_table", "bike_coll_order"); validateOrderColumnTable(emf, Owner.class, "widgets", "widget_m2m_table", "widgets_ORDER"); Owner owner = new Owner(); Collection<Car> cars = new ArrayList<>(); Collection<Home> homes = new ArrayList<>(); Collection<Bicycle> bicycles = new ArrayList<>(); Collection<Widget> widgets = new ArrayList<>(); Collection<Owner> owners = new ArrayList<>(); owner.setCars(cars); owner.setHomes(homes); owner.setBikeColl(bicycles); owner.setWidgets(widgets); for (int i = 0; i < 5; i++){ Car car = new Car(2000 + 1, "Make"+i, "Model"+i); car.setOwner(owner); cars.add(car); Home home = new Home(2000 + i); homes.add(home); Bicycle bike = new Bicycle("Brand"+i, "Model"+i); bicycles.add(bike); Widget widget = new Widget("Name"+i); widgets.add(widget); widget.setOwners(owners); } Object[] carArr = cars.toArray(); Object[] homeArr = homes.toArray(); Object[] bikeArr = bicycles.toArray(); Object[] widgetArr = widgets.toArray(); em.getTransaction().begin(); em.persist(owner); em.getTransaction().commit(); String oid = owner.getId(); em.clear(); // Run queries to ensure the query component uses the correct tables validateIndexAndValues(em, "Owner", "cars", 0, carArr, "id", oid); validateIndexAndValues(em, "Owner", "homes", 0, homeArr, "id", oid); validateIndexAndValues(em, "Owner", "widgets", 0, widgetArr, "id", oid); validateIndexAndValues(em, "Owner", "bikeColl", 0, bikeArr, "id", oid); em.close(); } /* * Validates the use of order column (via INDEX) in the predicate of a * JPQL query. 
*/ public void testOrderColumnPredicateQuery() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Query and verify the result String queryString = "SELECT w FROM Course c JOIN c.waitList w " + "WHERE c.name = :cname AND INDEX(w) = :widx"; Query qry = em.createQuery(queryString); qry.setParameter("widx", 0); qry.setParameter("cname", "Course B"); Student idx0 = (Student)qry.getSingleResult(); assertNotNull(idx0); assertEquals(idx0, students[10]); qry.setParameter("widx", 1); idx0 = (Student)qry.getSingleResult(); assertNotNull(idx0); assertEquals(idx0, students[11]); qry.setParameter("cname", "Course A"); qry.setParameter("widx", 0); idx0 = (Student)qry.getSingleResult(); assertNotNull(idx0); assertEquals(idx0, students[11]); qry.setParameter("widx", 1); idx0 = (Student)qry.getSingleResult(); assertNotNull(idx0); assertEquals(idx0, students[10]); em.close(); } /* * Validates the use of order column (via INDEX) in the projection of * a JPQL query. */ public void testOrderColumnProjectionQuery() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Query and verify the result String queryString = "SELECT INDEX(w) FROM Course c JOIN c.waitList w" + " WHERE c.name = :cname ORDER BY w"; Query qry = em.createQuery(queryString); qry.setParameter("cname", "Course A"); List rlist = qry.getResultList(); assertNotNull(rlist); assertEquals(2, rlist.size()); assertEquals(0L, rlist.get(0)); assertEquals(1L, rlist.get(1)); queryString = "SELECT INDEX(w) FROM Course c JOIN c.waitList w" + " WHERE c.name = :cname AND w.name = 'Student11'"; qry = em.createQuery(queryString); qry.setParameter("cname", "Course B"); Long idx = (Long)qry.getSingleResult(); assertNotNull(idx); assertEquals((Long)idx, (Long)1L); em.close(); } /* * Create the data used by the query tests */ private void createQueryData() { OpenJPAEntityManagerSPI em = emf.createEntityManager(); // Add some data for (int i = 0; i < 12; i++) { students[i] = new Student("Student" + i); } Course courseA = new 
Course("Course A"); Course courseB = new Course("Course B"); HashSet<Course> courses = new HashSet<>(); courses.add(courseA); courses.add(courseB); HashSet<Student> cAstudents = new HashSet<>(); for (int i = 0; i < 5; i++) { cAstudents.add(students[i]); students[i].setCourses(courses); } courseA.setStudents(cAstudents); ArrayList<Student> cAwaitlist = new ArrayList<>(); cAwaitlist.add(students[11]); cAwaitlist.add(students[10]); courseA.setWaitList(cAwaitlist); HashSet<Student> cBstudents = new HashSet<>(); for (int i = 5; i < 10; i++) { cBstudents.add(students[i]); } courseB.setStudents(cBstudents); ArrayList<Student> cBwaitlist = new ArrayList<>(); cBwaitlist.add(students[10]); cBwaitlist.add(students[11]); courseB.setWaitList(cBwaitlist); em.getTransaction().begin(); em.persist(courseA); em.persist(courseB); em.getTransaction().commit(); em.close(); } private void validateIndexAndValues(OpenJPAEntityManagerSPI em, String entity, String indexedCol, int base, Object[] objs, String idField, Object idValue) { String queryString = "SELECT INDEX(b), b FROM " + entity + " a JOIN a." + indexedCol + " b WHERE a." + idField + " = :idVal"; em.clear(); Query qry = em.createQuery(queryString); qry.setParameter("idVal", idValue); List rlist = qry.getResultList(); assertNotNull(rlist); assertEquals(objs.length, rlist.size()); TreeMap<Long, Object> objMap = new TreeMap<>(); for (int i = 0; i < objs.length; i++) { Object[] rvals = (Object[])rlist.get(i); Long idx = (Long)rvals[0]; Object objVal = rvals[1]; objMap.put(idx, objVal); } for (int i = 0; i < objs.length; i++) { Object val = objMap.get(((long) (base + i))); assertEquals(val, objs[i]); } } private void validateCollIndexAndValues(OpenJPAEntityManagerSPI em, String entity, String indexedCol, int base, Object[] objs, String idField, Object idValue) { String queryString = "SELECT INDEX(b), b FROM " + entity + " a, IN(a." + indexedCol + ") b WHERE a." 
+ idField + " = :idVal"; em.clear(); Query qry = em.createQuery(queryString); qry.setParameter("idVal", idValue); List rlist = qry.getResultList(); assertNotNull(rlist); assertEquals(objs.length, rlist.size()); TreeMap<Long, Object> objMap = new TreeMap<>(); for (int i = 0; i < objs.length; i++) { Object[] rvals = (Object[])rlist.get(i); Long idx = (Long)rvals[0]; Object objVal = rvals[1]; objMap.put(idx, objVal); } for (int i = 0; i < objs.length; i++) { Object val = objMap.get(((long) (base + i))); assertEquals(val, objs[i]); } } private void validateOrderColumnName(Class clazz, String fieldName, String columnName) { validateOrderColumnName(emf, clazz, fieldName, columnName); } private Column getOrderColumn(OpenJPAEntityManagerFactorySPI emf1, Class clazz, String fieldName) { JDBCConfiguration conf = (JDBCConfiguration) emf1.getConfiguration(); ClassMapping cls = conf.getMappingRepositoryInstance(). getMapping(clazz, null, true); FieldMapping fm = cls.getFieldMapping(fieldName); Column oc = fm.getOrderColumn(); assertNotNull(oc); return oc; } private void validateOrderColumnName(OpenJPAEntityManagerFactorySPI emf1, Class clazz, String fieldName, String columnName) { Column oc = getOrderColumn(emf1, clazz, fieldName); assertTrue(oc.getName().equalsIgnoreCase(columnName)); } private void validateOrderColumnTable( OpenJPAEntityManagerFactorySPI emf1, Class clazz, String fieldName, String tableName, String columnName) { Column oc = getOrderColumn(emf1, clazz, fieldName); // Verify the oc has the correct table name assertTrue(oc.getTableName().equalsIgnoreCase(tableName)); // Verify the table exists in the db assertTrue(tableAndColumnExists(emf1, null, tableName, null, columnName)); } private void validateOrderColumnUpdatable( OpenJPAEntityManagerFactorySPI emf1, Class clazz, String fieldName, boolean updatable) { Column oc = getOrderColumn(emf1, clazz, fieldName); assertEquals(updatable, !oc.getFlag(Column.FLAG_DIRECT_UPDATE)); } private void 
validateOrderColumnInsertable( OpenJPAEntityManagerFactorySPI emf1, Class clazz, String fieldName, boolean insertable) { Column oc = getOrderColumn(emf1, clazz, fieldName); assertEquals(insertable, !oc.getFlag(Column.FLAG_DIRECT_INSERT)); } /** * Method to verify a table was created for the given name and schema */ private boolean tableAndColumnExists(OpenJPAEntityManagerFactorySPI emf1, OpenJPAEntityManagerSPI em, String tableName, String schemaName, String columnName) { JDBCConfiguration conf = (JDBCConfiguration) emf1.getConfiguration(); DBDictionary dict = conf.getDBDictionaryInstance(); OpenJPAEntityManagerSPI em1 = em; // If no em supplied, create one if (em1 == null) { em1 = emf1.createEntityManager(); } Connection conn = (Connection)em1.getConnection(); try { DatabaseMetaData dbmd = conn.getMetaData(); // (meta, catalog, schemaName, tableName, conn) Column[] cols = dict.getColumns(dbmd, null, null, tableName, columnName, conn); if (cols != null && cols.length == 1) { Column col = cols[0]; String colName = col.getName(); if (col.getTableName().equalsIgnoreCase(tableName) && (schemaName == null || col.getSchemaName().equalsIgnoreCase(schemaName)) && colName.equalsIgnoreCase(columnName)) return true; } } catch (Throwable e) { fail("Unable to get column information."); } finally { if (em == null) { em1.close(); } } return false; } }
apache/ratis
37,416
ratis-grpc/src/main/java/org/apache/ratis/grpc/server/GrpcLogAppender.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ratis.grpc.server; import org.apache.ratis.conf.RaftProperties; import org.apache.ratis.grpc.GrpcConfigKeys; import org.apache.ratis.grpc.GrpcUtil; import org.apache.ratis.grpc.metrics.GrpcServerMetrics; import org.apache.ratis.metrics.Timekeeper; import org.apache.ratis.proto.RaftProtos.InstallSnapshotResult; import org.apache.ratis.protocol.RaftPeerId; import org.apache.ratis.retry.MultipleLinearRandomRetry; import org.apache.ratis.retry.RetryPolicy; import org.apache.ratis.server.RaftServer; import org.apache.ratis.server.RaftServerConfigKeys; import org.apache.ratis.server.leader.FollowerInfo; import org.apache.ratis.server.leader.LeaderState; import org.apache.ratis.server.leader.LogAppenderBase; import org.apache.ratis.server.protocol.TermIndex; import org.apache.ratis.server.raftlog.RaftLog; import org.apache.ratis.server.util.ServerStringUtils; import org.apache.ratis.thirdparty.io.grpc.StatusRuntimeException; import org.apache.ratis.thirdparty.io.grpc.stub.CallStreamObserver; import org.apache.ratis.thirdparty.io.grpc.stub.StreamObserver; import org.apache.ratis.proto.RaftProtos.AppendEntriesReplyProto; import 
org.apache.ratis.proto.RaftProtos.AppendEntriesReplyProto.AppendResult; import org.apache.ratis.proto.RaftProtos.AppendEntriesRequestProto; import org.apache.ratis.proto.RaftProtos.InstallSnapshotReplyProto; import org.apache.ratis.proto.RaftProtos.InstallSnapshotReplyProto.InstallSnapshotReplyBodyCase; import org.apache.ratis.proto.RaftProtos.InstallSnapshotRequestProto; import org.apache.ratis.proto.RaftProtos.InstallSnapshotRequestProto.InstallSnapshotRequestBodyCase; import org.apache.ratis.statemachine.SnapshotInfo; import org.apache.ratis.util.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.io.InterruptedIOException; import java.util.Comparator; import java.util.LinkedList; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; /** * A new log appender implementation using grpc bi-directional stream API. */ public class GrpcLogAppender extends LogAppenderBase { public static final Logger LOG = LoggerFactory.getLogger(GrpcLogAppender.class); private enum BatchLogKey implements BatchLogger.Key { RESET_CLIENT, INCONSISTENCY_REPLY, APPEND_LOG_RESPONSE_HANDLER_ON_ERROR } public static final int INSTALL_SNAPSHOT_NOTIFICATION_INDEX = 0; private static final Comparator<Long> CALL_ID_COMPARATOR = (left, right) -> { // calculate diff in order to take care the possibility of numerical overflow final long diff = left - right; return diff == 0? 0: diff > 0? 
1: -1; }; enum Event { APPEND_ENTRIES_REPLY, APPEND_ENTRIES_INCONSISTENCY_REPLY, SNAPSHOT_REPLY, COMPLETE, TIMEOUT, ERROR; boolean updateFirstReplyReceived(boolean firstReplyReceived) { switch (this) { case APPEND_ENTRIES_REPLY: case APPEND_ENTRIES_INCONSISTENCY_REPLY: case SNAPSHOT_REPLY: case COMPLETE: return true; case ERROR: return false; case TIMEOUT: return firstReplyReceived; default: throw new IllegalStateException("Unexpected event: " + this); } } boolean isError() { switch (this) { case APPEND_ENTRIES_INCONSISTENCY_REPLY: case TIMEOUT: case ERROR: return true; case APPEND_ENTRIES_REPLY: case SNAPSHOT_REPLY: case COMPLETE: return false; default: throw new IllegalStateException("Unexpected event: " + this); } } } static class ReplyState { private boolean firstReplyReceived = false; private int errorCount = 0; synchronized boolean isFirstReplyReceived() { return firstReplyReceived; } synchronized int getErrorCount() { return errorCount; } int process(AppendResult result) { return process(result == AppendResult.INCONSISTENCY? 
Event.APPEND_ENTRIES_INCONSISTENCY_REPLY : Event.APPEND_ENTRIES_REPLY); } synchronized int process(Event event) { firstReplyReceived = event.updateFirstReplyReceived(firstReplyReceived); if (event.isError()) { errorCount++; } else { errorCount = 0; } return errorCount; } } private final AtomicLong callId = new AtomicLong(); private final RequestMap pendingRequests = new RequestMap(); private final int maxPendingRequestsNum; private final boolean installSnapshotEnabled; private final TimeDuration requestTimeoutDuration; private final TimeDuration installSnapshotStreamTimeout; private final TimeDuration logMessageBatchDuration; private final int maxOutstandingInstallSnapshots; private final TimeoutExecutor scheduler = TimeoutExecutor.getInstance(); @SuppressWarnings({"squid:S3077"}) // Suppress volatile for generic type private volatile StreamObservers appendLogRequestObserver; private final boolean useSeparateHBChannel; private final GrpcServerMetrics grpcServerMetrics; private final AutoCloseableReadWriteLock lock; private final StackTraceElement caller; private final RetryPolicy errorRetryWaitPolicy; private final ReplyState replyState = new ReplyState(); public GrpcLogAppender(RaftServer.Division server, LeaderState leaderState, FollowerInfo f) { super(server, leaderState, f); Objects.requireNonNull(getServerRpc(), "getServerRpc() == null"); final RaftProperties properties = server.getRaftServer().getProperties(); this.maxPendingRequestsNum = GrpcConfigKeys.Server.leaderOutstandingAppendsMax(properties); this.requestTimeoutDuration = RaftServerConfigKeys.Rpc.requestTimeout(properties); this.maxOutstandingInstallSnapshots = GrpcConfigKeys.Server.installSnapshotRequestElementLimit(properties); this.installSnapshotStreamTimeout = GrpcConfigKeys.Server.installSnapshotRequestTimeout(properties) .multiply(maxOutstandingInstallSnapshots); this.logMessageBatchDuration = GrpcConfigKeys.Server.logMessageBatchDuration(properties); this.installSnapshotEnabled = 
RaftServerConfigKeys.Log.Appender.installSnapshotEnabled(properties); this.useSeparateHBChannel = GrpcConfigKeys.Server.heartbeatChannel(properties); grpcServerMetrics = new GrpcServerMetrics(server.getMemberId().toString()); grpcServerMetrics.addPendingRequestsCount(getFollowerId().toString(), pendingRequests::logRequestsSize); lock = new AutoCloseableReadWriteLock(this); caller = LOG.isTraceEnabled()? JavaUtils.getCallerStackTraceElement(): null; errorRetryWaitPolicy = MultipleLinearRandomRetry.parseCommaSeparated( RaftServerConfigKeys.Log.Appender.retryPolicy(properties)); } @Override public GrpcServicesImpl getServerRpc() { return (GrpcServicesImpl)super.getServerRpc(); } private GrpcServerProtocolClient getClient() throws IOException { return getServerRpc().getProxies().getProxy(getFollowerId()); } private void resetClient(AppendEntriesRequest request, Event event) { try (AutoCloseableLock writeLock = lock.writeLock(caller, LOG::trace)) { getClient().resetConnectBackoff(); if (appendLogRequestObserver != null) { appendLogRequestObserver.stop(); appendLogRequestObserver = null; } final int errorCount = replyState.process(event); // clear the pending requests queue and reset the next index of follower pendingRequests.clear(); final FollowerInfo f = getFollower(); final long nextIndex = 1 + Optional.ofNullable(request) .map(AppendEntriesRequest::getPreviousLog) .map(TermIndex::getIndex) .orElseGet(f::getMatchIndex); if (event.isError() && request == null) { final long followerNextIndex = f.getNextIndex(); BatchLogger.print(BatchLogKey.RESET_CLIENT, f.getId() + "-" + followerNextIndex, suffix -> LOG.warn("{}: Follower failed (request=null, errorCount={}); keep nextIndex ({}) unchanged and retry.{}", this, errorCount, followerNextIndex, suffix), logMessageBatchDuration); return; } if (request != null && request.isHeartbeat()) { return; } getFollower().computeNextIndex(getNextIndexForError(nextIndex)); } catch (IOException ie) { LOG.warn(this + ": Failed to 
getClient for " + getFollowerId(), ie); } } private boolean isFollowerCommitBehindLastCommitIndex() { return getRaftLog().getLastCommittedIndex() > getFollower().getCommitIndex(); } private boolean installSnapshot() { if (installSnapshotEnabled) { final SnapshotInfo snapshot = shouldInstallSnapshot(); if (snapshot != null) { installSnapshot(snapshot); return true; } } else { // check installSnapshotNotification final TermIndex firstAvailable = shouldNotifyToInstallSnapshot(); if (firstAvailable != null) { notifyInstallSnapshot(firstAvailable); return true; } } return false; } @Override public void run() throws IOException { for(; isRunning(); mayWait()) { //HB period is expired OR we have messages OR follower is behind with commit index if (shouldSendAppendEntries() || isFollowerCommitBehindLastCommitIndex()) { final boolean installingSnapshot = installSnapshot(); appendLog(installingSnapshot || haveTooManyPendingRequests()); } getLeaderState().checkHealth(getFollower()); } Optional.ofNullable(appendLogRequestObserver).ifPresent(StreamObservers::onCompleted); } public long getWaitTimeMs() { if (haveTooManyPendingRequests()) { return getHeartbeatWaitTimeMs(); // Should wait for a short time } else if (shouldSendAppendEntries() && !isSlowFollower()) { // For normal nodes, new entries should be sent ASAP // however for slow followers (especially when the follower is down), // keep sending without any wait time only ends up in high CPU load return TimeDuration.max(getRemainingWaitTime(), TimeDuration.ZERO).toLong(TimeUnit.MILLISECONDS); } return getHeartbeatWaitTimeMs(); } private boolean isSlowFollower() { final TimeDuration elapsedTime = getFollower().getLastRpcResponseTime().elapsedTime(); return elapsedTime.compareTo(getServer().properties().rpcSlownessTimeout()) > 0; } private void mayWait() { // use lastSend time instead of lastResponse time try { getEventAwaitForSignal().await(getWaitTimeMs() + errorWaitTimeMs(), TimeUnit.MILLISECONDS); } catch 
(InterruptedException ie) { LOG.warn(this + ": Wait interrupted by " + ie); Thread.currentThread().interrupt(); } } private long errorWaitTimeMs() { return errorRetryWaitPolicy.handleAttemptFailure(replyState::getErrorCount) .getSleepTime().toLong(TimeUnit.MILLISECONDS); } @Override public CompletableFuture<LifeCycle.State> stopAsync() { grpcServerMetrics.unregister(); return super.stopAsync(); } @Override public boolean shouldSendAppendEntries() { return appendLogRequestObserver == null || super.shouldSendAppendEntries(); } @Override public boolean hasPendingDataRequests() { return pendingRequests.logRequestsSize() > 0; } /** @return true iff either (1) queue is full, or (2) queue is non-empty and not received first response. */ private boolean haveTooManyPendingRequests() { final int size = pendingRequests.logRequestsSize(); if (size == 0) { return false; } else if (size >= maxPendingRequestsNum) { return true; } else { // queue is non-empty and non-full return !replyState.isFirstReplyReceived(); } } static class StreamObservers { private final CallStreamObserver<AppendEntriesRequestProto> appendLog; private final CallStreamObserver<AppendEntriesRequestProto> heartbeat; private final TimeDuration waitForReady; private volatile boolean running = true; StreamObservers(GrpcServerProtocolClient client, AppendLogResponseHandler handler, boolean separateHeartbeat, TimeDuration waitTimeMin) { this.appendLog = client.appendEntries(handler, false); this.heartbeat = separateHeartbeat? client.appendEntries(handler, true): null; this.waitForReady = waitTimeMin.isPositive()? waitTimeMin: TimeDuration.ONE_MILLISECOND; } void onNext(AppendEntriesRequestProto proto) throws InterruptedIOException { CallStreamObserver<AppendEntriesRequestProto> stream; boolean isHeartBeat = heartbeat != null && proto.getEntriesCount() == 0; if (isHeartBeat) { stream = heartbeat; } else { stream = appendLog; } // stall for stream to be ready. 
while (!stream.isReady() && running) { sleep(waitForReady, isHeartBeat); } stream.onNext(proto); } void stop() { running = false; } void onCompleted() { appendLog.onCompleted(); Optional.ofNullable(heartbeat).ifPresent(StreamObserver::onCompleted); } } @Override public long getCallId() { return callId.get(); } @Override public Comparator<Long> getCallIdComparator() { return CALL_ID_COMPARATOR; } private void appendLog(boolean heartbeat) throws IOException { final AppendEntriesRequestProto pending; final AppendEntriesRequest request; try (AutoCloseableLock writeLock = lock.writeLock(caller, LOG::trace)) { // Prepare and send the append request. // Note changes on follower's nextIndex and ops on pendingRequests should always be done under the write-lock pending = newAppendEntriesRequest(callId.getAndIncrement(), heartbeat); if (pending == null) { return; } request = new AppendEntriesRequest(pending, getFollowerId(), grpcServerMetrics); pendingRequests.put(request); increaseNextIndex(pending); if (appendLogRequestObserver == null) { appendLogRequestObserver = new StreamObservers( getClient(), new AppendLogResponseHandler(), useSeparateHBChannel, getWaitTimeMin()); } } final TimeDuration remaining = getRemainingWaitTime(); if (remaining.isPositive()) { sleep(remaining, heartbeat); } if (isRunning()) { sendRequest(request, pending); } } private static void sleep(TimeDuration waitTime, boolean heartbeat) throws InterruptedIOException { try { waitTime.sleep(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw IOUtils.toInterruptedIOException( "Interrupted appendLog, heartbeat? 
" + heartbeat, e); } } private void sendRequest(AppendEntriesRequest request, AppendEntriesRequestProto proto) throws InterruptedIOException { CodeInjectionForTesting.execute(GrpcServicesImpl.GRPC_SEND_SERVER_REQUEST, getServer().getId(), null, proto); resetHeartbeatTrigger(); StreamObservers observers = appendLogRequestObserver; if (observers != null) { request.startRequestTimer(); observers.onNext(proto); getFollower().updateLastRpcSendTime(request.isHeartbeat()); scheduler.onTimeout(requestTimeoutDuration, () -> timeoutAppendRequest(request.getCallId(), request.isHeartbeat()), LOG, () -> "Timeout check failed for append entry request: " + request); } } private void timeoutAppendRequest(long cid, boolean heartbeat) { final AppendEntriesRequest pending = pendingRequests.remove(cid, heartbeat); if (pending != null) { final int errorCount = replyState.process(Event.TIMEOUT); LOG.warn("{}: Timed out {}appendEntries, errorCount={}, request={}", this, heartbeat ? "HEARTBEAT " : "", errorCount, pending); grpcServerMetrics.onRequestTimeout(getFollowerId().toString(), heartbeat); pending.stopRequestTimer(); } } private void increaseNextIndex(AppendEntriesRequestProto request) { final int count = request.getEntriesCount(); if (count > 0) { getFollower().increaseNextIndex(request.getEntries(count - 1).getIndex() + 1); } } private void increaseNextIndex(final long installedSnapshotIndex, Object reason) { final long newNextIndex = installedSnapshotIndex + 1; LOG.info("{}: updateNextIndex {} for {}", this, newNextIndex, reason); getFollower().updateNextIndex(newNextIndex); } /** * StreamObserver for handling responses from the follower */ private class AppendLogResponseHandler implements StreamObserver<AppendEntriesReplyProto> { private final String name = getFollower().getName() + "-" + JavaUtils.getClassSimpleName(getClass()); /** * After receiving a appendEntries reply, do the following: * 1. 
If the reply is success, update the follower's match index and submit * an event to leaderState * 2. If the reply is NOT_LEADER, step down * 3. If the reply is INCONSISTENCY, increase/ decrease the follower's next * index based on the response */ @Override public void onNext(AppendEntriesReplyProto reply) { AppendEntriesRequest request = pendingRequests.remove(reply); if (request != null) { request.stopRequestTimer(); // Update completion time getFollower().updateLastRespondedAppendEntriesSendTime(request.getSendTime()); } getFollower().updateLastRpcResponseTime(); if (LOG.isDebugEnabled()) { LOG.debug("{}: received {} reply {}, request={}", this, replyState.isFirstReplyReceived()? "a": "the first", ServerStringUtils.toAppendEntriesReplyString(reply), request); } try { onNextImpl(request, reply); } catch(Exception t) { LOG.error("Failed onNext request=" + request + ", reply=" + ServerStringUtils.toAppendEntriesReplyString(reply), t); } } private void onNextImpl(AppendEntriesRequest request, AppendEntriesReplyProto reply) { final int errorCount = replyState.process(reply.getResult()); switch (reply.getResult()) { case SUCCESS: grpcServerMetrics.onRequestSuccess(getFollowerId().toString(), reply.getIsHearbeat()); getLeaderState().onFollowerCommitIndex(getFollower(), reply.getFollowerCommit()); if (getFollower().updateMatchIndex(reply.getMatchIndex())) { getFollower().updateNextIndex(reply.getMatchIndex() + 1); getLeaderState().onFollowerSuccessAppendEntries(getFollower()); } break; case NOT_LEADER: grpcServerMetrics.onRequestNotLeader(getFollowerId().toString()); LOG.warn("{}: received {} reply with term {}", this, reply.getResult(), reply.getTerm()); if (onFollowerTerm(reply.getTerm())) { return; } break; case INCONSISTENCY: grpcServerMetrics.onRequestInconsistency(getFollowerId().toString()); BatchLogger.print(BatchLogKey.INCONSISTENCY_REPLY, getFollower().getName() + "_" + reply.getNextIndex(), suffix -> LOG.warn("{}: received {} reply with nextIndex {}, 
errorCount={}, request={} {}", this, reply.getResult(), reply.getNextIndex(), errorCount, request, suffix)); final long requestFirstIndex = request != null? request.getFirstIndex(): RaftLog.INVALID_LOG_INDEX; updateNextIndex(getNextIndexForInconsistency(requestFirstIndex, reply.getNextIndex())); break; default: throw new IllegalStateException("Unexpected reply result: " + reply.getResult()); } getLeaderState().onAppendEntriesReply(GrpcLogAppender.this, reply); notifyLogAppender(); } /** * for now we simply retry the first pending request */ @Override public void onError(Throwable t) { if (!isRunning()) { LOG.info("{} is already stopped", GrpcLogAppender.this); return; } BatchLogger.print(BatchLogKey.APPEND_LOG_RESPONSE_HANDLER_ON_ERROR, AppendLogResponseHandler.this.name, suffix -> GrpcUtil.warn(LOG, () -> this + ": Failed appendEntries" + suffix, t), logMessageBatchDuration, t instanceof StatusRuntimeException); grpcServerMetrics.onRequestRetry(); // Update try counter AppendEntriesRequest request = pendingRequests.remove(GrpcUtil.getCallId(t), GrpcUtil.isHeartbeat(t)); resetClient(request, Event.ERROR); } @Override public void onCompleted() { LOG.info("{}: follower responses appendEntries COMPLETED", this); resetClient(null, Event.COMPLETE); } @Override public String toString() { return name; } } private void updateNextIndex(long replyNextIndex) { try (AutoCloseableLock writeLock = lock.writeLock(caller, LOG::trace)) { pendingRequests.clear(); getFollower().setNextIndex(replyNextIndex); } } private class InstallSnapshotResponseHandler implements StreamObserver<InstallSnapshotReplyProto> { private final String name = getFollower().getName() + "-" + JavaUtils.getClassSimpleName(getClass()); private final Queue<Integer> pending; private final CompletableFuture<Void> done = new CompletableFuture<>(); private final boolean isNotificationOnly; InstallSnapshotResponseHandler() { this(false); } InstallSnapshotResponseHandler(boolean notifyOnly) { pending = new 
LinkedList<>(); this.isNotificationOnly = notifyOnly; } void addPending(InstallSnapshotRequestProto request) { try (AutoCloseableLock writeLock = lock.writeLock(caller, LOG::trace)) { final int index; if (isNotificationOnly) { Preconditions.assertSame(InstallSnapshotRequestBodyCase.NOTIFICATION, request.getInstallSnapshotRequestBodyCase(), "request case"); index = INSTALL_SNAPSHOT_NOTIFICATION_INDEX; } else { Preconditions.assertSame(InstallSnapshotRequestBodyCase.SNAPSHOTCHUNK, request.getInstallSnapshotRequestBodyCase(), "request case"); index = request.getSnapshotChunk().getRequestIndex(); } if (index == 0) { Preconditions.assertTrue(pending.isEmpty(), "pending queue is non-empty before offer for index 0"); } pending.offer(index); } } void removePending(InstallSnapshotReplyProto reply) { try (AutoCloseableLock writeLock = lock.writeLock(caller, LOG::trace)) { final int index = Objects.requireNonNull(pending.poll(), "index == null"); if (isNotificationOnly) { Preconditions.assertSame(InstallSnapshotReplyBodyCase.SNAPSHOTINDEX, reply.getInstallSnapshotReplyBodyCase(), "reply case"); Preconditions.assertSame(INSTALL_SNAPSHOT_NOTIFICATION_INDEX, (int) index, "poll index"); } else { Preconditions.assertSame(InstallSnapshotReplyBodyCase.REQUESTINDEX, reply.getInstallSnapshotReplyBodyCase(), "reply case"); Preconditions.assertSame(reply.getRequestIndex(), (int) index, "poll index"); } } } //compare follower's latest installed snapshot index with leader's start index void onFollowerCatchup(long followerSnapshotIndex) { final long leaderStartIndex = getRaftLog().getStartIndex(); final long followerNextIndex = followerSnapshotIndex + 1; if (followerNextIndex >= leaderStartIndex) { LOG.info("{}: Follower can catch up leader after install the snapshot, as leader's start index is {}", this, followerNextIndex); notifyInstallSnapshotFinished(InstallSnapshotResult.SUCCESS, followerSnapshotIndex); } } void notifyInstallSnapshotFinished(InstallSnapshotResult result, long 
snapshotIndex) { getServer().getStateMachine().event().notifySnapshotInstalled(result, snapshotIndex, getFollower().getPeer()); } void waitForResponse() { try { done.get(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (ExecutionException e) { throw new IllegalStateException("Failed to complete " + name, e); } } void close() { done.complete(null); notifyLogAppender(); } boolean hasAllResponse() { try (AutoCloseableLock readLock = lock.readLock(caller, LOG::trace)) { return pending.isEmpty(); } } @Override public void onNext(InstallSnapshotReplyProto reply) { if (LOG.isInfoEnabled()) { LOG.info("{}: received {} reply {}", this, replyState.isFirstReplyReceived()? "a" : "the first", ServerStringUtils.toInstallSnapshotReplyString(reply)); } // update the last rpc time getFollower().updateLastRpcResponseTime(); replyState.process(Event.SNAPSHOT_REPLY); final long followerSnapshotIndex; switch (reply.getResult()) { case SUCCESS: LOG.info("{}: Completed InstallSnapshot. 
Reply: {}", this, reply); getFollower().setAttemptedToInstallSnapshot(); removePending(reply); break; case IN_PROGRESS: LOG.info("{}: InstallSnapshot in progress.", this); removePending(reply); break; case ALREADY_INSTALLED: followerSnapshotIndex = reply.getSnapshotIndex(); LOG.info("{}: Follower snapshot is already at index {}.", this, followerSnapshotIndex); getFollower().setSnapshotIndex(followerSnapshotIndex); getFollower().setAttemptedToInstallSnapshot(); getLeaderState().onFollowerCommitIndex(getFollower(), followerSnapshotIndex); increaseNextIndex(followerSnapshotIndex, reply.getResult()); removePending(reply); break; case NOT_LEADER: onFollowerTerm(reply.getTerm()); break; case CONF_MISMATCH: LOG.error("{}: Configuration Mismatch ({}): Leader {} has it set to {} but follower {} has it set to {}", this, RaftServerConfigKeys.Log.Appender.INSTALL_SNAPSHOT_ENABLED_KEY, getServer().getId(), installSnapshotEnabled, getFollowerId(), !installSnapshotEnabled); break; case SNAPSHOT_INSTALLED: followerSnapshotIndex = reply.getSnapshotIndex(); LOG.info("{}: Follower installed snapshot at index {}", this, followerSnapshotIndex); getFollower().setSnapshotIndex(followerSnapshotIndex); getFollower().setAttemptedToInstallSnapshot(); getLeaderState().onFollowerCommitIndex(getFollower(), followerSnapshotIndex); increaseNextIndex(followerSnapshotIndex, reply.getResult()); onFollowerCatchup(followerSnapshotIndex); removePending(reply); break; case SNAPSHOT_UNAVAILABLE: LOG.info("{}: Follower could not install snapshot as it is not available.", this); getFollower().setAttemptedToInstallSnapshot(); notifyInstallSnapshotFinished(InstallSnapshotResult.SNAPSHOT_UNAVAILABLE, RaftLog.INVALID_LOG_INDEX); removePending(reply); break; case UNRECOGNIZED: LOG.error("Unrecognized the reply result {}: Leader is {}, follower is {}", reply.getResult(), getServer().getId(), getFollowerId()); break; case SNAPSHOT_EXPIRED: LOG.warn("{}: Follower could not install snapshot as it is expired.", 
this); default: break; } } @Override public void onError(Throwable t) { if (!isRunning()) { LOG.info("{} is stopped", GrpcLogAppender.this); return; } GrpcUtil.warn(LOG, () -> this + ": Failed InstallSnapshot", t); grpcServerMetrics.onRequestRetry(); // Update try counter resetClient(null, Event.ERROR); close(); } @Override public void onCompleted() { if (!isNotificationOnly || LOG.isDebugEnabled()) { LOG.info("{}: follower responded installSnapshot COMPLETED", this); } replyState.process(Event.COMPLETE); close(); } @Override public String toString() { return name; } } /** * Send installSnapshot request to Follower with a snapshot. * @param snapshot the snapshot to be sent to Follower */ private void installSnapshot(SnapshotInfo snapshot) { LOG.info("{}: followerNextIndex = {} but logStartIndex = {}, send snapshot {} to follower", this, getFollower().getNextIndex(), getRaftLog().getStartIndex(), snapshot); final InstallSnapshotResponseHandler responseHandler = new InstallSnapshotResponseHandler(); StreamObserver<InstallSnapshotRequestProto> snapshotRequestObserver = null; final String requestId = UUID.randomUUID().toString(); try { snapshotRequestObserver = getClient().installSnapshot( getFollower().getName() + "-installSnapshot-" + requestId, installSnapshotStreamTimeout, maxOutstandingInstallSnapshots, responseHandler); for (InstallSnapshotRequestProto request : newInstallSnapshotRequests(requestId, snapshot)) { if (isRunning()) { snapshotRequestObserver.onNext(request); getFollower().updateLastRpcSendTime(false); responseHandler.addPending(request); } else { break; } } snapshotRequestObserver.onCompleted(); grpcServerMetrics.onInstallSnapshot(); } catch (Exception e) { LOG.warn(this + ": failed to installSnapshot " + snapshot, e); if (snapshotRequestObserver != null) { snapshotRequestObserver.onError(e); } return; } responseHandler.waitForResponse(); if (responseHandler.hasAllResponse()) { getFollower().setSnapshotIndex(snapshot.getTermIndex().getIndex()); 
LOG.info("{}: installed snapshot {} successfully", this, snapshot); } } /** * Send an installSnapshot notification request to the Follower. * @param firstAvailable the first available log's index on the Leader */ private void notifyInstallSnapshot(TermIndex firstAvailable) { LOG.info("{}: notifyInstallSnapshot with firstAvailable={}, followerNextIndex={}", this, firstAvailable, getFollower().getNextIndex()); final InstallSnapshotResponseHandler responseHandler = new InstallSnapshotResponseHandler(true); StreamObserver<InstallSnapshotRequestProto> snapshotRequestObserver = null; // prepare and enqueue the notify install snapshot request. final InstallSnapshotRequestProto request = newInstallSnapshotNotificationRequest(firstAvailable); if (LOG.isInfoEnabled()) { LOG.info("{}: send {}", this, ServerStringUtils.toInstallSnapshotRequestString(request)); } try { snapshotRequestObserver = getClient().installSnapshot(getFollower().getName() + "-notifyInstallSnapshot", requestTimeoutDuration, 0, responseHandler); snapshotRequestObserver.onNext(request); getFollower().updateLastRpcSendTime(false); responseHandler.addPending(request); snapshotRequestObserver.onCompleted(); } catch (Exception e) { GrpcUtil.warn(LOG, () -> this + ": Failed to notify follower to install snapshot.", e); if (snapshotRequestObserver != null) { snapshotRequestObserver.onError(e); } return; } responseHandler.waitForResponse(); } /** * Should the Leader notify the Follower to install the snapshot through * its own State Machine. 
* @return the first available log's start term index */ private TermIndex shouldNotifyToInstallSnapshot() { final FollowerInfo follower = getFollower(); final long leaderNextIndex = getRaftLog().getNextIndex(); final boolean isFollowerBootstrapping = getLeaderState().isFollowerBootstrapping(follower); final long leaderStartIndex = getRaftLog().getStartIndex(); final TermIndex firstAvailable = Optional.ofNullable(getRaftLog().getTermIndex(leaderStartIndex)) .orElseGet(() -> TermIndex.valueOf(getServer().getInfo().getCurrentTerm(), leaderNextIndex)); if (isFollowerBootstrapping && !follower.hasAttemptedToInstallSnapshot()) { // If the follower is bootstrapping and has not yet installed any snapshot from leader, then the follower should // be notified to install a snapshot. Every follower should try to install at least one snapshot during // bootstrapping, if available. LOG.debug("{}: follower is bootstrapping, notify to install snapshot to {}.", this, firstAvailable); return firstAvailable; } final long followerNextIndex = follower.getNextIndex(); if (followerNextIndex >= leaderNextIndex) { return null; } if (followerNextIndex < leaderStartIndex) { // The Leader does not have the logs from the Follower's last log // index onwards. And install snapshot is disabled. So the Follower // should be notified to install the latest snapshot through its // State Machine. return firstAvailable; } else if (leaderStartIndex == RaftLog.INVALID_LOG_INDEX) { // Leader has no logs to check from, hence return next index. 
return firstAvailable; } return null; } static class AppendEntriesRequest { private final Timekeeper timer; @SuppressWarnings({"squid:S3077"}) // Suppress volatile for generic type private volatile Timekeeper.Context timerContext; private final long callId; private final TermIndex previousLog; private final int entriesCount; private final TermIndex firstEntry; private final TermIndex lastEntry; @SuppressWarnings({"squid:S3077"}) // Suppress volatile for generic type private volatile Timestamp sendTime; AppendEntriesRequest(AppendEntriesRequestProto proto, RaftPeerId followerId, GrpcServerMetrics grpcServerMetrics) { this.callId = proto.getServerRequest().getCallId(); this.previousLog = proto.hasPreviousLog()? TermIndex.valueOf(proto.getPreviousLog()): null; this.entriesCount = proto.getEntriesCount(); this.firstEntry = entriesCount > 0? TermIndex.valueOf(proto.getEntries(0)): null; this.lastEntry = entriesCount > 0? TermIndex.valueOf(proto.getEntries(entriesCount - 1)): null; this.timer = grpcServerMetrics.getGrpcLogAppenderLatencyTimer(followerId.toString(), isHeartbeat()); grpcServerMetrics.onRequestCreate(isHeartbeat()); } long getCallId() { return callId; } TermIndex getPreviousLog() { return previousLog; } long getFirstIndex() { return Optional.ofNullable(firstEntry).map(TermIndex::getIndex).orElse(RaftLog.INVALID_LOG_INDEX); } Timestamp getSendTime() { return sendTime; } void startRequestTimer() { timerContext = timer.time(); sendTime = Timestamp.currentTime(); } void stopRequestTimer() { timerContext.stop(); } boolean isHeartbeat() { return entriesCount == 0; } @Override public String toString() { final String entries = entriesCount == 0? "" : entriesCount == 1? ",entry=" + firstEntry : ",entries=" + firstEntry + "..." 
+ lastEntry; return JavaUtils.getClassSimpleName(getClass()) + ":cid=" + callId + ",entriesCount=" + entriesCount + entries; } } static class RequestMap { private final Map<Long, AppendEntriesRequest> logRequests = new ConcurrentHashMap<>(); private final Map<Long, AppendEntriesRequest> heartbeats = new ConcurrentHashMap<>(); int logRequestsSize() { return logRequests.size(); } void clear() { logRequests.clear(); heartbeats.clear(); } void put(AppendEntriesRequest request) { if (request.isHeartbeat()) { heartbeats.put(request.getCallId(), request); } else { logRequests.put(request.getCallId(), request); } } AppendEntriesRequest remove(AppendEntriesReplyProto reply) { return remove(reply.getServerReply().getCallId(), reply.getIsHearbeat()); } AppendEntriesRequest remove(long cid, boolean isHeartbeat) { return isHeartbeat ? heartbeats.remove(cid): logRequests.remove(cid); } } }
googleapis/google-cloud-java
37,299
java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/UpdateServingConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1beta/serving_config_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1beta; /** * * * <pre> * Request for UpdateServingConfig method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest} */ public final class UpdateServingConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) UpdateServingConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateServingConfigRequest.newBuilder() to construct. 
private UpdateServingConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateServingConfigRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateServingConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ServingConfigServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateServingConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ServingConfigServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateServingConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest.class, com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest.Builder.class); } private int bitField0_; public static final int SERVING_CONFIG_FIELD_NUMBER = 1; private com.google.cloud.discoveryengine.v1beta.ServingConfig servingConfig_; /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the servingConfig field is set. */ @java.lang.Override public boolean hasServingConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The servingConfig. */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ServingConfig getServingConfig() { return servingConfig_ == null ? 
com.google.cloud.discoveryengine.v1beta.ServingConfig.getDefaultInstance() : servingConfig_; } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1beta.ServingConfigOrBuilder getServingConfigOrBuilder() { return servingConfig_ == null ? com.google.cloud.discoveryengine.v1beta.ServingConfig.getDefaultInstance() : servingConfig_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. 
The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getServingConfig()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getServingConfig()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest other = (com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) obj; if (hasServingConfig() != other.hasServingConfig()) return false; if (hasServingConfig()) { if (!getServingConfig().equals(other.getServingConfig())) 
return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasServingConfig()) { hash = (37 * hash) + SERVING_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getServingConfig().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateServingConfig method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1beta.ServingConfigServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateServingConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1beta.ServingConfigServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateServingConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest.class, com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getServingConfigFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; servingConfig_ = null; if (servingConfigBuilder_ != null) { servingConfigBuilder_.dispose(); servingConfigBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1beta.ServingConfigServiceProto .internal_static_google_cloud_discoveryengine_v1beta_UpdateServingConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest build() { com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest buildPartial() { com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest result = new com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.servingConfig_ = 
servingConfigBuilder_ == null ? servingConfig_ : servingConfigBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) { return mergeFrom( (com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest other) { if (other == com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest .getDefaultInstance()) return this; if (other.hasServingConfig()) { mergeServingConfig(other.getServingConfig()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public 
final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getServingConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.discoveryengine.v1beta.ServingConfig servingConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.ServingConfig, com.google.cloud.discoveryengine.v1beta.ServingConfig.Builder, com.google.cloud.discoveryengine.v1beta.ServingConfigOrBuilder> servingConfigBuilder_; /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the servingConfig field is set. */ public boolean hasServingConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The servingConfig. 
*/ public com.google.cloud.discoveryengine.v1beta.ServingConfig getServingConfig() { if (servingConfigBuilder_ == null) { return servingConfig_ == null ? com.google.cloud.discoveryengine.v1beta.ServingConfig.getDefaultInstance() : servingConfig_; } else { return servingConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setServingConfig(com.google.cloud.discoveryengine.v1beta.ServingConfig value) { if (servingConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } servingConfig_ = value; } else { servingConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setServingConfig( com.google.cloud.discoveryengine.v1beta.ServingConfig.Builder builderForValue) { if (servingConfigBuilder_ == null) { servingConfig_ = builderForValue.build(); } else { servingConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ServingConfig to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeServingConfig(com.google.cloud.discoveryengine.v1beta.ServingConfig value) { if (servingConfigBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && servingConfig_ != null && servingConfig_ != com.google.cloud.discoveryengine.v1beta.ServingConfig.getDefaultInstance()) { getServingConfigBuilder().mergeFrom(value); } else { servingConfig_ = value; } } else { servingConfigBuilder_.mergeFrom(value); } if (servingConfig_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearServingConfig() { bitField0_ = (bitField0_ & ~0x00000001); servingConfig_ = null; if (servingConfigBuilder_ != null) { servingConfigBuilder_.dispose(); servingConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1beta.ServingConfig.Builder getServingConfigBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServingConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1beta.ServingConfigOrBuilder getServingConfigOrBuilder() { if (servingConfigBuilder_ != null) { return servingConfigBuilder_.getMessageOrBuilder(); } else { return servingConfig_ == null ? 
com.google.cloud.discoveryengine.v1beta.ServingConfig.getDefaultInstance() : servingConfig_; } } /** * * * <pre> * Required. The ServingConfig to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1beta.ServingConfig serving_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.ServingConfig, com.google.cloud.discoveryengine.v1beta.ServingConfig.Builder, com.google.cloud.discoveryengine.v1beta.ServingConfigOrBuilder> getServingConfigFieldBuilder() { if (servingConfigBuilder_ == null) { servingConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1beta.ServingConfig, com.google.cloud.discoveryengine.v1beta.ServingConfig.Builder, com.google.cloud.discoveryengine.v1beta.ServingConfigOrBuilder>( getServingConfig(), getParentForChildren(), isClean()); servingConfig_ = null; } return servingConfigBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. 
The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. 
The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. 
The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Indicates which fields in the provided * [ServingConfig][google.cloud.discoveryengine.v1beta.ServingConfig] to * update. The following are NOT supported: * * * [ServingConfig.name][google.cloud.discoveryengine.v1beta.ServingConfig.name] * * If not set, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest) private static final 
com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest(); } public static com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateServingConfigRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateServingConfigRequest>() { @java.lang.Override public UpdateServingConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateServingConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateServingConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1beta.UpdateServingConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/guava
37,782
android/guava-tests/test/com/google/common/math/StatsAccumulatorTest.java
/* * Copyright (C) 2012 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.math; import static com.google.common.math.StatsTesting.ALLOWED_ERROR; import static com.google.common.math.StatsTesting.ALL_MANY_VALUES; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MEAN; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MEAN; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.MANY_VALUES; import static com.google.common.math.StatsTesting.MANY_VALUES_COUNT; import static com.google.common.math.StatsTesting.MANY_VALUES_MAX; import static com.google.common.math.StatsTesting.MANY_VALUES_MEAN; import static 
com.google.common.math.StatsTesting.MANY_VALUES_MIN; import static com.google.common.math.StatsTesting.MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.MEGA_STREAM_COUNT; import static com.google.common.math.StatsTesting.MEGA_STREAM_MAX; import static com.google.common.math.StatsTesting.MEGA_STREAM_MEAN; import static com.google.common.math.StatsTesting.MEGA_STREAM_MIN; import static com.google.common.math.StatsTesting.MEGA_STREAM_POPULATION_VARIANCE; import static com.google.common.math.StatsTesting.ONE_VALUE; import static com.google.common.math.StatsTesting.OTHER_ONE_VALUE; import static com.google.common.math.StatsTesting.TWO_VALUES; import static com.google.common.math.StatsTesting.TWO_VALUES_MAX; import static com.google.common.math.StatsTesting.TWO_VALUES_MEAN; import static com.google.common.math.StatsTesting.TWO_VALUES_MIN; import static com.google.common.math.StatsTesting.TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStream; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStreamPart1; import static com.google.common.math.StatsTesting.megaPrimitiveDoubleStreamPart2; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static java.lang.Math.sqrt; import static java.util.stream.DoubleStream.concat; import static org.junit.Assert.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.math.StatsTesting.ManyValues; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; import junit.framework.TestCase; import org.jspecify.annotations.NullUnmarked; /** * Tests for {@link StatsAccumulator}. 
This tests the stats methods for instances built with {@link * StatsAccumulator#add} and {@link StatsAccumulator#addAll}, and various error cases of the {@link * StatsAccumulator#add} and {@link StatsAccumulator#addAll} methods. For tests of the {@link * StatsAccumulator#snapshot} method which returns {@link Stats} instances, see {@link StatsTest}. * * @author Pete Gillin */ @NullUnmarked public class StatsAccumulatorTest extends TestCase { private StatsAccumulator emptyAccumulator; private StatsAccumulator emptyAccumulatorByAddAllEmptyIterable; private StatsAccumulator emptyAccumulatorByAddAllEmptyStats; private StatsAccumulator oneValueAccumulator; private StatsAccumulator oneValueAccumulatorByAddAllEmptyStats; private StatsAccumulator twoValuesAccumulator; private StatsAccumulator twoValuesAccumulatorByAddAllStats; private StatsAccumulator manyValuesAccumulatorByAddAllIterable; private StatsAccumulator manyValuesAccumulatorByAddAllIterator; private StatsAccumulator manyValuesAccumulatorByAddAllVarargs; private StatsAccumulator manyValuesAccumulatorByRepeatedAdd; private StatsAccumulator manyValuesAccumulatorByAddAndAddAll; private StatsAccumulator manyValuesAccumulatorByAddAllStats; private StatsAccumulator manyValuesAccumulatorByAddAllStatsAccumulator; private StatsAccumulator integerManyValuesAccumulatorByAddAllIterable; private StatsAccumulator longManyValuesAccumulatorByAddAllIterator; private StatsAccumulator longManyValuesAccumulatorByAddAllVarargs; @Override protected void setUp() throws Exception { super.setUp(); emptyAccumulator = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyIterable = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyIterable.addAll(ImmutableList.<Double>of()); emptyAccumulatorByAddAllEmptyStats = new StatsAccumulator(); emptyAccumulatorByAddAllEmptyStats.addAll(Stats.of()); oneValueAccumulator = new StatsAccumulator(); oneValueAccumulator.add(ONE_VALUE); oneValueAccumulatorByAddAllEmptyStats = new StatsAccumulator(); 
oneValueAccumulatorByAddAllEmptyStats.add(ONE_VALUE); oneValueAccumulatorByAddAllEmptyStats.addAll(Stats.of()); twoValuesAccumulator = new StatsAccumulator(); twoValuesAccumulator.addAll(TWO_VALUES); twoValuesAccumulatorByAddAllStats = new StatsAccumulator(); twoValuesAccumulatorByAddAllStats.addAll(Stats.of(ONE_VALUE)); twoValuesAccumulatorByAddAllStats.addAll(Stats.of(OTHER_ONE_VALUE)); manyValuesAccumulatorByAddAllIterable = new StatsAccumulator(); manyValuesAccumulatorByAddAllIterable.addAll(MANY_VALUES); manyValuesAccumulatorByAddAllIterator = new StatsAccumulator(); manyValuesAccumulatorByAddAllIterator.addAll(MANY_VALUES.iterator()); manyValuesAccumulatorByAddAllVarargs = new StatsAccumulator(); manyValuesAccumulatorByAddAllVarargs.addAll(Doubles.toArray(MANY_VALUES)); manyValuesAccumulatorByRepeatedAdd = new StatsAccumulator(); for (double value : MANY_VALUES) { manyValuesAccumulatorByRepeatedAdd.add(value); } manyValuesAccumulatorByAddAndAddAll = new StatsAccumulator(); manyValuesAccumulatorByAddAndAddAll.add(MANY_VALUES.get(0)); manyValuesAccumulatorByAddAndAddAll.addAll(MANY_VALUES.subList(1, MANY_VALUES.size())); manyValuesAccumulatorByAddAllStats = new StatsAccumulator(); manyValuesAccumulatorByAddAllStats.addAll( Stats.of(MANY_VALUES.subList(0, MANY_VALUES.size() / 2))); manyValuesAccumulatorByAddAllStats.addAll( Stats.of(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size()))); manyValuesAccumulatorByAddAllStatsAccumulator = new StatsAccumulator(); manyValuesAccumulatorByAddAllStatsAccumulator.addAll( statsAccumulatorOf(MANY_VALUES.subList(0, MANY_VALUES.size() / 2))); manyValuesAccumulatorByAddAllStatsAccumulator.addAll( statsAccumulatorOf(MANY_VALUES.subList(MANY_VALUES.size() / 2, MANY_VALUES.size()))); integerManyValuesAccumulatorByAddAllIterable = new StatsAccumulator(); integerManyValuesAccumulatorByAddAllIterable.addAll(INTEGER_MANY_VALUES); longManyValuesAccumulatorByAddAllIterator = new StatsAccumulator(); 
longManyValuesAccumulatorByAddAllIterator.addAll(LONG_MANY_VALUES.iterator()); longManyValuesAccumulatorByAddAllVarargs = new StatsAccumulator(); longManyValuesAccumulatorByAddAllVarargs.addAll(Longs.toArray(LONG_MANY_VALUES)); } private static StatsAccumulator statsAccumulatorOf(Iterable<? extends Number> values) { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(values); return accumulator; } public void testCount() { assertThat(emptyAccumulator.count()).isEqualTo(0); assertThat(emptyAccumulatorByAddAllEmptyIterable.count()).isEqualTo(0); assertThat(emptyAccumulatorByAddAllEmptyStats.count()).isEqualTo(0); assertThat(oneValueAccumulator.count()).isEqualTo(1); assertThat(oneValueAccumulatorByAddAllEmptyStats.count()).isEqualTo(1); assertThat(twoValuesAccumulator.count()).isEqualTo(2); assertThat(twoValuesAccumulatorByAddAllStats.count()).isEqualTo(2); assertThat(manyValuesAccumulatorByAddAllIterable.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.count()).isEqualTo(MANY_VALUES_COUNT); assertThat(integerManyValuesAccumulatorByAddAllIterable.count()) .isEqualTo(StatsTesting.INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.count()) .isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.count()) .isEqualTo(StatsTesting.LONG_MANY_VALUES_COUNT); } public void testCountOverflow_doesNotThrow() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.add(ONE_VALUE); for (int power = 1; 
power < Long.SIZE - 1; power++) { accumulator.addAll(accumulator.snapshot()); } // Should overflow without throwing. accumulator.addAll(accumulator.snapshot()); assertThat(accumulator.count()).isLessThan(0L); } public void testMean() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.mean()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.mean()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.mean()); assertThat(oneValueAccumulator.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.mean()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(twoValuesAccumulator.mean()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN); assertThat(twoValuesAccumulatorByAddAllStats.mean()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllIterable.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllIterator.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllVarargs.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByRepeatedAdd.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAndAddAll.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllStats.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.mean()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); // For datasets of many double values created from an iterable, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asIterable()); for (double value : values.asIterable()) { 
accumulatorByAddAllStats.addAll(Stats.of(value)); } double mean = accumulator.mean(); double meanByAddAllStats = accumulatorByAddAllStats.mean(); if (values.hasAnyNaN()) { assertWithMessage("mean of " + values).that(mean).isNaN(); assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity() && values.hasAnyNegativeInfinity()) { assertWithMessage("mean of " + values).that(mean).isNaN(); assertWithMessage("mean by addAll(Stats) of " + values).that(meanByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity()) { assertWithMessage("mean of " + values).that(mean).isPositiveInfinity(); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isPositiveInfinity(); } else if (values.hasAnyNegativeInfinity()) { assertWithMessage("mean of " + values).that(mean).isNegativeInfinity(); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isNegativeInfinity(); } else { assertWithMessage("mean of " + values) .that(mean) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); assertWithMessage("mean by addAll(Stats) of " + values) .that(meanByAddAllStats) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.mean()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN) .of(INTEGER_MANY_VALUES_MEAN); assertThat(longManyValuesAccumulatorByAddAllIterator.mean()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN); assertThat(longManyValuesAccumulatorByAddAllVarargs.mean()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN); } public void testSum() { assertThat(emptyAccumulator.sum()).isEqualTo(0.0); assertThat(emptyAccumulatorByAddAllEmptyIterable.sum()).isEqualTo(0.0); assertThat(emptyAccumulatorByAddAllEmptyStats.sum()).isEqualTo(0.0); assertThat(oneValueAccumulator.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); 
assertThat(oneValueAccumulatorByAddAllEmptyStats.sum()).isWithin(ALLOWED_ERROR).of(ONE_VALUE); assertThat(twoValuesAccumulator.sum()).isWithin(ALLOWED_ERROR).of(TWO_VALUES_MEAN * 2); assertThat(twoValuesAccumulatorByAddAllStats.sum()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_MEAN * 2); assertThat(manyValuesAccumulatorByAddAllIterable.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sum()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_MEAN * MANY_VALUES_COUNT); assertThat(integerManyValuesAccumulatorByAddAllIterable.sum()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_MEAN) .of(INTEGER_MANY_VALUES_MEAN * INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.sum()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.sum()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_MEAN) .of(LONG_MANY_VALUES_MEAN * LONG_MANY_VALUES_COUNT); } public void testPopulationVariance() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.populationVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.populationVariance()); 
assertThat(oneValueAccumulator.populationVariance()).isEqualTo(0.0); assertThat(oneValueAccumulatorByAddAllEmptyStats.populationVariance()).isEqualTo(0.0); assertThat(twoValuesAccumulator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2); assertThat(twoValuesAccumulatorByAddAllStats.populationVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2); assertThat(manyValuesAccumulatorByAddAllIterable.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllIterator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllVarargs.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByRepeatedAdd.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAndAddAll.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStats.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); // For datasets of many double values created from an iterator, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asIterable().iterator()); for (double value : values.asIterable()) { accumulatorByAddAllStats.addAll(Stats.of(value)); } double populationVariance = 
accumulator.populationVariance(); double populationVarianceByAddAllStats = accumulatorByAddAllStats.populationVariance(); if (values.hasAnyNonFinite()) { assertWithMessage("population variance of " + values).that(populationVariance).isNaN(); assertWithMessage("population variance by addAll(Stats) of " + values) .that(populationVarianceByAddAllStats) .isNaN(); } else { assertWithMessage("population variance of " + values) .that(populationVariance) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); assertWithMessage("population variance by addAll(Stats) of " + values) .that(populationVarianceByAddAllStats) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.populationVariance()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllIterator.populationVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT); assertThat(longManyValuesAccumulatorByAddAllVarargs.populationVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT); } public void testPopulationStandardDeviation() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.populationStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.populationStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.populationStandardDeviation()); assertThat(oneValueAccumulator.populationStandardDeviation()).isEqualTo(0.0); assertThat(oneValueAccumulatorByAddAllEmptyStats.populationStandardDeviation()).isEqualTo(0.0); 
assertThat(twoValuesAccumulator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2)); assertThat(twoValuesAccumulatorByAddAllStats.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS / 2)); assertThat(manyValuesAccumulatorByAddAllIterable.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllIterator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllVarargs.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByRepeatedAdd.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAndAddAll.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllStats.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / MANY_VALUES_COUNT)); assertThat(integerManyValuesAccumulatorByAddAllIterable.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / INTEGER_MANY_VALUES_COUNT)); assertThat(longManyValuesAccumulatorByAddAllIterator.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT)); 
assertThat(longManyValuesAccumulatorByAddAllVarargs.populationStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / LONG_MANY_VALUES_COUNT)); } public void testSampleVariance() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.sampleVariance()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.sampleVariance()); assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleVariance()); assertThrows( IllegalStateException.class, () -> oneValueAccumulatorByAddAllEmptyStats.sampleVariance()); assertThat(twoValuesAccumulator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS); assertThat(twoValuesAccumulatorByAddAllStats.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS); assertThat(manyValuesAccumulatorByAddAllIterable.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllIterator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllVarargs.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByRepeatedAdd.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAndAddAll.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(manyValuesAccumulatorByAddAllStats.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); 
assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleVariance()) .isWithin(ALLOWED_ERROR) .of(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1)); assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleVariance()) .isWithin(ALLOWED_ERROR * INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1)); assertThat(longManyValuesAccumulatorByAddAllIterator.sampleVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)); assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleVariance()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1)); } public void testSampleStandardDeviation() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.sampleStandardDeviation()); assertThrows(IllegalStateException.class, () -> oneValueAccumulator.sampleStandardDeviation()); assertThrows( IllegalStateException.class, () -> oneValueAccumulatorByAddAllEmptyStats.sampleStandardDeviation()); assertThat(twoValuesAccumulator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS)); assertThat(twoValuesAccumulatorByAddAllStats.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(TWO_VALUES_SUM_OF_SQUARES_OF_DELTAS)); assertThat(manyValuesAccumulatorByAddAllIterable.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllIterator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) 
.of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByRepeatedAdd.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAndAddAll.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllStats.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR) .of(sqrt(MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (MANY_VALUES_COUNT - 1))); assertThat(integerManyValuesAccumulatorByAddAllIterable.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS)) .of(sqrt(INTEGER_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (INTEGER_MANY_VALUES_COUNT - 1))); assertThat(longManyValuesAccumulatorByAddAllIterator.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1))); assertThat(longManyValuesAccumulatorByAddAllVarargs.sampleStandardDeviation()) .isWithin(ALLOWED_ERROR * LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS) .of(sqrt(LONG_MANY_VALUES_SUM_OF_SQUARES_OF_DELTAS / (LONG_MANY_VALUES_COUNT - 1))); } public void testMax() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.max()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.max()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.max()); 
assertThat(oneValueAccumulator.max()).isEqualTo(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.max()).isEqualTo(ONE_VALUE); assertThat(twoValuesAccumulator.max()).isEqualTo(TWO_VALUES_MAX); assertThat(twoValuesAccumulatorByAddAllStats.max()).isEqualTo(TWO_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllIterable.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllIterator.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByRepeatedAdd.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAndAddAll.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllStats.max()).isEqualTo(MANY_VALUES_MAX); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.max()).isEqualTo(MANY_VALUES_MAX); // For datasets of many double values created from an array, we test many combinations of // finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); accumulator.addAll(values.asArray()); for (double value : values.asIterable()) { accumulatorByAddAllStats.addAll(Stats.of(value)); } double max = accumulator.max(); double maxByAddAllStats = accumulatorByAddAllStats.max(); if (values.hasAnyNaN()) { assertWithMessage("max of " + values).that(max).isNaN(); assertWithMessage("max by addAll(Stats) of " + values).that(maxByAddAllStats).isNaN(); } else if (values.hasAnyPositiveInfinity()) { assertWithMessage("max of " + values).that(max).isPositiveInfinity(); assertWithMessage("max by addAll(Stats) of " + values) .that(maxByAddAllStats) .isPositiveInfinity(); } else { assertWithMessage("max of " + values).that(max).isEqualTo(MANY_VALUES_MAX); assertWithMessage("max by addAll(Stats) of " + values) .that(maxByAddAllStats) .isEqualTo(MANY_VALUES_MAX); } } 
assertThat(integerManyValuesAccumulatorByAddAllIterable.max()) .isEqualTo(INTEGER_MANY_VALUES_MAX); assertThat(longManyValuesAccumulatorByAddAllIterator.max()).isEqualTo(LONG_MANY_VALUES_MAX); assertThat(longManyValuesAccumulatorByAddAllVarargs.max()).isEqualTo(LONG_MANY_VALUES_MAX); } public void testMin() { assertThrows(IllegalStateException.class, () -> emptyAccumulator.min()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyIterable.min()); assertThrows(IllegalStateException.class, () -> emptyAccumulatorByAddAllEmptyStats.min()); assertThat(oneValueAccumulator.min()).isEqualTo(ONE_VALUE); assertThat(oneValueAccumulatorByAddAllEmptyStats.min()).isEqualTo(ONE_VALUE); assertThat(twoValuesAccumulator.min()).isEqualTo(TWO_VALUES_MIN); assertThat(twoValuesAccumulatorByAddAllStats.min()).isEqualTo(TWO_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllIterable.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllIterator.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByRepeatedAdd.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAndAddAll.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllStats.min()).isEqualTo(MANY_VALUES_MIN); assertThat(manyValuesAccumulatorByAddAllStatsAccumulator.min()).isEqualTo(MANY_VALUES_MIN); // For datasets of many double values created by adding elements individually, we test many // combinations of finite and non-finite values: for (ManyValues values : ALL_MANY_VALUES) { StatsAccumulator accumulator = new StatsAccumulator(); StatsAccumulator accumulatorByAddAllStats = new StatsAccumulator(); for (double value : values.asIterable()) { accumulator.add(value); accumulatorByAddAllStats.addAll(Stats.of(value)); } double min = accumulator.min(); double minByAddAllStats = accumulatorByAddAllStats.min(); if (values.hasAnyNaN()) { 
assertWithMessage("min of " + values).that(min).isNaN(); assertWithMessage("min by addAll(Stats) of " + values).that(minByAddAllStats).isNaN(); } else if (values.hasAnyNegativeInfinity()) { assertWithMessage("min of " + values).that(min).isNegativeInfinity(); assertWithMessage("min by addAll(Stats) of " + values) .that(minByAddAllStats) .isNegativeInfinity(); } else { assertWithMessage("min of " + values).that(min).isEqualTo(MANY_VALUES_MIN); assertWithMessage("min by addAll(Stats) of " + values) .that(minByAddAllStats) .isEqualTo(MANY_VALUES_MIN); } } assertThat(integerManyValuesAccumulatorByAddAllIterable.min()) .isEqualTo(INTEGER_MANY_VALUES_MIN); assertThat(longManyValuesAccumulatorByAddAllIterator.min()).isEqualTo(LONG_MANY_VALUES_MIN); assertThat(longManyValuesAccumulatorByAddAllVarargs.min()).isEqualTo(LONG_MANY_VALUES_MIN); } public void testVerifyMegaStreamHalves() { assertThat( concat(megaPrimitiveDoubleStreamPart1(), megaPrimitiveDoubleStreamPart2()) .sorted() .toArray()) .isEqualTo(megaPrimitiveDoubleStream().toArray()); } public void testAddAllPrimitiveDoubleStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1()); accumulator.addAll(megaPrimitiveDoubleStreamPart2()); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } public void testAddAllPrimitiveIntStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1().mapToInt(x -> (int) x)); accumulator.addAll(megaPrimitiveDoubleStreamPart2().mapToInt(x -> (int) x)); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); 
assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } public void testAddAllPrimitiveLongStream() { StatsAccumulator accumulator = new StatsAccumulator(); accumulator.addAll(megaPrimitiveDoubleStreamPart1().mapToLong(x -> (long) x)); accumulator.addAll(megaPrimitiveDoubleStreamPart2().mapToLong(x -> (long) x)); assertThat(accumulator.count()).isEqualTo(MEGA_STREAM_COUNT); assertThat(accumulator.mean()).isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT).of(MEGA_STREAM_MEAN); assertThat(accumulator.populationVariance()) .isWithin(ALLOWED_ERROR * MEGA_STREAM_COUNT) .of(MEGA_STREAM_POPULATION_VARIANCE); assertThat(accumulator.min()).isEqualTo(MEGA_STREAM_MIN); assertThat(accumulator.max()).isEqualTo(MEGA_STREAM_MAX); } }
apache/jackrabbit-filevault
37,663
vault-validation/src/main/java/org/apache/jackrabbit/vault/validation/spi/impl/nodetype/JcrNodeTypeMetaDataImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.vault.validation.spi.impl.nodetype; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import javax.jcr.NamespaceException; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.NoSuchNodeTypeException; import org.apache.jackrabbit.jcr2spi.nodetype.EffectiveNodeType; import org.apache.jackrabbit.jcr2spi.nodetype.EffectiveNodeTypeProvider; import org.apache.jackrabbit.jcr2spi.nodetype.ItemDefinitionProvider; import org.apache.jackrabbit.jcr2spi.nodetype.NodeTypeDefinitionProvider; import org.apache.jackrabbit.spi.Name; import org.apache.jackrabbit.spi.Path; import org.apache.jackrabbit.spi.Path.Element; import org.apache.jackrabbit.spi.QNodeDefinition; import org.apache.jackrabbit.spi.QNodeTypeDefinition; import org.apache.jackrabbit.spi.QPropertyDefinition; import org.apache.jackrabbit.spi.QValue; import 
org.apache.jackrabbit.spi.QValueFactory; import org.apache.jackrabbit.spi.commons.conversion.IllegalNameException; import org.apache.jackrabbit.spi.commons.conversion.MalformedPathException; import org.apache.jackrabbit.spi.commons.conversion.NamePathResolver; import org.apache.jackrabbit.spi.commons.conversion.NameResolver; import org.apache.jackrabbit.spi.commons.name.NameConstants; import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl; import org.apache.jackrabbit.spi.commons.name.PathBuilder; import org.apache.jackrabbit.spi.commons.name.PathFactoryImpl; import org.apache.jackrabbit.spi.commons.nodetype.constraint.ValueConstraint; import org.apache.jackrabbit.spi.commons.value.QValueFactoryImpl; import org.apache.jackrabbit.spi.commons.value.ValueFormat; import org.apache.jackrabbit.util.Text; import org.apache.jackrabbit.vault.fs.api.WorkspaceFilter; import org.apache.jackrabbit.vault.validation.spi.NodeContext; import org.apache.jackrabbit.vault.validation.spi.ValidationMessage; import org.apache.jackrabbit.vault.validation.spi.ValidationMessageSeverity; import org.apache.jackrabbit.vault.validation.spi.util.NodeContextImpl; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; /** This class encapsulates node type related data of a node. It uses expanded names/paths internally. 
*/ public class JcrNodeTypeMetaDataImpl implements JcrNodeTypeMetaData { static final String EXCEPTION_MESSAGE_INVALID_NAME = "Invalid %s '%s': %s"; static final String CONSTRAINT_PROPERTY_VALUE = "Value constraint violation: %s"; static final String CONSTRAINT_PROPERTY_PROTECTED = "Property is protected!"; static final String CONSTRAINT_PROPERTY_AUTO_CREATED = "Property is auto-created and can not be manually added"; static final String CONSTRAINT_PROPERTY_NOT_ALLOWED = "No applicable property definition found for name and type!"; static final String CONSTRAINT_CHILD_NODE_AUTO_CREATED = "Node is auto-created and can not be manually added"; static final String CONSTRAINT_CHILD_NODE_PROTECTED = "Node is protected and can not be manually added"; static final String CONSTRAINT_MIXIN_TYPE_AS_PRIMARY_TYPE = "Given node type is a mixin and cannot be used as primary node type."; static final String CONSTRAINT_ABSTRACT_TYPE_AS_PRIMARY_TYPE = "Given node type is abstract and cannot be used as primary node type."; static final String CONSTRAINT_CHILD_NODE_NOT_ALLOWED = "Node type does not allow arbitrary child nodes and does not allow this specific name and node type either!"; static final String MESSAGE_CHILD_NODE_NOT_ALLOWED = "Node '%s [%s]' is not allowed as child of node with %s: %s"; static final String MESSAGE_PROPERTY_NOT_ALLOWED = "Property '%s' [%s] is not allowed in node with %s: %s"; static final String MESSAGE_MANDATORY_CHILD_NODE_MISSING = "Mandatory child node missing: %s inside node with %s"; static final String MESSAGE_MANDATORY_UNCONTAINED_CHILD_NODE_MISSING = "Mandatory child node missing: %s inside node with types [%s] (outside of filter rules)"; static final String MESSAGE_MANDATORY_PROPERTY_MISSING = "Mandatory property '%s' missing in node with %s"; static final String MESSAGE_MANDATORY_PROPERTY_WITH_WRONG_TYPE = "Mandatory property '%s' has type '%s' while it should have '%s' in node with %s"; // do not validate protected JCR system properties that 
are handled by FileVault specially in https://github.com/apache/jackrabbit-filevault/blob/f785fcb24d4cbd01c734e9273310a925c29ae15b/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/io/DocViewSAXImporter.java#L123 and // https://github.com/apache/jackrabbit-filevault/blob/f785fcb24d4cbd01c734e9273310a925c29ae15b/vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/io/DocViewSAXImporter.java#L140 private static final Collection<Name> JCR_SYSTEM_PROPERTIES = Arrays.asList( NameConstants.JCR_PRIMARYTYPE, NameConstants.JCR_MIXINTYPES, NameConstants.JCR_UUID, NameConstants.JCR_BASEVERSION, NameConstants.JCR_PREDECESSORS, NameConstants.JCR_SUCCESSORS, NameConstants.JCR_VERSIONHISTORY, NameConstants.JCR_ISCHECKEDOUT, NameFactoryImpl.getInstance().create("http://jackrabbit.apache.org/oak/ns/1.0", "counter")); private static final Name NT_REP_POLICY = NameFactoryImpl.getInstance().create(Name.NS_REP_URI, "Policy"); private static final Name NT_REP_AUTHORIZABLE = NameFactoryImpl.getInstance().create(Name.NS_REP_URI, "Authorizable"); private static final QValueFactory QVALUE_FACTORY = QValueFactoryImpl.getInstance(); private final @NotNull Name name; private final @NotNull NodeContext context; private @Nullable Name primaryNodeType; // the effectiveNodeType does not remember which one was the primary one! 
private @Nullable EffectiveNodeType effectiveNodeType; private final @NotNull Map<Name, Integer> propertyTypesByName; private final @NotNull Map<Name, JcrNodeTypeMetaDataImpl> childNodesByName; private final @Nullable JcrNodeTypeMetaDataImpl parentNode; private boolean isAuthenticationOrAuthorizationContext; private final boolean isImplicit; // if this is true, the node type is set implicitly (not explicitly set in package, used as is in the // repository) private boolean isValidationDone; private final boolean isIncremental; private JcrNodeTypeMetaDataImpl(boolean isIncremental, @NotNull NodeContext context, @NotNull Name name, @Nullable Name primaryNodeType, @Nullable EffectiveNodeType effectiveNodeType, JcrNodeTypeMetaDataImpl parentNode, boolean isAuthenticationOrAuthorizationContext, boolean isImplicit) { super(); this.context = context; this.name = name; // fully namespaced (taking into account local namespace declaration for Docview XML) this.primaryNodeType = primaryNodeType; this.effectiveNodeType = effectiveNodeType; this.parentNode = parentNode; this.propertyTypesByName = new HashMap<>(); this.childNodesByName = new HashMap<>(); this.isAuthenticationOrAuthorizationContext = isAuthenticationOrAuthorizationContext; this.isImplicit = isImplicit; this.isValidationDone = false; this.isIncremental = isIncremental; } @Override public String toString() { return "JcrNodeTypeMetaDataImpl [" + "name=" + name + ", " + "effectiveNodeType=" + effectiveNodeType + ", " + "propertyTypesByName=" + propertyTypesByName + ", " + "childNodes=" + childNodesByName.keySet() + ", " // + "parentNode path="+(parentNode != null ? 
+ parentNode.getPath() + ", " : "") + "isAuthenticationOrAuthorizationContext=" + isAuthenticationOrAuthorizationContext + "]"; } @Override public void setUnknownNodeTypes() { this.primaryNodeType = null; this.effectiveNodeType = null; } @Override public void setNodeTypes(@NotNull NameResolver nameResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, boolean isFallbackPrimaryType, @NotNull String primaryType, String... mixinTypes) throws IllegalNameException, ConstraintViolationException, NoSuchNodeTypeException, NamespaceException { List<Name> types = getTypes(nameResolver, primaryType, mixinTypes); if (effectiveNodeType == null || (!isFallbackPrimaryType && !effectiveNodeType.includesNodeTypes(types.toArray(new Name[0])))) { // only override if not a default node type this.primaryNodeType = types.get(0); this.effectiveNodeType = effectiveNodeTypeProvider.getEffectiveNodeType(types.toArray(new Name[0])); if (!isAuthenticationOrAuthorizationContext) { isAuthenticationOrAuthorizationContext = isAclOrAuthorizableNodeType(effectiveNodeType); } } } @Override public Name getPrimaryNodeType() { return primaryNodeType; } private static boolean isAclOrAuthorizableNodeType(EffectiveNodeType effectiveNodeType) { return effectiveNodeType.includesNodeType(NT_REP_AUTHORIZABLE) || effectiveNodeType.includesNodeType(NT_REP_POLICY); } private enum NameType { NODE_NAME("node name"), PRIMARY_TYPE("primary type"), MIXIN_TYPE("mixin type"); private final String label; NameType(String label) { this.label = label; } public String getLabel() { return label; } } private static @NotNull Name getQName(@NotNull NameResolver nameResolver, @NotNull String name, @NotNull NameType type) throws IllegalNameException, NamespaceException { try { Name qName = nameResolver.getQName(name); // was it a namespace which has been generated on demand before? 
if (type != NameType.NODE_NAME && qName.getNamespaceURI().startsWith(OnDemandRegisterNamespaceResolverWrapper.UNDECLARED_NAMESPACE_URI_PREFIX)) { int posColon = name.indexOf(':'); // extract prefix String prefix = name.substring(0, posColon); throw new NamespaceException(prefix + ": is not a registered namespace prefix."); } return qName; } catch (NamespaceException e) { if (type == NameType.NODE_NAME) { throw new NamespaceExceptionInNodeName( String.format(Locale.ENGLISH, EXCEPTION_MESSAGE_INVALID_NAME, type.getLabel(), name, e.getLocalizedMessage()), e); } throw new NamespaceException(String.format(Locale.ENGLISH, EXCEPTION_MESSAGE_INVALID_NAME, type.getLabel(), name, e.getLocalizedMessage()), e); } catch (IllegalNameException e) { throw new IllegalNameException(String.format(Locale.ENGLISH, EXCEPTION_MESSAGE_INVALID_NAME, type.getLabel(), name, e.getLocalizedMessage()), e); } } @Override public @NotNull JcrNodeTypeMetaData addImplicitChildNode(@NotNull NameResolver nameResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, @NotNull NodeContext nodeContext, @Nullable Name implicitNodeType) throws RepositoryException { JcrNodeTypeMetaDataImpl childNode = addChildNode(nameResolver, effectiveNodeTypeProvider, nodeTypeDefinitionProvider, itemDefinitionProvider, true, nodeContext, Text.getName(nodeContext.getNodePath()), implicitNodeType); return childNode; } @Override public @NotNull JcrNodeTypeMetaData addUnknownChildNode(@NotNull NameResolver nameResolver, @NotNull NodeContext context, @NotNull String name) throws IllegalNameException, NamespaceException { return addUnknownChildNode(context, getQName(nameResolver, name, NameType.NODE_NAME)); } private @NotNull JcrNodeTypeMetaDataImpl addUnknownChildNode(@NotNull NodeContext context, @NotNull Name name) throws IllegalNameException { JcrNodeTypeMetaDataImpl childNode = new 
JcrNodeTypeMetaDataImpl(this.isIncremental, context, name, null, null, this, false, false); childNodesByName.put(name, childNode); return childNode; } @Override public @NotNull JcrNodeTypeMetaData addChildNode(@NotNull NameResolver nameResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, @NotNull NodeContext nodeContext, @NotNull String primaryType, String... mixinTypes) throws IllegalNameException, RepositoryException, NamespaceExceptionInNodeName { List<Name> types = getTypes(nameResolver, primaryType, mixinTypes); String nodeName = Text.getName(nodeContext.getNodePath()); JcrNodeTypeMetaDataImpl childNode = addChildNode(nameResolver, effectiveNodeTypeProvider, nodeTypeDefinitionProvider, itemDefinitionProvider, false, nodeContext, nodeName, types.toArray(new Name[0])); // defer validation return childNode; } private static List<Name> getTypes(@NotNull NameResolver nameResolver, @NotNull String primaryType, String... mixinTypes) throws IllegalNameException, NamespaceException { List<Name> types = new ArrayList<>(); types.add(getQName(nameResolver, primaryType, NameType.PRIMARY_TYPE)); if (mixinTypes != null) { for (String mixinType : mixinTypes) { types.add(getQName(nameResolver, mixinType, NameType.MIXIN_TYPE)); } } return types; } private @NotNull JcrNodeTypeMetaDataImpl addChildNode(@NotNull NameResolver nameResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, boolean isImplicit, @NotNull NodeContext context, @NotNull String name, @Nullable Name... 
nodeTypes) throws ConstraintViolationException, NoSuchNodeTypeException, NamespaceExceptionInNodeName, NamespaceException, IllegalNameException { final Name qName = getQName(nameResolver, name, NameType.NODE_NAME); // special handling for users and acls boolean isAuthenticationOrAuthorizationContext = false; final EffectiveNodeType newEffectiveNodeType; final Name newPrimaryNodeType; if (nodeTypes != null) { newEffectiveNodeType = effectiveNodeTypeProvider.getEffectiveNodeType(nodeTypes); newPrimaryNodeType = nodeTypes[0]; isAuthenticationOrAuthorizationContext = isAclOrAuthorizableNodeType(newEffectiveNodeType); } else { newEffectiveNodeType = null; newPrimaryNodeType = null; } // special handling for users and acls if (!isAuthenticationOrAuthorizationContext) { isAuthenticationOrAuthorizationContext = this.isAuthenticationOrAuthorizationContext; } JcrNodeTypeMetaDataImpl newNode = new JcrNodeTypeMetaDataImpl(this.isIncremental, context, qName, newPrimaryNodeType, newEffectiveNodeType, this, isAuthenticationOrAuthorizationContext, isImplicit); childNodesByName.put(qName, newNode); return newNode; } /** Similar to * {@link EffectiveNodeType#checkAddNodeConstraints(Name, org.apache.jackrabbit.spi.QNodeTypeDefinition, ItemDefinitionProvider)} * * @param parentEffectiveNodeType * @return constraints violation message * @throws RepositoryException */ private Optional<String> validateAgainstParentNodeType(@NotNull EffectiveNodeType parentEffectiveNodeType, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider) throws RepositoryException { if (effectiveNodeType == null || primaryNodeType == null) { return Optional.empty(); } // except for ACL node types (for which the mixin rep:AccessControllable is transparently added) everything must comply with the // parent node rules if (effectiveNodeType.includesNodeType(NT_REP_POLICY)) { return Optional.empty(); } QNodeTypeDefinition primaryNodeTypeDefinition = 
nodeTypeDefinitionProvider.getNodeTypeDefinition(primaryNodeType); if (primaryNodeTypeDefinition.isAbstract()) { return Optional.of(CONSTRAINT_ABSTRACT_TYPE_AS_PRIMARY_TYPE); } else if (primaryNodeTypeDefinition.isMixin()) { return Optional.of(CONSTRAINT_MIXIN_TYPE_AS_PRIMARY_TYPE); } try { // get applicable node type from parent QNodeDefinition applicableParentNodeDefinition = itemDefinitionProvider.getQNodeDefinition(parentEffectiveNodeType, this.name, primaryNodeType); if (!isAuthenticationOrAuthorizationContext && applicableParentNodeDefinition.isProtected()) { return Optional.of(CONSTRAINT_CHILD_NODE_PROTECTED); } if (applicableParentNodeDefinition.isAutoCreated()) { return Optional.of(CONSTRAINT_CHILD_NODE_AUTO_CREATED); } } catch (ConstraintViolationException e) { return Optional.of(CONSTRAINT_CHILD_NODE_NOT_ALLOWED); } return Optional.empty(); } @Override public @NotNull Collection<ValidationMessage> finalizeValidation(@NotNull NamePathResolver namePathResolver, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, @NotNull ValidationMessageSeverity severity, @NotNull ValidationMessageSeverity severityForDefaultNodeTypeViolations, @NotNull WorkspaceFilter filter) throws NamespaceException { if (!isValidationDone) { Collection<ValidationMessage> messages = new LinkedList<>(); // in incremental validations ignore missing mandatory properties and child nodes (as they might not be visible to the validator) if (!isIncremental) { messages.add(new ValidationMessage(ValidationMessageSeverity.DEBUG, "Validate children and mandatory properties of " + getQualifiedPath(namePathResolver), context)); messages.addAll(validateChildNodes(namePathResolver, nodeTypeDefinitionProvider, itemDefinitionProvider, severity, severityForDefaultNodeTypeViolations, filter)); messages.addAll(validateMandatoryProperties(namePathResolver, severity, severityForDefaultNodeTypeViolations)); } // only remove child nodes on 2nd 
level to be able to validate mandatory properties of parent childNodesByName.clear(); isValidationDone = true; messages.add(new ValidationMessage(ValidationMessageSeverity.DEBUG, "Remove node information of children of " + getQualifiedPath(namePathResolver), context)); return messages; } else { return Collections.singletonList(new ValidationMessage(ValidationMessageSeverity.DEBUG, "Already finalized validation of " + getQualifiedPath(namePathResolver), context)); } } private Collection<ValidationMessage> validateChildNodes(@NotNull NamePathResolver namePathResolver, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, @NotNull ValidationMessageSeverity severity, @NotNull ValidationMessageSeverity severityForDefaultNodeTypeViolations, @NotNull WorkspaceFilter filter) { if (effectiveNodeType == null) { return Collections.emptyList(); } Collection<ValidationMessage> messages = new LinkedList<>(); // validate child nodes against parent node type definition for (JcrNodeTypeMetaDataImpl childNode : childNodesByName.values()) { Optional<String> constraintViolation; try { constraintViolation = childNode.validateAgainstParentNodeType(effectiveNodeType, nodeTypeDefinitionProvider, itemDefinitionProvider); if (constraintViolation.isPresent()) { messages.add(new ValidationMessage(isImplicit ? 
severityForDefaultNodeTypeViolations : severity, String.format(Locale.ENGLISH, MESSAGE_CHILD_NODE_NOT_ALLOWED, namePathResolver.getJCRName(childNode.name), namePathResolver.getJCRName(childNode.primaryNodeType), getEffectiveNodeTypeLabel(namePathResolver, effectiveNodeType), constraintViolation.get()), childNode.context)); } } catch (RepositoryException e) { throw new IllegalStateException("Could not validate child node " + childNode.name + " against parent node definition", e); } } // validate mandatory child nodes of children for (QNodeDefinition mandatoryNodeType : effectiveNodeType.getMandatoryQNodeDefinitions()) { // skip auto created ones if (mandatoryNodeType.isAutoCreated()) { continue; } boolean foundRequiredChildNode = false; for (JcrNodeTypeMetaDataImpl child : childNodesByName.values()) { foundRequiredChildNode = child.fulfillsNodeDefinition(mandatoryNodeType); } if (!foundRequiredChildNode && !mandatoryNodeType.getName().equals(NameConstants.ANY_NAME)) { PathBuilder pathBuilder = new PathBuilder(this.getPath()); pathBuilder.addLast(mandatoryNodeType.getName()); try { if (filter.contains(namePathResolver.getJCRPath(pathBuilder.getPath()))) { messages.add(new ValidationMessage(isImplicit ? 
severityForDefaultNodeTypeViolations : severity, String.format(Locale.ENGLISH, MESSAGE_MANDATORY_CHILD_NODE_MISSING, getNodeDefinitionLabel(namePathResolver, mandatoryNodeType), getEffectiveNodeTypeLabel(namePathResolver, effectiveNodeType)), context)); } else { // if mandatory child nodes are missing outside filter rules, this is not an issue messages.add(new ValidationMessage(ValidationMessageSeverity.DEBUG, String.format(Locale.ENGLISH, MESSAGE_MANDATORY_UNCONTAINED_CHILD_NODE_MISSING, getNodeDefinitionLabel(namePathResolver, mandatoryNodeType), getEffectiveNodeTypeLabel(namePathResolver, effectiveNodeType)), context)); } } catch (NamespaceException | MalformedPathException e) { throw new IllegalStateException("Could not give out node types and name for " + mandatoryNodeType, e); } } } return messages; } private String getEffectiveNodeTypeLabel(NameResolver nameResolver, EffectiveNodeType nodeType) throws NamespaceException { String label; String types = joinAsQualifiedJcrName(nameResolver, nodeType.getMergedNodeTypes()); if (isImplicit) label = String.format(Locale.ENGLISH, "potential default types [%s]", types); else { label = String.format(Locale.ENGLISH, "types [%s]", types); } return label; } private static String getNodeDefinitionLabel(NameResolver nameResolver, QNodeDefinition nodeDefinition) throws NamespaceException { return nameResolver.getJCRName(nodeDefinition.getName()) + " [" + joinAsQualifiedJcrName(nameResolver, nodeDefinition.getRequiredPrimaryTypes()) + "]"; } private static String joinAsQualifiedJcrName(NameResolver nameResolver, Name[] names) throws NamespaceException { StringBuilder types = new StringBuilder(); String delimiter = ""; for (Name name : names) { types.append(delimiter).append(nameResolver.getJCRName(name)); delimiter = ", "; } return types.toString(); } @Override public @NotNull Collection<@NotNull ? 
extends JcrNodeTypeMetaData> getChildren() { return childNodesByName.values(); } @Override public @NotNull JcrNodeTypeMetaData getOrCreateNode(NamePathResolver nameResolver, @NotNull NodeContext nodeContext, String path) throws RepositoryException { return getNode(nameResolver, nodeContext, path, true).get(); } @Override public Optional<JcrNodeTypeMetaData> getNode(NamePathResolver nameResolver, String path) throws RepositoryException { return getNode(nameResolver, null, path, false); } private Optional<JcrNodeTypeMetaData> getNode(NamePathResolver nameResolver, @Nullable NodeContext nodeContext, String path, boolean shouldCreateIfMissing) throws RepositoryException { // convert to fully namespaced path Path qPath = nameResolver.getQPath(path); // navigate there Path qRelativePath = getPath().computeRelativePath(qPath); // first go up until you reach a common parent @NotNull JcrNodeTypeMetaDataImpl currentNode = this; for (Element element : qRelativePath.getElements()) { if (!element.denotesParent()) { break; } else { currentNode = currentNode.parentNode; } } qRelativePath = currentNode.getPath().computeRelativePath(qPath); // then go down until you match the path for (Element element : qRelativePath.getElements()) { if (element.denotesCurrent()) { continue; } JcrNodeTypeMetaDataImpl childNode = currentNode.childNodesByName.get(element.getName()); if (childNode == null) { if (shouldCreateIfMissing) { if (nodeContext == null) { throw new IllegalArgumentException("Node context must be given in case node is created but is null"); } childNode = currentNode.addUnknownChildNode(nodeContext, element.getName()); } else { return Optional.empty(); } } currentNode = childNode; } return Optional.of(currentNode); } private Collection<ValidationMessage> validateMandatoryProperties(@NotNull NamePathResolver nameResolver, @NotNull ValidationMessageSeverity severity, @NotNull ValidationMessageSeverity severityForDefaultNodeTypeViolations) { if (effectiveNodeType == null) { return 
Collections.emptyList(); } Collection<ValidationMessage> messages = new ArrayList<>(); // are all mandatory properties covered? for (QPropertyDefinition mandatoryPropertyDefinition : effectiveNodeType.getMandatoryQPropertyDefinitions()) { // ignore auto-created properties as they are created on-demand if (mandatoryPropertyDefinition.isAutoCreated()) { continue; } // ignore certain properties which are handled specially in filevault if (JCR_SYSTEM_PROPERTIES.contains(mandatoryPropertyDefinition.getName()) ) { continue; } try { if (!propertyTypesByName.containsKey(mandatoryPropertyDefinition.getName())) { messages.add(new ValidationMessage(isImplicit ? severityForDefaultNodeTypeViolations : severity, String.format(Locale.ENGLISH, MESSAGE_MANDATORY_PROPERTY_MISSING, nameResolver.getJCRName(mandatoryPropertyDefinition.getName()), getEffectiveNodeTypeLabel(nameResolver, effectiveNodeType)), context)); } else { // check type int actualPropertyType = propertyTypesByName.get(mandatoryPropertyDefinition.getName()); if (mandatoryPropertyDefinition.getRequiredType() != actualPropertyType) { // check type messages.add(new ValidationMessage(isImplicit ? 
severityForDefaultNodeTypeViolations : severity, String.format(Locale.ENGLISH, MESSAGE_MANDATORY_PROPERTY_WITH_WRONG_TYPE, nameResolver.getJCRName(mandatoryPropertyDefinition.getName()), PropertyType.nameFromValue(actualPropertyType), PropertyType.nameFromValue(mandatoryPropertyDefinition.getRequiredType()), getEffectiveNodeTypeLabel(nameResolver, effectiveNodeType)), context)); } } } catch (NamespaceException e) { throw new IllegalStateException("Could not give out parent node types or property names for " + mandatoryPropertyDefinition, e); } } return messages; } @Override public Collection<ValidationMessage> addProperty(@NotNull NodeContext nodeContext, @NotNull NamePathResolver namePathResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, @NotNull ValidationMessageSeverity severity, @NotNull ValidationMessageSeverity severityForDefaultNodeTypeViolations, String name, boolean isMultiValue, Value... 
values) throws RepositoryException { Collection<ValidationMessage> messages = new ArrayList<>(); // some sanity checks on multivalue if (!isMultiValue && values.length > 1) { throw new IllegalArgumentException("isMultiValue is only supposed to be false if exactly one value is passed but " + values.length + " values were passed!"); } if (values.length == 0) { // unable to proceed when no value is present return messages; } Name qName; try { qName = namePathResolver.getQName(name); } catch (IllegalNameException | NamespaceException e) { throw new IllegalNameException("Invalid property name " + name, e); } propertyTypesByName.put(qName, values[0].getType()); // now check for validity Optional<String> constraintViolation = validatePropertyConstraints(namePathResolver, effectiveNodeTypeProvider, nodeTypeDefinitionProvider, itemDefinitionProvider, qName, values, isAuthenticationOrAuthorizationContext, isMultiValue); if (constraintViolation.isPresent()) { messages.add(new ValidationMessage(isImplicit ? 
severityForDefaultNodeTypeViolations : severity, String.format(Locale.ENGLISH, MESSAGE_PROPERTY_NOT_ALLOWED, namePathResolver.getJCRName(qName), PropertyType.nameFromValue(values[0].getType()), getEffectiveNodeTypeLabel(namePathResolver, effectiveNodeType), constraintViolation.get()), nodeContext)); } return messages; } private @NotNull Optional<String> validatePropertyConstraints(@NotNull NamePathResolver namePathResolver, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider, @NotNull NodeTypeDefinitionProvider nodeTypeDefinitionProvider, @NotNull ItemDefinitionProvider itemDefinitionProvider, Name name, Value[] values, boolean allowProtected, boolean isMultiValue) throws RepositoryException { if (effectiveNodeType == null) { return Optional.empty(); } QPropertyDefinition applicablePropertyDefinition; try { applicablePropertyDefinition = getPropertyDefinition(name, values[0].getType(), effectiveNodeType, itemDefinitionProvider, isMultiValue); } catch (ConstraintViolationException t) { return Optional.of(CONSTRAINT_PROPERTY_NOT_ALLOWED); } if (applicablePropertyDefinition.isProtected() && !allowProtected && !JCR_SYSTEM_PROPERTIES.contains(name)) { return Optional.of(CONSTRAINT_PROPERTY_PROTECTED); } for (Value value : values) { try { QValue qValue = ValueFormat.getQValue(value, namePathResolver, QVALUE_FACTORY); ValueConstraint.checkValueConstraints(applicablePropertyDefinition, new QValue[] { qValue }); } catch (ConstraintViolationException e) { return Optional.of(String.format(Locale.ENGLISH, CONSTRAINT_PROPERTY_VALUE, e.getLocalizedMessage())); } } return Optional.empty(); } private static QPropertyDefinition getPropertyDefinition(Name name, int type, EffectiveNodeType effectiveNodeType, ItemDefinitionProvider itemDefinitionProvider, boolean isMultiValue) throws NoSuchNodeTypeException, ConstraintViolationException { QPropertyDefinition def; try { def = itemDefinitionProvider.getQPropertyDefinition(effectiveNodeType.getAllNodeTypes(), name, type, 
isMultiValue); } catch (ConstraintViolationException e) { if (type != PropertyType.UNDEFINED) { def = itemDefinitionProvider.getQPropertyDefinition(effectiveNodeType.getAllNodeTypes(), name, PropertyType.UNDEFINED, isMultiValue); } else { throw e; } } return def; } private boolean fulfillsNodeDefinition(QNodeDefinition nodeDefinition) { // name must match if (!nodeDefinition.getName().equals(NameConstants.ANY_NAME) && !nodeDefinition.getName().equals(name)) { return false; } if (effectiveNodeType == null) { // if effective node type cannot be determined, assume the worst case (i.e. node does not match definition) return false; } for (Name requiredType : nodeDefinition.getRequiredPrimaryTypes()) { // type must match all of the given types if (!effectiveNodeType.includesNodeType(requiredType)) { return false; } } return true; } public static @NotNull JcrNodeTypeMetaDataImpl createRoot(boolean isIncremental, @NotNull EffectiveNodeTypeProvider effectiveNodeTypeProvider) throws ConstraintViolationException, NoSuchNodeTypeException { return new JcrNodeTypeMetaDataImpl(isIncremental, new NodeContextImpl("", Paths.get(""), Paths.get("")), NameConstants.ROOT, NameConstants.REP_ROOT, effectiveNodeTypeProvider.getEffectiveNodeType( new Name[] { NameConstants.REP_ROOT, NameConstants.REP_ACCESS_CONTROLLABLE, NameConstants.REP_REPO_ACCESS_CONTROLLABLE }), null, false, false); } private Path getPath() { if (parentNode == null) { return PathFactoryImpl.getInstance().getRootPath(); } else { PathBuilder pathBuilder = new PathBuilder(parentNode.getPath()); pathBuilder.addLast(name); try { return pathBuilder.getPath(); } catch (MalformedPathException e) { throw new IllegalStateException("Could not create path from parent and name", e); } } } @Override public String getQualifiedPath(NamePathResolver resolver) throws NamespaceException { return resolver.getJCRPath(getPath()); } }
googleapis/google-cloud-java
37,308
java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/RecurringTimeWindow.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1beta1/cluster_service.proto // Protobuf Java Version: 3.25.8 package com.google.container.v1beta1; /** * * * <pre> * Represents an arbitrary window of time that recurs. * </pre> * * Protobuf type {@code google.container.v1beta1.RecurringTimeWindow} */ public final class RecurringTimeWindow extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1beta1.RecurringTimeWindow) RecurringTimeWindowOrBuilder { private static final long serialVersionUID = 0L; // Use RecurringTimeWindow.newBuilder() to construct. 
private RecurringTimeWindow(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RecurringTimeWindow() { recurrence_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RecurringTimeWindow(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_RecurringTimeWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_RecurringTimeWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1beta1.RecurringTimeWindow.class, com.google.container.v1beta1.RecurringTimeWindow.Builder.class); } private int bitField0_; public static final int WINDOW_FIELD_NUMBER = 1; private com.google.container.v1beta1.TimeWindow window_; /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> * * @return Whether the window field is set. */ @java.lang.Override public boolean hasWindow() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> * * @return The window. */ @java.lang.Override public com.google.container.v1beta1.TimeWindow getWindow() { return window_ == null ? com.google.container.v1beta1.TimeWindow.getDefaultInstance() : window_; } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ @java.lang.Override public com.google.container.v1beta1.TimeWindowOrBuilder getWindowOrBuilder() { return window_ == null ? 
com.google.container.v1beta1.TimeWindow.getDefaultInstance() : window_; } public static final int RECURRENCE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object recurrence_ = ""; /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. * * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @return The recurrence. */ @java.lang.Override public java.lang.String getRecurrence() { java.lang.Object ref = recurrence_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); recurrence_ = s; return s; } } /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. 
* * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @return The bytes for recurrence. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRecurrenceBytes() { java.lang.Object ref = recurrence_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); recurrence_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getWindow()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recurrence_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, recurrence_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWindow()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(recurrence_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, recurrence_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.container.v1beta1.RecurringTimeWindow)) { return super.equals(obj); } com.google.container.v1beta1.RecurringTimeWindow other = (com.google.container.v1beta1.RecurringTimeWindow) obj; if (hasWindow() != other.hasWindow()) return false; if (hasWindow()) { if (!getWindow().equals(other.getWindow())) return false; } if (!getRecurrence().equals(other.getRecurrence())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasWindow()) { hash = (37 * hash) + WINDOW_FIELD_NUMBER; hash = (53 * hash) + getWindow().hashCode(); } hash = (37 * hash) + RECURRENCE_FIELD_NUMBER; hash = (53 * hash) + getRecurrence().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1beta1.RecurringTimeWindow 
parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1beta1.RecurringTimeWindow parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1beta1.RecurringTimeWindow parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1beta1.RecurringTimeWindow parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1beta1.RecurringTimeWindow prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents an arbitrary window of time that recurs. * </pre> * * Protobuf type {@code google.container.v1beta1.RecurringTimeWindow} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1beta1.RecurringTimeWindow) com.google.container.v1beta1.RecurringTimeWindowOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_RecurringTimeWindow_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_RecurringTimeWindow_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1beta1.RecurringTimeWindow.class, com.google.container.v1beta1.RecurringTimeWindow.Builder.class); } // Construct using com.google.container.v1beta1.RecurringTimeWindow.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getWindowFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; window_ = null; if (windowBuilder_ != null) { windowBuilder_.dispose(); windowBuilder_ = null; } recurrence_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.container.v1beta1.ClusterServiceProto .internal_static_google_container_v1beta1_RecurringTimeWindow_descriptor; } @java.lang.Override public com.google.container.v1beta1.RecurringTimeWindow getDefaultInstanceForType() { return com.google.container.v1beta1.RecurringTimeWindow.getDefaultInstance(); } @java.lang.Override public com.google.container.v1beta1.RecurringTimeWindow build() { com.google.container.v1beta1.RecurringTimeWindow result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1beta1.RecurringTimeWindow buildPartial() { com.google.container.v1beta1.RecurringTimeWindow result = new com.google.container.v1beta1.RecurringTimeWindow(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.container.v1beta1.RecurringTimeWindow result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.window_ = windowBuilder_ == null ? 
window_ : windowBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.recurrence_ = recurrence_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1beta1.RecurringTimeWindow) { return mergeFrom((com.google.container.v1beta1.RecurringTimeWindow) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1beta1.RecurringTimeWindow other) { if (other == com.google.container.v1beta1.RecurringTimeWindow.getDefaultInstance()) return this; if (other.hasWindow()) { mergeWindow(other.getWindow()); } if (!other.getRecurrence().isEmpty()) { recurrence_ = other.recurrence_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getWindowFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { recurrence_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.container.v1beta1.TimeWindow window_; private com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1beta1.TimeWindow, com.google.container.v1beta1.TimeWindow.Builder, com.google.container.v1beta1.TimeWindowOrBuilder> windowBuilder_; /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> * * @return Whether the window field is set. */ public boolean hasWindow() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> * * @return The window. */ public com.google.container.v1beta1.TimeWindow getWindow() { if (windowBuilder_ == null) { return window_ == null ? com.google.container.v1beta1.TimeWindow.getDefaultInstance() : window_; } else { return windowBuilder_.getMessage(); } } /** * * * <pre> * The window of the first recurrence. 
* </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public Builder setWindow(com.google.container.v1beta1.TimeWindow value) { if (windowBuilder_ == null) { if (value == null) { throw new NullPointerException(); } window_ = value; } else { windowBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public Builder setWindow(com.google.container.v1beta1.TimeWindow.Builder builderForValue) { if (windowBuilder_ == null) { window_ = builderForValue.build(); } else { windowBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public Builder mergeWindow(com.google.container.v1beta1.TimeWindow value) { if (windowBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && window_ != null && window_ != com.google.container.v1beta1.TimeWindow.getDefaultInstance()) { getWindowBuilder().mergeFrom(value); } else { window_ = value; } } else { windowBuilder_.mergeFrom(value); } if (window_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public Builder clearWindow() { bitField0_ = (bitField0_ & ~0x00000001); window_ = null; if (windowBuilder_ != null) { windowBuilder_.dispose(); windowBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public com.google.container.v1beta1.TimeWindow.Builder getWindowBuilder() { bitField0_ |= 0x00000001; onChanged(); return getWindowFieldBuilder().getBuilder(); } /** * * * <pre> * The window of the first recurrence. 
* </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ public com.google.container.v1beta1.TimeWindowOrBuilder getWindowOrBuilder() { if (windowBuilder_ != null) { return windowBuilder_.getMessageOrBuilder(); } else { return window_ == null ? com.google.container.v1beta1.TimeWindow.getDefaultInstance() : window_; } } /** * * * <pre> * The window of the first recurrence. * </pre> * * <code>.google.container.v1beta1.TimeWindow window = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1beta1.TimeWindow, com.google.container.v1beta1.TimeWindow.Builder, com.google.container.v1beta1.TimeWindowOrBuilder> getWindowFieldBuilder() { if (windowBuilder_ == null) { windowBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.container.v1beta1.TimeWindow, com.google.container.v1beta1.TimeWindow.Builder, com.google.container.v1beta1.TimeWindowOrBuilder>( getWindow(), getParentForChildren(), isClean()); window_ = null; } return windowBuilder_; } private java.lang.Object recurrence_ = ""; /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. * * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. 
Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @return The recurrence. */ public java.lang.String getRecurrence() { java.lang.Object ref = recurrence_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); recurrence_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. * * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. 
* The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @return The bytes for recurrence. */ public com.google.protobuf.ByteString getRecurrenceBytes() { java.lang.Object ref = recurrence_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); recurrence_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. * * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @param value The recurrence to set. * @return This builder for chaining. 
*/ public Builder setRecurrence(java.lang.String value) { if (value == null) { throw new NullPointerException(); } recurrence_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. * * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @return This builder for chaining. */ public Builder clearRecurrence() { recurrence_ = getDefaultInstance().getRecurrence(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * An RRULE (https://tools.ietf.org/html/rfc5545#section-3.8.5.3) for how * this window reccurs. They go on for the span of time between the start and * end time. 
* * For example, to have something repeat every weekday, you'd use: * `FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR` * * To repeat some window daily (equivalent to the DailyMaintenanceWindow): * `FREQ=DAILY` * * For the first weekend of every month: * `FREQ=MONTHLY;BYSETPOS=1;BYDAY=SA,SU` * * This specifies how frequently the window starts. Eg, if you wanted to have * a 9-5 UTC-4 window every weekday, you'd use something like: * ``` * start time = 2019-01-01T09:00:00-0400 * end time = 2019-01-01T17:00:00-0400 * recurrence = FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR * ``` * * Windows can span multiple days. Eg, to make the window encompass every * weekend from midnight Saturday till the last minute of Sunday UTC: * ``` * start time = 2019-01-05T00:00:00Z * end time = 2019-01-07T23:59:00Z * recurrence = FREQ=WEEKLY;BYDAY=SA * ``` * * Note the start and end time's specific dates are largely arbitrary except * to specify duration of the window and when it first starts. * The FREQ values of HOURLY, MINUTELY, and SECONDLY are not supported. * </pre> * * <code>string recurrence = 2;</code> * * @param value The bytes for recurrence to set. * @return This builder for chaining. 
*/ public Builder setRecurrenceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); recurrence_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1beta1.RecurringTimeWindow) } // @@protoc_insertion_point(class_scope:google.container.v1beta1.RecurringTimeWindow) private static final com.google.container.v1beta1.RecurringTimeWindow DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1beta1.RecurringTimeWindow(); } public static com.google.container.v1beta1.RecurringTimeWindow getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RecurringTimeWindow> PARSER = new com.google.protobuf.AbstractParser<RecurringTimeWindow>() { @java.lang.Override public RecurringTimeWindow parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<RecurringTimeWindow> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RecurringTimeWindow> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1beta1.RecurringTimeWindow getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
37,205
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/over/NonTimeRowsUnboundedPrecedingFunctionTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.operators.over; import org.apache.flink.api.common.state.MapState; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.streaming.api.operators.KeyedProcessOperator; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.table.data.GenericRowData; import org.apache.flink.table.data.RowData; import org.apache.flink.table.runtime.generated.GeneratedRecordComparator; import org.apache.flink.types.RowKind; import org.junit.Test; import java.time.Duration; import java.util.Arrays; import java.util.List; import static org.apache.flink.table.runtime.util.StreamRecordUtils.insertRecord; import static org.apache.flink.table.runtime.util.StreamRecordUtils.updateAfterRecord; import static org.apache.flink.table.runtime.util.StreamRecordUtils.updateBeforeRecord; import static org.assertj.core.api.Assertions.assertThat; /** Test for {@link NonTimeRowsUnboundedPrecedingFunction}. 
*/ public class NonTimeRowsUnboundedPrecedingFunctionTest extends NonTimeOverWindowTestBase { private NonTimeRowsUnboundedPrecedingFunction<RowData> getNonTimeRowsUnboundedPrecedingFunction( long retentionTime, GeneratedRecordComparator generatedSortKeyComparator) { return new NonTimeRowsUnboundedPrecedingFunction<RowData>( retentionTime, aggsHandleFunction, GENERATED_ROW_VALUE_EQUALISER, GENERATED_SORT_KEY_EQUALISER, generatedSortKeyComparator, accTypes, inputFieldTypes, SORT_KEY_TYPES, SORT_KEY_SELECTOR) {}; } @Test public void testInsertOnlyRecordsWithCustomSortKey() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(insertRecord("key2", 1L, 100L)); testHarness.processElement(insertRecord("key2", 2L, 200L)); // out of order record should trigger updates for all records after its inserted position testHarness.processElement(insertRecord("key1", 4L, 400L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L), outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 4L, 400L, 7L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 12L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L), 
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 18L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testInsertOnlyRecordsWithCustomSortKeyAndLongSumAgg() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( new NonTimeRowsUnboundedPrecedingFunction<RowData>( 0L, aggsSumLongHandleFunction, GENERATED_ROW_VALUE_EQUALISER, GENERATED_SORT_KEY_EQUALISER, GENERATED_SORT_KEY_COMPARATOR_ASC, accTypes, inputFieldTypes, SORT_KEY_TYPES, SORT_KEY_SELECTOR) {}); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(insertRecord("key2", 1L, 100L)); testHarness.processElement(insertRecord("key2", 2L, 200L)); // out of order record should trigger updates for all records after its inserted position testHarness.processElement(insertRecord("key1", 4L, 400L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L), outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 4L, 400L, 7L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 12L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 18L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public 
void testInsertOnlyRecordsWithDuplicateSortKeys() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 5L, 502L)); testHarness.processElement(insertRecord("key1", 5L, 501L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(insertRecord("key2", 1L, 100L)); testHarness.processElement(insertRecord("key2", 2L, 200L)); // out of order record should trigger updates for all records after its inserted position testHarness.processElement(insertRecord("key1", 2L, 203L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 4L, 400L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L), outputRecord(RowKind.INSERT, "key1", 5L, 502L, 13L), outputRecord(RowKind.INSERT, "key1", 5L, 501L, 18L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 24L), outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 203L, 5L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 10L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 13L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 15L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 18L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 20L), 
outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 24L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 26L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 7L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 12L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 17L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 22L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 28L), outputRecord(RowKind.INSERT, "key1", 4L, 400L, 11L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 12L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 16L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 17L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 21L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 22L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 26L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 28L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 32L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractingRecordsWithCustomSortKey() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 2L, 200L)); 
testHarness.processElement(updateAfterRecord("key1", 3L, 200L)); testHarness.processElement(insertRecord("key2", 1L, 100L)); testHarness.processElement(insertRecord("key2", 2L, 200L)); testHarness.processElement(insertRecord("key3", 1L, 100L)); testHarness.processElement(insertRecord("key1", 4L, 400L)); testHarness.processElement(updateBeforeRecord("key1", 3L, 200L)); testHarness.processElement(updateAfterRecord("key1", 3L, 300L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 8L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 14L), outputRecord(RowKind.DELETE, "key1", 2L, 200L, 3L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 8L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 6L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 14L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 12L), outputRecord(RowKind.UPDATE_AFTER, "key1", 3L, 200L, 4L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 6L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 9L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 12L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 15L), outputRecord(RowKind.INSERT, "key2", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key2", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key3", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 4L, 400L, 8L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 9L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 13L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 15L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 19L), outputRecord(RowKind.DELETE, "key1", 3L, 200L, 4L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 4L, 400L, 8L), outputRecord(RowKind.UPDATE_AFTER, "key1", 4L, 400L, 5L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 13L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 
10L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 19L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 16L), outputRecord(RowKind.UPDATE_AFTER, "key1", 3L, 300L, 4L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 4L, 400L, 5L), outputRecord(RowKind.UPDATE_AFTER, "key1", 4L, 400L, 8L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 10L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 13L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 16L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 19L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractWithFirstDuplicateSortKey() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 5L, 502L)); testHarness.processElement(insertRecord("key1", 5L, 501L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 5L, 500L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L), outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L), outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L), outputRecord(RowKind.DELETE, "key1", 5L, 500L, 10L), 
outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 502L, 15L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 502L, 10L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 15L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractWithMiddleDuplicateSortKey() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 5L, 502L)); testHarness.processElement(insertRecord("key1", 5L, 501L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 5L, 502L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L), outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L), outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L), outputRecord(RowKind.DELETE, "key1", 5L, 502L, 15L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 501L, 20L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 501L, 15L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L), 
outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractWithLastDuplicateSortKey() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 5L, 502L)); testHarness.processElement(insertRecord("key1", 5L, 501L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 5L, 501L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L), outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L), outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L), outputRecord(RowKind.DELETE, "key1", 5L, 501L, 20L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 6L, 600L, 26L), outputRecord(RowKind.UPDATE_AFTER, "key1", 6L, 600L, 21L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractWithDescendingSort() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_DESC)); 
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 2L, 200L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 2L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 1L), outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 4L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 3L), outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 5L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 5L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 2L), outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 7L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 4L), outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 9L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 5L), outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 10L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 6L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 5L, 500L, 5L), outputRecord(RowKind.UPDATE_AFTER, "key1", 5L, 500L, 11L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 200L, 7L), outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 200L, 13L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 9L), outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 15L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 10L), outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 16L), outputRecord(RowKind.DELETE, "key1", 2L, 200L, 13L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 2L, 201L, 15L), 
outputRecord(RowKind.UPDATE_AFTER, "key1", 2L, 201L, 13L), outputRecord(RowKind.UPDATE_BEFORE, "key1", 1L, 100L, 16L), outputRecord(RowKind.UPDATE_AFTER, "key1", 1L, 100L, 14L)); List<RowData> actualRows = testHarness.extractOutputValues(); validateRows(actualRows, expectedRows); } @Test public void testRetractWithEarlyOut() throws Exception { KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>( getNonTimeRowsUnboundedPrecedingFunction( 0L, GENERATED_SORT_KEY_COMPARATOR_ASC)); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records testHarness.processElement(insertRecord("key1", 0L, 100L)); testHarness.processElement(insertRecord("key1", 0L, 101L)); testHarness.processElement(insertRecord("key1", 0L, 102L)); testHarness.processElement(insertRecord("key1", 1L, 100L)); testHarness.processElement(insertRecord("key1", 2L, 200L)); testHarness.processElement(insertRecord("key1", 2L, 201L)); testHarness.processElement(insertRecord("key1", 5L, 500L)); testHarness.processElement(insertRecord("key1", 5L, 502L)); testHarness.processElement(insertRecord("key1", 5L, 501L)); testHarness.processElement(insertRecord("key1", 6L, 600L)); testHarness.processElement(updateBeforeRecord("key1", 0L, 100L)); List<RowData> expectedRows = Arrays.asList( outputRecord(RowKind.INSERT, "key1", 0L, 100L, 0L), outputRecord(RowKind.INSERT, "key1", 0L, 101L, 0L), outputRecord(RowKind.INSERT, "key1", 0L, 102L, 0L), outputRecord(RowKind.INSERT, "key1", 1L, 100L, 1L), outputRecord(RowKind.INSERT, "key1", 2L, 200L, 3L), outputRecord(RowKind.INSERT, "key1", 2L, 201L, 5L), outputRecord(RowKind.INSERT, "key1", 5L, 500L, 10L), outputRecord(RowKind.INSERT, "key1", 5L, 502L, 15L), outputRecord(RowKind.INSERT, "key1", 5L, 501L, 20L), outputRecord(RowKind.INSERT, "key1", 6L, 600L, 26L), outputRecord(RowKind.DELETE, "key1", 0L, 100L, 0L)); List<RowData> actualRows = testHarness.extractOutputValues(); 
validateRows(actualRows, expectedRows); } @Test public void testInsertAndRetractAllWithStateValidation() throws Exception { NonTimeRowsUnboundedPrecedingFunction<RowData> function = getNonTimeRowsUnboundedPrecedingFunction(0L, GENERATED_SORT_KEY_COMPARATOR_ASC); KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>(function); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L); testHarness.processElement(insertRecord("key1", 1L, 100L)); validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true); GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L); testHarness.processElement(insertRecord("key1", 2L, 200L)); validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true); GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L); testHarness.processElement(insertRecord("key1", 2L, 201L)); validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true); GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L); testHarness.processElement(insertRecord("key1", 5L, 500L)); validateState(function, fourthRecord, 2, 3, 0, 1, 3, 4, true); GenericRowData fifthRecord = GenericRowData.of("key1", 5L, 502L); testHarness.processElement(insertRecord("key1", 5L, 502L)); validateState(function, fifthRecord, 2, 3, 1, 2, 4, 5, true); GenericRowData sixthRecord = GenericRowData.of("key1", 5L, 501L); testHarness.processElement(insertRecord("key1", 5L, 501L)); validateState(function, sixthRecord, 2, 3, 2, 3, 5, 6, true); GenericRowData seventhRecord = GenericRowData.of("key1", 6L, 600L); testHarness.processElement(insertRecord("key1", 6L, 600L)); validateState(function, seventhRecord, 3, 4, 0, 1, 6, 7, true); testHarness.processElement(updateBeforeRecord("key1", 5L, 502L)); validateState(function, fifthRecord, 2, 4, 1, 2, 4, 6, false); 
testHarness.processElement(updateBeforeRecord("key1", 6L, 600L)); validateState(function, seventhRecord, 3, 3, 0, 0, 6, 5, false); testHarness.processElement(updateBeforeRecord("key1", 2L, 201L)); validateState(function, thirdRecord, 1, 3, 1, 1, 2, 4, false); testHarness.processElement(updateBeforeRecord("key1", 2L, 200L)); validateState(function, secondRecord, 1, 2, -1, 0, 1, 3, false); testHarness.processElement(updateBeforeRecord("key1", 5L, 500L)); validateState(function, fourthRecord, 1, 2, 0, 1, 3, 2, false); testHarness.processElement(updateBeforeRecord("key1", 5L, 501L)); validateState(function, sixthRecord, 1, 1, -1, 0, 5, 1, false); testHarness.processElement(updateBeforeRecord("key1", 1L, 100L)); validateState(function, firstRecord, 0, 0, -1, 0, 0, 0, false); List<RowData> actualRows = testHarness.extractOutputValues(); assertThat(actualRows.size()).isEqualTo(28); assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(0L); assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L); } @Test public void testInsertWithStateTTLExpiration() throws Exception { Duration stateTtlTime = Duration.ofMillis(10); NonTimeRowsUnboundedPrecedingFunction<RowData> function = getNonTimeRowsUnboundedPrecedingFunction( stateTtlTime.toMillis(), GENERATED_SORT_KEY_COMPARATOR_ASC); KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>(function); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L); testHarness.processElement(insertRecord("key1", 1L, 100L)); validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true); GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L); testHarness.processElement(insertRecord("key1", 2L, 200L)); validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true); GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L); 
testHarness.processElement(insertRecord("key1", 2L, 201L)); validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true); // expire the state testHarness.setStateTtlProcessingTime(stateTtlTime.toMillis() + 1); // After insertion of the following record, there should be only 1 record in state // After insertion of the following record, there should be only 1 record in state GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L); testHarness.processElement(insertRecord("key1", 5L, 500L)); validateState(function, fourthRecord, 0, 1, 0, 1, 0, 1, true); List<RowData> actualRows = testHarness.extractOutputValues(); assertThat(actualRows.size()).isEqualTo(4); assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(0L); assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L); } @Test public void testInsertAndRetractWithStateTTLExpiration() throws Exception { Duration stateTtlTime = Duration.ofMillis(10); NonTimeRowsUnboundedPrecedingFunction<RowData> function = getNonTimeRowsUnboundedPrecedingFunction( stateTtlTime.toMillis(), GENERATED_SORT_KEY_COMPARATOR_ASC); KeyedProcessOperator<RowData, RowData, RowData> operator = new KeyedProcessOperator<>(function); OneInputStreamOperatorTestHarness<RowData, RowData> testHarness = createTestHarness(operator); testHarness.open(); // put some records GenericRowData firstRecord = GenericRowData.of("key1", 1L, 100L); testHarness.processElement(insertRecord("key1", 1L, 100L)); validateState(function, firstRecord, 0, 1, 0, 1, 0, 1, true); GenericRowData secondRecord = GenericRowData.of("key1", 2L, 200L); testHarness.processElement(insertRecord("key1", 2L, 200L)); validateState(function, secondRecord, 1, 2, 0, 1, 1, 2, true); GenericRowData thirdRecord = GenericRowData.of("key1", 2L, 201L); testHarness.processElement(insertRecord("key1", 2L, 201L)); validateState(function, thirdRecord, 1, 2, 1, 2, 2, 3, true); GenericRowData fourthRecord = GenericRowData.of("key1", 5L, 500L); 
testHarness.processElement(insertRecord("key1", 5L, 500L)); validateState(function, fourthRecord, 2, 3, 0, 1, 3, 4, true); GenericRowData fifthRecord = GenericRowData.of("key1", 5L, 502L); testHarness.processElement(insertRecord("key1", 5L, 502L)); validateState(function, fifthRecord, 2, 3, 1, 2, 4, 5, true); // expire the state testHarness.setStateTtlProcessingTime(stateTtlTime.toMillis() + 1); // Retract a non-existent record due to state ttl expiration testHarness.processElement(updateBeforeRecord("key1", 5L, 502L)); // Ensure state is null/empty List<Tuple2<RowData, List<Long>>> sortedList = function.getRuntimeContext().getState(function.sortedListStateDescriptor).value(); assertThat(sortedList).isNull(); MapState<RowData, RowData> mapState = function.getRuntimeContext().getMapState(function.accStateDescriptor); assertThat(mapState.isEmpty()).isTrue(); Long idValue = function.getRuntimeContext().getState(function.idStateDescriptor).value(); assertThat(idValue).isNull(); List<RowData> actualRows = testHarness.extractOutputValues(); assertThat(actualRows.size()).isEqualTo(5); assertThat(function.getNumOfSortKeysNotFound().getCount()).isEqualTo(1L); assertThat(function.getNumOfIdsNotFound().getCount()).isEqualTo(0L); } void validateNumAccRows(int numAccRows, int expectedNumAccRows, int totalRows) { assertThat(numAccRows).isEqualTo(totalRows); } void validateEntry( AbstractNonTimeUnboundedPrecedingOver<RowData> function, RowData record, int idOffset) throws Exception { assertThat( function.getRuntimeContext() .getMapState(function.accStateDescriptor) .get(GenericRowData.of(Long.MIN_VALUE + idOffset))) .isNotNull(); } }
apache/harmony
37,454
classlib/modules/swing/src/main/java/common/javax/swing/text/html/FormViewComponentFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Roman I. Chernyatchik */ package javax.swing.text.html; import java.awt.Color; import java.awt.Component; import java.awt.ComponentOrientation; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Font; import java.awt.FontMetrics; import java.awt.Graphics; import java.awt.Insets; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.InputEvent; import java.net.URL; import javax.swing.AbstractButton; import javax.swing.Box; import javax.swing.ButtonGroup; import javax.swing.ButtonModel; import javax.swing.Icon; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JComboBox; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JList; import javax.swing.JPasswordField; import javax.swing.JRadioButton; import javax.swing.JScrollPane; import javax.swing.JTextArea; import javax.swing.JTextField; import javax.swing.JToggleButton; import javax.swing.KeyStroke; import javax.swing.ListSelectionModel; import javax.swing.JToggleButton.ToggleButtonModel; import javax.swing.border.BevelBorder; import javax.swing.border.Border; import javax.swing.border.CompoundBorder; import 
javax.swing.border.EmptyBorder;
import javax.swing.text.AttributeSet;
import javax.swing.text.JTextComponent;
import javax.swing.text.PlainDocument;
import javax.swing.text.SimpleAttributeSet;

import org.apache.harmony.x.swing.Utilities;
import org.apache.harmony.x.swing.text.html.HTMLIconFactory;
import org.apache.harmony.x.swing.text.html.form.Form;
import org.apache.harmony.x.swing.text.html.form.FormButtonModel;
import org.apache.harmony.x.swing.text.html.form.FormSelectComboBoxModel;
import org.apache.harmony.x.swing.text.html.form.FormSelectListModel;
import org.apache.harmony.x.swing.text.html.form.FormTextModel;
import org.apache.harmony.x.swing.text.html.form.FormToggleButtonModel;

/**
 * Factory of Swing components used by {@code FormView} to render HTML form
 * controls (&lt;input&gt;, &lt;select&gt;, &lt;textarea&gt;, &lt;button&gt;).
 *
 * <p>Every {@code createXxxComponent} method follows the same pattern: cast the
 * shared element model ({@code model}) to the concrete form-model type, fall
 * back to a fresh empty model when it is {@code null}, then apply the HTML
 * attributes (VALUE, SIZE, TITLE, ACCESSKEY, DIR, READONLY, DISABLED, ...)
 * from {@code attrs} onto the freshly created component.
 *
 * <p>Not instantiable; all members are static.
 */
final class FormViewComponentFactory {

    /**
     * Icon for &lt;input type=image&gt;. Shows a "loading" placeholder while a
     * {@link BackgroundImageLoader} fetches the image asynchronously, a
     * "no image" / "loading failed" placeholder on error, and the loaded
     * image once ready. Triggers a view preference change when loading
     * finishes so the layout can pick up the real image size.
     */
    static class InputImageIcon implements Icon {
        // Asynchronous loader; null when a placeholder icon is used instead.
        private BackgroundImageLoader loader;
        // Placeholder icon (no-image / loading-failed); null while the real
        // image is being (or has been) loaded.
        private Icon icon;

        public InputImageIcon(final String src, final URL baseURL,
                              final FormView view) {
            if (src == null) {
                // No SRC attribute at all: permanent "no image" placeholder.
                icon = HTMLIconFactory.getNoImageIcon();
            } else {
                URL url = HTML.resolveURL(src, baseURL);
                if (url == null) {
                    icon = HTMLIconFactory.getLoadingFailedIcon();
                } else {
                    // Width/height of -1 mean "use the image's natural size".
                    loader = new BackgroundImageLoader(url, true, -1, -1) {
                        protected void onReady() {
                            super.onReady();
                            // Re-layout the view now that the size is known.
                            view.preferenceChanged(view, true, true);
                        }

                        protected void onError() {
                            super.onError();
                            // Fall back to the placeholder and re-layout.
                            icon = HTMLIconFactory.getNoImageIcon();
                            view.preferenceChanged(view, true, true);
                        }
                    };
                }
            }
        }

        /** Returns true when the real image finished loading successfully. */
        public boolean imageWasLoaded() {
            return loader != null && loader.isReady();
        }

        public void paintIcon(final Component c, final Graphics g,
                              final int x, final int y) {
            if (icon != null) {
                // Placeholder state (no SRC, bad URL, or load error).
                icon.paintIcon(c, g, x, y);
                return;
            }
            if (!loader.isReady()) {
                // Still loading: paint the animated/loading placeholder.
                HTMLIconFactory.getLoadingImageIcon().paintIcon(c, g, x, y);
                return;
            }
            g.drawImage(loader.image, x, y,
                        getIconWidth(), getIconHeight(), loader);
        }

        public int getIconWidth() {
            if (icon != null) {
                return icon.getIconWidth();
            }
            if (!loader.isReady()) {
                return HTMLIconFactory.getLoadingImageIcon().getIconWidth();
            }
            return loader.getWidth();
        }

        public int getIconHeight() {
            if (icon != null) {
                return icon.getIconHeight();
            }
            if (!loader.isReady()) {
                return HTMLIconFactory.getLoadingImageIcon().getIconHeight();
            }
            return loader.getHeight();
        }
    };

    // Default visible width of text fields, in character widths (HTML default).
    private static final int DEFAULT_TEXTFIELD_SIZE = 20;
    // Padding used when sizing a VALUE-less button.
    private static final int DEFAULT_STRUT = 5;
    // Defaults for <textarea> when COLS/ROWS are absent or unparsable.
    private static final int DEFAULT_COLS_COUNT = 20;
    private static final int DEFAULT_ROWS_COUNT = 3;
    // Representative wide character used to convert a SIZE in characters
    // into pixels via FontMetrics.charWidth.
    private static final char MEAN_CHAR = 'z';
    // Bevel border colors for not-yet-loaded <input type=image> buttons.
    private static final Color IMG_BORDER_HIGHLIGHT = new Color(136, 136, 136);
    private static final Color IMG_BORDER_SHADOW = new Color(204, 204, 204);
    // Value of the DIR attribute that selects right-to-left orientation.
    private static final String DIR_RTL = "rtl";
    // Default captions when the corresponding VALUE attribute is absent.
    private static final String BROWSE_BUTTON_DEFAULT_TEXT = "Browse...";
    private static final String SUBMIT_DEFAULT_TEXT = "Submit Query";
    private static final String RESET_DEFAULT_TEXT = "Reset";

    // Static utility class: prevent instantiation.
    private FormViewComponentFactory() {
    }

    /**
     * Creates the component for a &lt;button&gt; element.
     * Currently rendered exactly like &lt;input type=image&gt;.
     */
    public static Component createButtonComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        // TODO Implement support of BUTTON content
        return createImageComponent(model, attrs, view);
    }

    /**
     * Creates a push button for &lt;input type=button&gt;.
     * With no VALUE attribute the button gets an empty caption and a
     * font-height-based fixed size.
     */
    public static Component createInputButtonComponent(final Object model,
            final AttributeSet attrs) {
        ButtonModel buttonModel = (ButtonModel) model;
        final JButton button = new JButton("");

        // Model
        if (buttonModel == null) {
            buttonModel = new FormButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                              SimpleAttributeSet.EMPTY);
        }
        button.setModel(buttonModel);

        // VALUE
        String attribute = (String)attrs.getAttribute(HTML.Attribute.VALUE);
        if (!Utilities.isEmptyString(attribute)) {
            button.setText(attribute);
        } else {
            // No caption: give the empty button a small fixed size.
            // NOTE(review): width is computed from insets.top + insets.bottom;
            // insets.left + insets.right looks intended — confirm before changing.
            final int width, height;
            final FontMetrics fontMetrics =
                button.getFontMetrics(button.getFont());
            final Insets insets = button.getInsets();
            width = DEFAULT_STRUT + insets.top + insets.bottom;
            height = fontMetrics.getHeight() + insets.top + insets.bottom;

            Dimension size = button.getPreferredSize();
            size.width = width;
            size.height = height;
            button.setPreferredSize(size);
            button.setMaximumSize(size);
            button.setMinimumSize(size);
        }

        // SIZE
        setButtonSize(button, attrs);
        // TITLE
        setTitle(button, attrs);
        // ACCESSKEY
        setButtonAccessKey(button, attrs);
        // ALIGN
        setButtonAlign(button);
        // DISABLED
        setDisabled(button, attrs);

        return button;
    }

    /**
     * Creates a check box for &lt;input type=checkbox&gt;, honoring the
     * CHECKED attribute.
     */
    public static Component createInputCheckBoxComponent(final Object model,
            final AttributeSet attrs) {
        ToggleButtonModel checkBoxModel = (ToggleButtonModel) model;
        final JCheckBox checkBox = new JCheckBox();

        // Model
        if (checkBoxModel == null) {
            checkBoxModel =
                new FormToggleButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                          SimpleAttributeSet.EMPTY);
        }
        checkBox.setModel(checkBoxModel);

        // SIZE
        setButtonSize(checkBox, attrs);
        // TITLE
        setTitle(checkBox, attrs);
        // CHECKED
        setChecked(checkBox, attrs);
        // ACCESSKEY
        setButtonAccessKey(checkBox, attrs);
        // ALIGN
        setButtonAlign(checkBox);
        // DISABLED
        setDisabled(checkBox, attrs);

        return checkBox;
    }

    /**
     * Creates the clickable image for &lt;input type=image&gt; and wires it
     * so a click submits the form via the view's mouse listener.
     */
    public static Component createInputImageComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        final Component image = createImageComponent(model, attrs, view);

        // ActionPerformed
        image.addMouseListener(view.new MouseEventListener());

        return image;
    }

    /**
     * Creates a password field for &lt;input type=password&gt;; Enter in the
     * field forwards to {@code view.actionPerformed} (form submit).
     * The SIZE attribute is converted to pixels using the echo character's
     * width, since that is what is actually displayed.
     */
    public static Component createInputPasswordComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        PlainDocument document = (PlainDocument) model;
        final JPasswordField passwordField = new JPasswordField();

        // Model
        if (document == null) {
            document = new FormTextModel(new Form(SimpleAttributeSet.EMPTY),
                                         SimpleAttributeSet.EMPTY);
        }
        passwordField.setDocument(document);

        // ActionPerformed
        passwordField.addActionListener(new ActionListener() {
            public void actionPerformed(final ActionEvent event) {
                view.actionPerformed(event);
            }
        });

        // VALUE
        String attribute = (String)attrs.getAttribute(HTML.Attribute.VALUE);
        if (!Utilities.isEmptyString(attribute)) {
            passwordField.setText(attribute);
        }

        // SIZE
        setTextSize(passwordField, attrs, passwordField.getEchoChar());
        // TITLE
        setTitle(passwordField, attrs);
        // ACCESSKEY
        setTextAccessKey(passwordField, attrs);
        // DIR
        setTextDir(passwordField, attrs);
        // READONLY
        setTextReadonly(passwordField, attrs);
        // ALIGN
        setTextAlign(passwordField);
        // DISABLED
        setDisabled(passwordField, attrs);

        return passwordField;
    }

    /**
     * Creates a radio button for &lt;input type=radio&gt;.
     * Named radios share the form-provided model (which handles grouping);
     * an unnamed radio gets an anonymous model whose {@code setGroup} is a
     * no-op, so it never joins a button group.
     */
    public static Component createInputRadioComponent(final Object model,
            final AttributeSet attrs) {
        ToggleButtonModel radioButtonModel;
        final JRadioButton radioButton = new JRadioButton();

        // NAME
        String attribute = (String) attrs.getAttribute(HTML.Attribute.NAME);
        if (!Utilities.isEmptyString(attribute)) {
            radioButtonModel = (ToggleButtonModel) model;
        } else {
            radioButtonModel = new ToggleButtonModel() {
                public void setGroup(final ButtonGroup group) {
                    //Do nothing
                };
            };
        }

        // Model
        if (radioButtonModel == null) {
            radioButtonModel =
                new FormToggleButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                          SimpleAttributeSet.EMPTY);
        }
        radioButton.setModel(radioButtonModel);

        // SIZE
        setButtonSize(radioButton, attrs);
        // TITLE
        setTitle(radioButton, attrs);
        // CHECKED
        setChecked(radioButton, attrs);
        // ACCESSKEY
        setButtonAccessKey(radioButton, attrs);
        // ALIGN
        setButtonAlign(radioButton);
        // DISABLED
        setDisabled(radioButton, attrs);

        return radioButton;
    }

    /**
     * Creates the reset button for &lt;input type=reset&gt;. Caption falls
     * back to "Reset" when VALUE is absent; clicks go to
     * {@code view.actionPerformed}.
     */
    public static Component createInputResetComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        ButtonModel resetButtonModel = (ButtonModel) model;
        final JButton resetButton = new JButton();

        // Model
        if (resetButtonModel == null) {
            resetButtonModel =
                new FormButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                    SimpleAttributeSet.EMPTY);
        }
        resetButton.setModel(resetButtonModel);

        // ActionPerformed
        resetButton.addActionListener(new ActionListener() {
            public void actionPerformed(final ActionEvent event) {
                view.actionPerformed(event);
            }
        });

        // VALUE
        String attribute = (String)attrs.getAttribute(HTML.Attribute.VALUE);
        if (!Utilities.isEmptyString(attribute)) {
            resetButton.setText(attribute);
        } else {
            resetButton.setText(RESET_DEFAULT_TEXT);
        }

        // SIZE
        setButtonSize(resetButton, attrs);
        // TITLE
        setTitle(resetButton, attrs);
        // ACCESSKEY
        setButtonAccessKey(resetButton, attrs);
        // ALIGN
        setButtonAlign(resetButton);
        // DISABLED
        setDisabled(resetButton, attrs);

        return resetButton;
    }

    /**
     * Creates the submit button for &lt;input type=submit&gt;. Caption falls
     * back to "Submit Query" when VALUE is absent; clicks go to
     * {@code view.actionPerformed}.
     */
    public static Component createInputSubmitComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        ButtonModel submitButtonModel = (ButtonModel) model;
        final JButton submitButton = new JButton();

        // Model
        if (submitButtonModel == null) {
            submitButtonModel =
                new FormButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                    SimpleAttributeSet.EMPTY);
        }
        submitButton.setModel(submitButtonModel);

        // ActionPerformed
        submitButton.addActionListener(new ActionListener() {
            public void actionPerformed(final ActionEvent event) {
                view.actionPerformed(event);
            }
        });

        // VALUE
        String attribute = (String)attrs.getAttribute(HTML.Attribute.VALUE);
        if (!Utilities.isEmptyString(attribute)) {
            submitButton.setText(attribute);
        } else {
            submitButton.setText(SUBMIT_DEFAULT_TEXT);
        }

        // SIZE
        setButtonSize(submitButton, attrs);
        // TITLE
        setTitle(submitButton, attrs);
        // ACCESSKEY
        setButtonAccessKey(submitButton, attrs);
        // ALIGN
        setButtonAlign(submitButton);
        // DISABLED
        setDisabled(submitButton, attrs);

        return submitButton;
    }

    /**
     * Creates a single-line text field for &lt;input type=text&gt;; Enter
     * forwards to {@code view.actionPerformed} (form submit).
     */
    public static Component createInputTextComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        PlainDocument document = (PlainDocument) model;
        final JTextField textField = new JTextField();

        // Model
        if (document == null) {
            document = new FormTextModel(new Form(SimpleAttributeSet.EMPTY),
                                         SimpleAttributeSet.EMPTY);
        }
        textField.setDocument(document);

        // ActionPerformed
        textField.addActionListener(new ActionListener() {
            public void actionPerformed(final ActionEvent event) {
                view.actionPerformed(event);
            }
        });

        // VALUE
        final String attribute =
            (String)attrs.getAttribute(HTML.Attribute.VALUE);
        if (!Utilities.isEmptyString(attribute)) {
            textField.setText(attribute);
        }

        // SIZE
        setTextSize(textField, attrs, MEAN_CHAR);
        // TITLE
        setTitle(textField, attrs);
        // ACCESSKEY
        setTextAccessKey(textField, attrs);
        // DIR
        setTextDir(textField, attrs);
        // READONLY
        setTextReadonly(textField, attrs);
        // ALIGN
        setTextAlign(textField);
        // DISABLED
        setDisabled(textField, attrs);

        return textField;
    }

    /**
     * Creates the composite control for &lt;input type=file&gt;: a text field
     * holding the path plus a "Browse..." button that opens a
     * {@link JFileChooser}. READONLY disables editing AND suppresses the
     * chooser; DIR=rtl flips the box orientation.
     */
    public static Component createInputFileComponent(final Object model,
            final AttributeSet attrs) {
        /*
         * FilePath attributes
         */
        PlainDocument document = (PlainDocument) model;
        final JTextField filePath = new JTextField();

        // Model
        if (document == null) {
            document = new FormTextModel(new Form(SimpleAttributeSet.EMPTY),
                                         SimpleAttributeSet.EMPTY);
        }
        filePath.setDocument(document);

        // SIZE
        setTextSize(filePath, attrs, MEAN_CHAR);
        // ACCESSKEY
        setTextAccessKey(filePath, attrs);
        // DIR
        boolean isRTL = setTextDir(filePath, attrs);

        /*
         * Browse button attributes
         */
        final JButton browseButton = new JButton(BROWSE_BUTTON_DEFAULT_TEXT);

        // READONLY
        String attribute =
            (String) attrs.getAttribute(HTML.Attribute.READONLY);
        if (attribute != null) {
            filePath.setEditable(false);
        } else {
            browseButton.addActionListener(new ActionListener() {
                // Lazily created so no chooser exists until first click.
                private JFileChooser chooser;

                public void actionPerformed(final ActionEvent e) {
                    if (chooser == null) {
                        chooser = new JFileChooser();
                    }
                    if (chooser.showOpenDialog(browseButton)
                            == JFileChooser.APPROVE_OPTION) {
                        filePath.setText(chooser.getSelectedFile().getPath());
                    }
                }
            });
        }

        /*
         * Box attributes
         */
        final Box box = Box.createHorizontalBox();

        // TITLE
        attribute = (String) attrs.getAttribute(HTML.Attribute.TITLE);
        if (!Utilities.isEmptyString(attribute)) {
            filePath.setToolTipText(attribute);
            browseButton.setToolTipText(attribute);
        }

        // ALIGN
        box.setAlignmentX(JComponent.CENTER_ALIGNMENT);
        box.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);
        browseButton.setAlignmentX(JComponent.LEFT_ALIGNMENT);
        browseButton.setAlignmentY(JComponent.CENTER_ALIGNMENT);

        // DISABLED
        if (attrs.getAttribute(HTML.Attribute.DISABLED) != null) {
            filePath.setEnabled(false);
            browseButton.setEnabled(false);
        }

        box.add(filePath);
        // NOTE(review): literal 5 duplicates DEFAULT_STRUT — same value today.
        box.add(Box.createHorizontalStrut(5));
        box.add(browseButton);

        if (isRTL) {
            box.setComponentOrientation(ComponentOrientation.RIGHT_TO_LEFT);
        }

        return box;
    }

    /**
     * Creates a scrollable list for a multi-line &lt;select&gt; (SIZE &gt; 1
     * or MULTIPLE). The visible row count comes from SIZE, else from the
     * option count; MULTIPLE enables multi-interval selection.
     */
    public static JComponent createSelectMultipleComponent(
            final Object model, final AttributeSet attrs) {
        // MULTIPLE
        final boolean isMultiple =
            (attrs.getAttribute(HTML.Attribute.MULTIPLE) != null);

        // SIZE
        int linesCount = 0;
        String attribute = (String)attrs.getAttribute(HTML.Attribute.SIZE);
        if (!Utilities.isEmptyString(attribute)) {
            try {
                linesCount = Integer.parseInt(attribute);
            } catch (NumberFormatException e) {
                //DO nothing
            }
        }

        /*
         * JList attributes
         */
        JList selectionList = new JList();
        FormSelectListModel optionModel;

        // Model
        if (model != null) {
            optionModel = (FormSelectListModel)model;
        } else {
            optionModel =
                new FormSelectListModel(new Form(SimpleAttributeSet.EMPTY),
                                        SimpleAttributeSet.EMPTY,
                                        selectionList.getSelectionModel());
        }
        selectionList.setModel(optionModel);
        selectionList.setSelectionModel(optionModel.getSelectionModel());
        if (isMultiple) {
            selectionList.setSelectionMode(ListSelectionModel
                                           .MULTIPLE_INTERVAL_SELECTION);
        }

        // TITLE
        if (!Utilities.isEmptyString(optionModel.getTitle())) {
            selectionList.setToolTipText(optionModel.getTitle());
        }

        // DIR
        setTextDir(selectionList, attrs);

        // OPTION attributes
        // SIZE absent/unparsable or <= 1: show all options (at least one row).
        if (linesCount <= 1) {
            linesCount = Math.max(1, selectionList.getModel().getSize());
        }

        // Selection
        FormViewUtils.resetMultipleSelection(optionModel);

        /*
         * JScrollPane attributes
         */
        final FontMetrics fontMetrics =
            selectionList.getFontMetrics(selectionList.getFont());
        Dimension size;
        if (optionModel.getSize() == 0) {
            // Empty <select>: give the list a one-character minimum width so
            // it remains visible.
            size = selectionList.getPreferredSize();
            Insets insets = selectionList.getInsets();
            size.width = fontMetrics.charWidth(MEAN_CHAR)
                         + insets.left + insets.right;
            selectionList.setPreferredSize(size);
        }

        JScrollPane pane =
            new JScrollPane(selectionList,
                            JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
                            JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
        size = pane.getPreferredSize();
        size.height = linesCount * fontMetrics.getHeight();
        pane.setMinimumSize(size);
        pane.setMaximumSize(size);
        pane.setPreferredSize(size);
        pane.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);

        // DISABLED
        // NOTE(review): this disables the pane when the model IS enabled —
        // the inverse of createSelectSimpleComponent's
        // "if (!comboBoxModel.isEnabled())". Looks like a missing '!';
        // confirm intended behavior before changing.
        if (optionModel.isEnabled()) {
            pane.setEnabled(false);
        }

        return pane;
    }

    /**
     * Creates a combo box for a single-line &lt;select&gt; (no MULTIPLE,
     * SIZE &lt;= 1).
     */
    public static JComponent createSelectSimpleComponent(
            final Object model, final AttributeSet attrs) {
        JComboBox selectElement = new JComboBox();
        FormSelectComboBoxModel comboBoxModel = (FormSelectComboBoxModel)model;

        // Model
        if (comboBoxModel == null) {
            comboBoxModel =
                new FormSelectComboBoxModel(new Form(SimpleAttributeSet.EMPTY),
                                            SimpleAttributeSet.EMPTY);
        }
        selectElement.setModel(comboBoxModel);
        selectElement.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);

        // TITLE
        if (!Utilities.isEmptyString(comboBoxModel.getTitle())) {
            selectElement.setToolTipText(comboBoxModel.getTitle());
        }

        // DIR
        setTextDir(selectElement, attrs);

        // Selection
        FormViewUtils.resetSimpleSelection(selectElement.getModel());

        // Size
        final Dimension size = selectElement.getPreferredSize();
        selectElement.setMinimumSize(size);
        selectElement.setMaximumSize(size);

        // DISABLED
        if (!comboBoxModel.isEnabled()) {
            selectElement.setEnabled(false);
        }

        return selectElement;
    }

    /**
     * Creates a scrollable text area for &lt;textarea&gt;. ROWS/COLS default
     * to 3/20 when absent or unparsable; TITLE is propagated to the area,
     * the pane, and both scroll bars.
     */
    public static Component createTextAreaComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        /*
         * JTextArea attributes
         */
        Dimension size;
        PlainDocument document = (PlainDocument)model;

        //ROWS
        int rowsCount = DEFAULT_ROWS_COUNT;
        String attribute = (String)attrs.getAttribute(HTML.Attribute.ROWS);
        if (!Utilities.isEmptyString(attribute)) {
            try {
                rowsCount = Integer.parseInt(attribute);
            } catch (NumberFormatException e) {
                //Do nothing
            }
        }

        //COLS
        int columnsCount = DEFAULT_COLS_COUNT;
        attribute = (String)attrs.getAttribute(HTML.Attribute.COLS);
        if (!Utilities.isEmptyString(attribute)) {
            try {
                columnsCount = Integer.parseInt(attribute);
            } catch (NumberFormatException e) {
                //Do nothing
            }
        }

        //Model
        if (document == null) {
            document = new FormTextModel(new Form(SimpleAttributeSet.EMPTY),
                                         SimpleAttributeSet.EMPTY);
        }
        final JTextArea textArea =
            new JTextArea(document, null, rowsCount, columnsCount);

        //DIR
        setTextDir(textArea, attrs);
        //ACCESSKEY
        setTextAccessKey(textArea, attrs);
        //READONLY
        setTextReadonly(textArea, attrs);

        /*
         * JScrollPane attributes
         */
        final JScrollPane pane = new JScrollPane(textArea);
        size = pane.getPreferredSize();
        pane.setMinimumSize(size);
        pane.setPreferredSize(size);
        pane.setMaximumSize(size);
        pane.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);

        //TITLE
        attribute = (String) attrs.getAttribute(HTML.Attribute.TITLE);
        if (!Utilities.isEmptyString(attribute)) {
            textArea.setToolTipText(attribute);
            pane.setToolTipText(attribute);
            pane.getVerticalScrollBar().setToolTipText(attribute);
            pane.getHorizontalScrollBar().setToolTipText(attribute);
        }

        //DISABLED
        if (attrs.getAttribute(HTML.Attribute.DISABLED) != null) {
            textArea.setEnabled(false);
            pane.setEnabled(false);
        }

        return pane;
    }

    /**
     * Shared implementation for image-like buttons (&lt;button&gt; and
     * &lt;input type=image&gt;). While the image is not yet loaded the button
     * shows a beveled placeholder with the ALT text; once loaded it is sized
     * to the image and painted border-less.
     */
    private static Component createImageComponent(final Object model,
            final AttributeSet attrs,
            final FormView view) {
        ButtonModel imageModel = (ButtonModel) model;
        final JButton image = new JButton("");

        // Model
        if (imageModel == null) {
            imageModel = new FormButtonModel(new Form(SimpleAttributeSet.EMPTY),
                                             SimpleAttributeSet.EMPTY);
        }
        image.setModel(imageModel);
        image.setCursor(new Cursor(Cursor.HAND_CURSOR));

        // SRC, ALT
        String attribute = (String)attrs.getAttribute(HTML.Attribute.SRC);
        InputImageIcon icon =
            new InputImageIcon(attribute,
                               ((HTMLDocument)view.getDocument()).getBase(),
                               view);
        image.setIcon(icon);
        image.setBackground(Color.WHITE);

        Dimension size;
        if (icon.imageWasLoaded()) {
            image.setBorderPainted(false);
            size = new Dimension(icon.getIconWidth(), icon.getIconHeight());
        } else {
            // Placeholder look: lowered bevel + padding, ALT text if present.
            Border outside = new BevelBorder(BevelBorder.LOWERED,
                                             IMG_BORDER_SHADOW,
                                             IMG_BORDER_HIGHLIGHT);
            image.setBorder(new CompoundBorder(outside,
                                               new EmptyBorder(5, 5, 5, 5)));
            image.setContentAreaFilled(false);
            image.setFocusPainted(false);

            attribute = (String)attrs.getAttribute(HTML.Attribute.ALT);
            if (!Utilities.isEmptyString(attribute)) {
                // NOTE(review): "Button.font" is a UIManager key, not a font
                // family name — Font falls back to a default family here;
                // confirm whether UIManager.getFont was intended.
                image.setFont(new Font("Button.font", 0, 12));
                image.setText(attribute);
                image.setToolTipText(attribute);
            }
            size = image.getPreferredSize();
        }
        image.setMinimumSize(size);
        image.setPreferredSize(size);
        image.setMaximumSize(size);

        //SIZE
        setButtonSize(image, attrs);
        //TITLE
        setTitle(image, attrs);
        //ACCESSKEY
        setButtonAccessKey(image, attrs);
        //ALIGN
        setButtonAlign(image);
        //DISABLED
        setDisabled(image, attrs);

        return image;
    }

    /**
     * Applies the SIZE attribute (in characters) to a text component,
     * converting it to pixels via the width of {@code widestChar}. SIZE
     * values smaller than the 20-character default are ignored; the minimum
     * size is always the 20-character width.
     */
    private static void setTextSize(final JTextComponent textComponent,
            final AttributeSet attrs,
            final char widestChar) {
        final String attribute =
            (String) attrs.getAttribute(HTML.Attribute.SIZE);
        int width = DEFAULT_TEXTFIELD_SIZE;
        if (attribute != null) {
            try {
                final int newWidth = Integer.parseInt(attribute);
                if (newWidth > width) {
                    width = newWidth;
                }
            } catch (NumberFormatException e) {
                // do nothing
            }
        }
        final FontMetrics fontMetrics =
            textComponent.getFontMetrics(textComponent.getFont());
        final int charWidth = fontMetrics.charWidth(widestChar);

        Dimension size = textComponent.getPreferredSize();
        size.width = width * charWidth;
        textComponent.setPreferredSize(size);
        textComponent.setMaximumSize(size);
        size = new Dimension(DEFAULT_TEXTFIELD_SIZE * charWidth, size.height);
        textComponent.setMinimumSize(size);
    }

    /**
     * Applies the TITLE attribute as a tool tip and returns it (possibly
     * null/empty) so callers can reuse the value.
     */
    private static String setTitle(final JComponent component,
                                   final AttributeSet attrs) {
        final String attribute =
            (String) attrs.getAttribute(HTML.Attribute.TITLE);
        if (!Utilities.isEmptyString(attribute)) {
            component.setToolTipText(attribute);
        }
        return attribute;
    }

    /** Makes the text component non-editable when READONLY is present. */
    private static void setTextReadonly(final JTextComponent textComponent,
                                        final AttributeSet attrs) {
        if (attrs.getAttribute(HTML.Attribute.READONLY) != null) {
            textComponent.setEditable(false);
        }
    }

    /**
     * Applies ACCESSKEY to a button as its mnemonic (first character of the
     * attribute value only).
     */
    private static void setButtonAccessKey(final AbstractButton button,
                                           final AttributeSet attrs) {
        final String attribute =
            (String) attrs.getAttribute(HTML.Attribute.ACCESSKEY);
        if (!Utilities.isEmptyString(attribute)) {
            button.setMnemonic(attribute.charAt(0));
        }
    }

    /** Standard alignment for button-like form controls. */
    private static void setButtonAlign(final AbstractButton button) {
        button.setAlignmentX(JComponent.LEFT_ALIGNMENT);
        button.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);
    }

    /**
     * Applies the SIZE attribute to a button as a fixed pixel width
     * (height keeps the preferred value). Unparsable values are ignored.
     */
    private static void setButtonSize(final AbstractButton button,
                                      final AttributeSet attrs) {
        final String attribute;
        attribute = (String)attrs.getAttribute(HTML.Attribute.SIZE);
        if (attribute != null) {
            Dimension size = button.getPreferredSize();
            try {
                size.width = Integer.parseInt(attribute);
            } catch (NumberFormatException e) {
                //Do nothing
            }
            button.setPreferredSize(size);
            button.setMaximumSize(size);
            button.setMinimumSize(size);
        }
    }

    /** Selects the toggle button when CHECKED is present. */
    private static void setChecked(final JToggleButton button,
                                   final AttributeSet attrs) {
        if (attrs.getAttribute(HTML.Attribute.CHECKED) != null) {
            button.setSelected(true);
        }
    }

    /** Disables the component when DISABLED is present. */
    private static void setDisabled(final Component component,
                                    final AttributeSet attrs) {
        if (attrs.getAttribute(HTML.Attribute.DISABLED) != null) {
            component.setEnabled(false);
        }
    }

    /**
     * Applies ACCESSKEY to a text component: registers Alt+key (both cases)
     * window-wide shortcuts that move focus into the component.
     */
    private static void setTextAccessKey(final JTextComponent textComponent,
                                         final AttributeSet attrs) {
        final String attribute =
            (String) attrs.getAttribute(HTML.Attribute.ACCESSKEY);
        if (!Utilities.isEmptyString(attribute)) {
            ActionListener listener = new ActionListener() {
                public void actionPerformed(final ActionEvent e) {
                    textComponent.requestFocusInWindow();
                }
            };
            final char key = attribute.charAt(0);
            // Register both cases so the shortcut works regardless of Shift.
            final KeyStroke keystroke1 =
                KeyStroke.getKeyStroke(Character.toLowerCase(key),
                                       InputEvent.ALT_MASK);
            final KeyStroke keystroke2 =
                KeyStroke.getKeyStroke(Character.toUpperCase(key),
                                       InputEvent.ALT_MASK);
            textComponent.registerKeyboardAction(listener, keystroke1,
                    JComponent.WHEN_IN_FOCUSED_WINDOW);
            textComponent.registerKeyboardAction(listener, keystroke2,
                    JComponent.WHEN_IN_FOCUSED_WINDOW);
        }
    }

    /** Standard alignment for text-like form controls. */
    private static void setTextAlign(final JTextComponent component) {
        component.setAlignmentX(JComponent.CENTER_ALIGNMENT);
        component.setAlignmentY(JComponent.BOTTOM_ALIGNMENT);
    }

    /**
     * Applies the DIR attribute: sets right-to-left orientation when the
     * value is "rtl" (case-insensitive).
     *
     * @return true when RTL orientation was applied, so composite controls
     *         can flip their own layout as well
     */
    private static boolean setTextDir(final Component component,
                                      final AttributeSet attrs) {
        final String attribute =
            (String)attrs.getAttribute(HTML.Attribute.DIR);
        if (!Utilities.isEmptyString(attribute)) {
            if (DIR_RTL.equals(attribute.toLowerCase())) {
                component.setComponentOrientation(ComponentOrientation
                                                  .RIGHT_TO_LEFT);
                return true;
            }
        }
        return false;
    }
}
googleapis/google-cloud-java
37,350
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/DataProfileFindingLocation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * Location of a data profile finding within a resource. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.DataProfileFindingLocation} */ public final class DataProfileFindingLocation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DataProfileFindingLocation) DataProfileFindingLocationOrBuilder { private static final long serialVersionUID = 0L; // Use DataProfileFindingLocation.newBuilder() to construct. 
private DataProfileFindingLocation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DataProfileFindingLocation() { containerName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DataProfileFindingLocation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DataProfileFindingLocation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DataProfileFindingLocation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DataProfileFindingLocation.class, com.google.privacy.dlp.v2.DataProfileFindingLocation.Builder.class); } private int locationExtraDetailsCase_ = 0; @SuppressWarnings("serial") private java.lang.Object locationExtraDetails_; public enum LocationExtraDetailsCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { DATA_PROFILE_FINDING_RECORD_LOCATION(2), LOCATIONEXTRADETAILS_NOT_SET(0); private final int value; private LocationExtraDetailsCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static LocationExtraDetailsCase valueOf(int value) { return forNumber(value); } public static LocationExtraDetailsCase forNumber(int value) { switch (value) { case 2: return DATA_PROFILE_FINDING_RECORD_LOCATION; case 0: return LOCATIONEXTRADETAILS_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public LocationExtraDetailsCase getLocationExtraDetailsCase() { return LocationExtraDetailsCase.forNumber(locationExtraDetailsCase_); } public static final int CONTAINER_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object containerName_ = ""; /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @return The containerName. */ @java.lang.Override public java.lang.String getContainerName() { java.lang.Object ref = containerName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); containerName_ = s; return s; } } /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @return The bytes for containerName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getContainerNameBytes() { java.lang.Object ref = containerName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); containerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DATA_PROFILE_FINDING_RECORD_LOCATION_FIELD_NUMBER = 2; /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> * * @return Whether the dataProfileFindingRecordLocation field is set. */ @java.lang.Override public boolean hasDataProfileFindingRecordLocation() { return locationExtraDetailsCase_ == 2; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> * * @return The dataProfileFindingRecordLocation. */ @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingRecordLocation getDataProfileFindingRecordLocation() { if (locationExtraDetailsCase_ == 2) { return (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_; } return com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. 
* </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingRecordLocationOrBuilder getDataProfileFindingRecordLocationOrBuilder() { if (locationExtraDetailsCase_ == 2) { return (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_; } return com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, containerName_); } if (locationExtraDetailsCase_ == 2) { output.writeMessage( 2, (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(containerName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, containerName_); } if (locationExtraDetailsCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.DataProfileFindingLocation)) { return super.equals(obj); } 
com.google.privacy.dlp.v2.DataProfileFindingLocation other = (com.google.privacy.dlp.v2.DataProfileFindingLocation) obj; if (!getContainerName().equals(other.getContainerName())) return false; if (!getLocationExtraDetailsCase().equals(other.getLocationExtraDetailsCase())) return false; switch (locationExtraDetailsCase_) { case 2: if (!getDataProfileFindingRecordLocation() .equals(other.getDataProfileFindingRecordLocation())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CONTAINER_NAME_FIELD_NUMBER; hash = (53 * hash) + getContainerName().hashCode(); switch (locationExtraDetailsCase_) { case 2: hash = (37 * hash) + DATA_PROFILE_FINDING_RECORD_LOCATION_FIELD_NUMBER; hash = (53 * hash) + getDataProfileFindingRecordLocation().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.DataProfileFindingLocation prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Location of a data profile finding within a resource. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.DataProfileFindingLocation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DataProfileFindingLocation) com.google.privacy.dlp.v2.DataProfileFindingLocationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DataProfileFindingLocation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DataProfileFindingLocation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.DataProfileFindingLocation.class, com.google.privacy.dlp.v2.DataProfileFindingLocation.Builder.class); } // Construct using com.google.privacy.dlp.v2.DataProfileFindingLocation.newBuilder() private Builder() {} private 
Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; containerName_ = ""; if (dataProfileFindingRecordLocationBuilder_ != null) { dataProfileFindingRecordLocationBuilder_.clear(); } locationExtraDetailsCase_ = 0; locationExtraDetails_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_DataProfileFindingLocation_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingLocation getDefaultInstanceForType() { return com.google.privacy.dlp.v2.DataProfileFindingLocation.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingLocation build() { com.google.privacy.dlp.v2.DataProfileFindingLocation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingLocation buildPartial() { com.google.privacy.dlp.v2.DataProfileFindingLocation result = new com.google.privacy.dlp.v2.DataProfileFindingLocation(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.DataProfileFindingLocation result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.containerName_ = containerName_; } } private void buildPartialOneofs(com.google.privacy.dlp.v2.DataProfileFindingLocation result) { result.locationExtraDetailsCase_ = locationExtraDetailsCase_; result.locationExtraDetails_ = this.locationExtraDetails_; if (locationExtraDetailsCase_ == 2 && dataProfileFindingRecordLocationBuilder_ != null) { result.locationExtraDetails_ = dataProfileFindingRecordLocationBuilder_.build(); } } @java.lang.Override 
public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.DataProfileFindingLocation) { return mergeFrom((com.google.privacy.dlp.v2.DataProfileFindingLocation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.DataProfileFindingLocation other) { if (other == com.google.privacy.dlp.v2.DataProfileFindingLocation.getDefaultInstance()) return this; if (!other.getContainerName().isEmpty()) { containerName_ = other.containerName_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getLocationExtraDetailsCase()) { case DATA_PROFILE_FINDING_RECORD_LOCATION: { mergeDataProfileFindingRecordLocation(other.getDataProfileFindingRecordLocation()); break; } case LOCATIONEXTRADETAILS_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { containerName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getDataProfileFindingRecordLocationFieldBuilder().getBuilder(), extensionRegistry); locationExtraDetailsCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int locationExtraDetailsCase_ = 0; private java.lang.Object locationExtraDetails_; public LocationExtraDetailsCase getLocationExtraDetailsCase() { return LocationExtraDetailsCase.forNumber(locationExtraDetailsCase_); } public Builder clearLocationExtraDetails() { locationExtraDetailsCase_ = 0; locationExtraDetails_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object containerName_ = ""; /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @return The containerName. 
*/ public java.lang.String getContainerName() { java.lang.Object ref = containerName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); containerName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @return The bytes for containerName. */ public com.google.protobuf.ByteString getContainerNameBytes() { java.lang.Object ref = containerName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); containerName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @param value The containerName to set. * @return This builder for chaining. */ public Builder setContainerName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } containerName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. 
Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @return This builder for chaining. */ public Builder clearContainerName() { containerName_ = getDefaultInstance().getContainerName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Name of the container where the finding is located. * The top-level name is the source file name or table name. Names of some * common storage containers are formatted as follows: * * * BigQuery tables: `{project_id}:{dataset_id}.{table_id}` * * Cloud Storage files: `gs://{bucket}/{path}` * </pre> * * <code>string container_name = 1;</code> * * @param value The bytes for containerName to set. * @return This builder for chaining. */ public Builder setContainerNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); containerName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.DataProfileFindingRecordLocation, com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.Builder, com.google.privacy.dlp.v2.DataProfileFindingRecordLocationOrBuilder> dataProfileFindingRecordLocationBuilder_; /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> * * @return Whether the dataProfileFindingRecordLocation field is set. */ @java.lang.Override public boolean hasDataProfileFindingRecordLocation() { return locationExtraDetailsCase_ == 2; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. 
* </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> * * @return The dataProfileFindingRecordLocation. */ @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingRecordLocation getDataProfileFindingRecordLocation() { if (dataProfileFindingRecordLocationBuilder_ == null) { if (locationExtraDetailsCase_ == 2) { return (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_; } return com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } else { if (locationExtraDetailsCase_ == 2) { return dataProfileFindingRecordLocationBuilder_.getMessage(); } return com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ public Builder setDataProfileFindingRecordLocation( com.google.privacy.dlp.v2.DataProfileFindingRecordLocation value) { if (dataProfileFindingRecordLocationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } locationExtraDetails_ = value; onChanged(); } else { dataProfileFindingRecordLocationBuilder_.setMessage(value); } locationExtraDetailsCase_ = 2; return this; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. 
* </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ public Builder setDataProfileFindingRecordLocation( com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.Builder builderForValue) { if (dataProfileFindingRecordLocationBuilder_ == null) { locationExtraDetails_ = builderForValue.build(); onChanged(); } else { dataProfileFindingRecordLocationBuilder_.setMessage(builderForValue.build()); } locationExtraDetailsCase_ = 2; return this; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ public Builder mergeDataProfileFindingRecordLocation( com.google.privacy.dlp.v2.DataProfileFindingRecordLocation value) { if (dataProfileFindingRecordLocationBuilder_ == null) { if (locationExtraDetailsCase_ == 2 && locationExtraDetails_ != com.google.privacy.dlp.v2.DataProfileFindingRecordLocation .getDefaultInstance()) { locationExtraDetails_ = com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.newBuilder( (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_) .mergeFrom(value) .buildPartial(); } else { locationExtraDetails_ = value; } onChanged(); } else { if (locationExtraDetailsCase_ == 2) { dataProfileFindingRecordLocationBuilder_.mergeFrom(value); } else { dataProfileFindingRecordLocationBuilder_.setMessage(value); } } locationExtraDetailsCase_ = 2; return this; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. 
* </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ public Builder clearDataProfileFindingRecordLocation() { if (dataProfileFindingRecordLocationBuilder_ == null) { if (locationExtraDetailsCase_ == 2) { locationExtraDetailsCase_ = 0; locationExtraDetails_ = null; onChanged(); } } else { if (locationExtraDetailsCase_ == 2) { locationExtraDetailsCase_ = 0; locationExtraDetails_ = null; } dataProfileFindingRecordLocationBuilder_.clear(); } return this; } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ public com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.Builder getDataProfileFindingRecordLocationBuilder() { return getDataProfileFindingRecordLocationFieldBuilder().getBuilder(); } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. * </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingRecordLocationOrBuilder getDataProfileFindingRecordLocationOrBuilder() { if ((locationExtraDetailsCase_ == 2) && (dataProfileFindingRecordLocationBuilder_ != null)) { return dataProfileFindingRecordLocationBuilder_.getMessageOrBuilder(); } else { if (locationExtraDetailsCase_ == 2) { return (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_; } return com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } } /** * * * <pre> * Location of a finding within a resource that produces a table data * profile. 
* </pre> * * <code> * .google.privacy.dlp.v2.DataProfileFindingRecordLocation data_profile_finding_record_location = 2; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.DataProfileFindingRecordLocation, com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.Builder, com.google.privacy.dlp.v2.DataProfileFindingRecordLocationOrBuilder> getDataProfileFindingRecordLocationFieldBuilder() { if (dataProfileFindingRecordLocationBuilder_ == null) { if (!(locationExtraDetailsCase_ == 2)) { locationExtraDetails_ = com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.getDefaultInstance(); } dataProfileFindingRecordLocationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.DataProfileFindingRecordLocation, com.google.privacy.dlp.v2.DataProfileFindingRecordLocation.Builder, com.google.privacy.dlp.v2.DataProfileFindingRecordLocationOrBuilder>( (com.google.privacy.dlp.v2.DataProfileFindingRecordLocation) locationExtraDetails_, getParentForChildren(), isClean()); locationExtraDetails_ = null; } locationExtraDetailsCase_ = 2; onChanged(); return dataProfileFindingRecordLocationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DataProfileFindingLocation) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DataProfileFindingLocation) private static final com.google.privacy.dlp.v2.DataProfileFindingLocation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DataProfileFindingLocation(); } public static com.google.privacy.dlp.v2.DataProfileFindingLocation getDefaultInstance() { return DEFAULT_INSTANCE; } private 
static final com.google.protobuf.Parser<DataProfileFindingLocation> PARSER = new com.google.protobuf.AbstractParser<DataProfileFindingLocation>() { @java.lang.Override public DataProfileFindingLocation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DataProfileFindingLocation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DataProfileFindingLocation> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.DataProfileFindingLocation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/synapse
37,331
modules/core/src/main/java/org/apache/synapse/config/SynapseConfigUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.synapse.config; import org.apache.axiom.om.*; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axis2.context.ConfigurationContext; import org.apache.axis2.engine.AxisConfiguration; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.codec.binary.Base64; import org.apache.synapse.*; import org.apache.synapse.aspects.AspectConfiguration; import org.apache.synapse.aspects.statistics.StatisticsCollector; import org.apache.synapse.securevault.definition.IdentityKeyStoreInformation; import org.apache.synapse.securevault.definition.KeyStoreInformation; import org.apache.synapse.securevault.definition.KeyStoreInformationFactory; import org.apache.synapse.securevault.definition.TrustKeyStoreInformation; import org.apache.synapse.core.SynapseEnvironment; import org.apache.synapse.mediators.MediatorProperty; import org.apache.synapse.mediators.base.SequenceMediator; import org.apache.synapse.mediators.builtin.DropMediator; import org.apache.synapse.mediators.builtin.LogMediator; import org.apache.synapse.util.SynapseBinaryDataSource; import org.apache.synapse.util.xpath.SynapseXPath; import org.jaxen.JaxenException; 
import org.xml.sax.InputSource; import javax.activation.DataHandler; import javax.net.ssl.*; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.transform.stream.StreamSource; import java.io.*; import java.net.*; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Properties; import java.util.List; import java.util.ArrayList; @SuppressWarnings({"UnusedDeclaration"}) public class SynapseConfigUtils { private static final Log log = LogFactory.getLog(SynapseConfigUtils.class); /** * Return a StreamSource for the given Object * * @param o the object * @return the StreamSource */ public static StreamSource getStreamSource(Object o) { if (o == null) { handleException("Cannot convert null to a StreamSource"); } else if (o instanceof OMElement) { OMElement omElement = (OMElement) o; ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { omElement.serialize(baos); return new StreamSource(new ByteArrayInputStream(baos.toByteArray())); } catch (XMLStreamException e) { handleException("Error converting to a StreamSource", e); } } else if (o instanceof OMText) { DataHandler dataHandler = (DataHandler) ((OMText) o).getDataHandler(); if (dataHandler != null) { try { return new StreamSource(dataHandler.getInputStream()); } catch (IOException e) { handleException("Error in reading content as a stream "); } } } else { handleException("Cannot convert object to a StreamSource"); } return null; } public static InputStream getInputStream(Object o) { if (o == null) { handleException("Cannot convert null to a StreamSource"); } else if (o instanceof OMElement) { OMElement omElement = (OMElement) o; ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { omElement.serialize(baos); return new ByteArrayInputStream(baos.toByteArray()); } catch (XMLStreamException e) { handleException("Error converting to a 
StreamSource", e); } } else if (o instanceof OMText) { DataHandler dataHandler = (DataHandler) ((OMText) o).getDataHandler(); if (dataHandler != null) { try { return dataHandler.getInputStream(); } catch (IOException e) { handleException("Error in reading content as a stream "); } } } else if (o instanceof URI) { try { return ((URI) (o)).toURL().openStream(); } catch (IOException e) { handleException("Error opening stream form URI", e); } } else { handleException("Cannot convert object to a StreamSource"); } return null; } /** * Get an object from a given URL. Will first fetch the content from the * URL and depending on the content-type, a suitable XMLToObjectMapper * (if available) would be used to transform this content into an Object. * If a suitable XMLToObjectMapper cannot be found, the content would be * treated as XML and an OMNode would be returned * * @param url the URL to the resource * @param properties bag of properties to pass in any information to the factory * @return an Object created from the given URL */ public static Object getObject(URL url, Properties properties) { try { if (url != null && "file".equals(url.getProtocol())) { try { url.openStream(); } catch (IOException ignored) { String path = url.getPath(); if (log.isDebugEnabled()) { log.debug("Can not open a connection to the URL with a path :" + path); } String synapseHome = (String) properties.get(SynapseConstants.SYNAPSE_HOME); if (synapseHome != null) { if (log.isDebugEnabled()) { log.debug("Trying to resolve an absolute path of the " + " URL using the synapse.home : " + synapseHome); } if (synapseHome.endsWith("/")) { synapseHome = synapseHome.substring(0, synapseHome.lastIndexOf("/")); } url = new URL(url.getProtocol() + ":" + synapseHome + "/" + path); try { url.openStream(); } catch (IOException e) { if (log.isDebugEnabled()) { log.debug("Failed to resolve an absolute path of the " + " URL using the synapse.home : " + synapseHome); } log.warn("IO Error reading from URL " + 
url.getPath() + e); } } } } if (url == null) { return null; } URLConnection connection = null; // If wsdl url contains http basic authentication parameters. if (url.getUserInfo() != null) { String protocol = url.getProtocol(); if ("http".equalsIgnoreCase(protocol) || "https".equalsIgnoreCase(protocol)) { // Create new url excluding user info URL newUrl = new URL(protocol, url.getHost(), url.getPort(), url.getFile()); connection = getURLConnection(newUrl); String encoding = new String(new Base64().encode(url.getUserInfo().getBytes())); connection.setRequestProperty("Authorization", "Basic " + encoding); } else { handleException("Unsupported protocol [" + protocol + "]. Supports only http " + "and https with basic authentication."); } } else { connection = getURLConnection(url); } if (connection == null) { if (log.isDebugEnabled()) { log.debug("Cannot create a URLConnection for given URL : " + url); } return null; } XMLToObjectMapper xmlToObject = getXmlToObjectMapper(connection.getContentType()); InputStream inputStream = connection.getInputStream(); try { XMLStreamReader parser = XMLInputFactory.newInstance(). 
createXMLStreamReader(inputStream); OMXMLParserWrapper builder = OMXMLBuilderFactory.createStAXOMBuilder(parser); OMElement omElem = builder.getDocumentElement(); // detach from URL connection and keep in memory // TODO remove this omElem.build(); if (xmlToObject != null) { return xmlToObject.getObjectFromOMNode(omElem, properties); } else { return omElem; } } catch (XMLStreamException e) { if (log.isDebugEnabled()) { log.debug("Content at URL : " + url + " is non XML.."); } return readNonXML(url); } catch (OMException e) { if (log.isDebugEnabled()) { log.debug("Content at URL : " + url + " is non XML.."); } return readNonXML(url); } finally { inputStream.close(); } } catch (IOException e) { handleException("Error connecting to URL : " + url, e); } return null; } /** * Helper method to handle non-XMl resources * * @param url The resource url * @return The content as an OMNode */ public static OMNode readNonXML(URL url) { try { // Open a new connection URLConnection newConnection = getURLConnection(url); if (newConnection == null) { if (log.isDebugEnabled()) { log.debug("Cannot create a URLConnection for given URL : " + url); } return null; } BufferedInputStream newInputStream = new BufferedInputStream( newConnection.getInputStream()); OMFactory omFactory = OMAbstractFactory.getOMFactory(); return omFactory.createOMText( new DataHandler(new SynapseBinaryDataSource(newInputStream, newConnection.getContentType())), true); } catch (IOException e) { handleException("Error when getting a stream from resource's content", e); } return null; } /** * Return an OMElement from a URL source * * @param urlStr a URL string * @param synapseHome synapse home parameter to be used * @return an OMElement of the resource * @throws IOException for invalid URL's or IO errors */ public static OMNode getOMElementFromURL(String urlStr, String synapseHome) throws IOException { URL url = getURLFromPath(urlStr, synapseHome); if (url == null) { return null; } URLConnection connection = 
getURLConnection(url); if (connection == null) { if (log.isDebugEnabled()) { log.debug("Cannot create a URLConnection for given URL : " + urlStr); } return null; } InputStream inStream = connection.getInputStream(); try { OMXMLParserWrapper builder = OMXMLBuilderFactory.createOMBuilder(inStream); OMElement doc = builder.getDocumentElement(); doc.build(); return doc; } catch (Exception e) { if (log.isDebugEnabled()) { log.info("Content at URL : " + url + " is non XML.."); } return readNonXML(url); } finally { try { inStream.close(); } catch (IOException e) { log.warn("Error while closing the input stream to: " + url, e); } } } public static InputSource getInputSourceFormURI(URI uri) { if (uri == null) { if (log.isDebugEnabled()) { log.debug("Can not create a URL from 'null' "); } return null; } try { URL url = uri.toURL(); String protocol = url.getProtocol(); String path = url.getPath(); if (protocol == null || "".equals(protocol)) { url = new URL("file:" + path); } URLConnection connection = getURLConnection(url); if (connection == null) { if (log.isDebugEnabled()) { log.debug("Cannot create a URLConnection for given URL : " + uri); } return null; } BufferedInputStream urlInStream = new BufferedInputStream(connection.getInputStream()); return new InputSource(urlInStream); } catch (MalformedURLException e) { handleException("Invalid URL ' " + uri + " '", e); } catch (IOException e) { handleException("IOError when getting a stream from given url : " + uri, e); } return null; } private static void handleException(String msg, Exception e) { log.warn(msg, e); throw new SynapseException(msg, e); } /** * Helper method to create a HttpSURLConnection with provided KeyStores * * @param url Https URL * @param synapseProperties properties for extracting info * @param proxy if there is a proxy * @return gives out the connection created */ private static HttpsURLConnection getHttpsURLConnection( URL url, Properties synapseProperties, Proxy proxy) { if (log.isDebugEnabled()) { 
log.debug("Creating a HttpsURL Connection from given URL : " + url); } KeyManager[] keyManagers = null; TrustManager[] trustManagers = null; IdentityKeyStoreInformation identityInformation = KeyStoreInformationFactory.createIdentityKeyStoreInformation(synapseProperties); if (identityInformation != null) { KeyManagerFactory keyManagerFactory = identityInformation.getIdentityKeyManagerFactoryInstance(); if (keyManagerFactory != null) { keyManagers = keyManagerFactory.getKeyManagers(); } } else { if (log.isDebugEnabled()) { log.debug("There is no private key entry store configuration." + " Will use JDK's default one"); } } TrustKeyStoreInformation trustInformation = KeyStoreInformationFactory.createTrustKeyStoreInformation(synapseProperties); if (trustInformation != null) { TrustManagerFactory trustManagerFactory = trustInformation.getTrustManagerFactoryInstance(); if (trustManagerFactory != null) { trustManagers = trustManagerFactory.getTrustManagers(); } } else { if (log.isDebugEnabled()) { log.debug("There is no trusted certificate store configuration." + " Will use JDK's default one"); } } try { HttpsURLConnection connection; if (proxy != null) { connection = (HttpsURLConnection) url.openConnection(proxy); } else { connection = (HttpsURLConnection) url.openConnection(); } //Create a SSLContext SSLContext sslContext = SSLContext.getInstance("TLS"); sslContext.init(keyManagers, trustManagers, null); connection.setSSLSocketFactory(sslContext.getSocketFactory()); if (trustInformation != null) { // Determine is it need to overwrite default Host Name verifier boolean enableHostnameVerifier = true; String value = trustInformation.getParameter( KeyStoreInformation.ENABLE_HOST_NAME_VERIFIER); if (value != null) { enableHostnameVerifier = Boolean.parseBoolean(value); } if (!enableHostnameVerifier) { if (log.isDebugEnabled()) { log.debug("Overriding default HostName Verifier." 
+ "HostName verification disabled"); } connection.setHostnameVerifier(new javax.net.ssl.HostnameVerifier() { @Override public boolean verify(String hostname, javax.net.ssl.SSLSession session) { if (log.isTraceEnabled()) { log.trace("HostName verification disabled"); log.trace("Host: " + hostname); log.trace("Peer Host: " + session.getPeerHost()); } return true; } }); } else { if (log.isDebugEnabled()) { log.debug("Using default HostName verifier..."); } } } return connection; } catch (NoSuchAlgorithmException e) { handleException("Error loading SSLContext ", e); } catch (KeyManagementException e) { handleException("Error initiation SSLContext with KeyManagers", e); } catch (IOException e) { handleException("Error opening a https connection from URL : " + url, e); } return null; } /** * Returns a URLCOnnection for given URL. If the URL is https one , then URLConnectin is a * HttpsURLCOnnection and it is configured with KeyStores given in the synapse.properties file * * @param url URL * @return URLConnection for given URL */ public static URLConnection getURLConnection(URL url) { try { if (url == null) { if (log.isDebugEnabled()) { log.debug("Provided URL is null"); } return null; } URLConnection connection; if (url.getProtocol().equalsIgnoreCase("http") || url.getProtocol().equalsIgnoreCase("https")) { Properties synapseProperties = SynapsePropertiesLoader.loadSynapseProperties(); String proxyHost = synapseProperties.getProperty( SynapseConstants.SYNPASE_HTTP_PROXY_HOST); String proxyPort = synapseProperties.getProperty( SynapseConstants.SYNPASE_HTTP_PROXY_PORT); // get the list of excluded hosts for proxy List<String> excludedHosts = getExcludedHostsForProxy(synapseProperties); if (proxyHost != null && proxyPort != null && !excludedHosts.contains(proxyHost)) { SocketAddress sockaddr = new InetSocketAddress( proxyHost, Integer.parseInt(proxyPort)); Proxy proxy = new Proxy(Proxy.Type.HTTP, sockaddr); if (url.getProtocol().equalsIgnoreCase("https")) { connection = 
getHttpsURLConnection(url, synapseProperties, proxy); } else { connection = url.openConnection(proxy); } } else { if (url.getProtocol().equalsIgnoreCase("https")) { connection = getHttpsURLConnection(url, synapseProperties, null); } else { connection = url.openConnection(); } } // try to see weather authentication is required String userName = synapseProperties.getProperty( SynapseConstants.SYNPASE_HTTP_PROXY_USER); String password = synapseProperties.getProperty( SynapseConstants.SYNPASE_HTTP_PROXY_PASSWORD); if (userName != null && password != null) { String header = userName + ":" + password; byte[] encodedHeaderBytes = new Base64().encode(header.getBytes()); String encodedHeader = new String(encodedHeaderBytes); connection.setRequestProperty("Proxy-Authorization", "Basic " + encodedHeader); } } else { connection = url.openConnection(); } connection.setReadTimeout(getReadTimeout()); connection.setConnectTimeout(getConnectTimeout()); connection.setRequestProperty("Connection", "close"); // if http is being used return connection; } catch (IOException e) { handleException("Error reading at URI ' " + url + " ' ", e); } return null; } /** * Get the exclued host list for proxy server. When a connection is made for these hosts it will * not go through the proxy server * @param synapseProperties properties from the synapse.properties file * @return list of excluded hosts */ private static List<String> getExcludedHostsForProxy(Properties synapseProperties) { List<String> excludedHosts = new ArrayList<String>(); String excludedHostsConfig = synapseProperties. 
getProperty(SynapseConstants.SYNAPSE_HTTP_PROXY_EXCLUDED_HOSTS); if (excludedHostsConfig != null) { String [] list = excludedHostsConfig.split(","); for (String host : list) { excludedHosts.add(host.trim()); } } return excludedHosts; } private static void handleException(String msg) { log.warn(msg); throw new SynapseException(msg); } /** * Return a suitable XMLToObjectMapper for the given content type if one * is available, else return null; * * @param contentType the content type for which a mapper is required * @return a suitable XMLToObjectMapper or null if none can be found */ public static XMLToObjectMapper getXmlToObjectMapper(String contentType) { return null; } /** * Utility method to resolve url(only If need) path using synapse home system property * * @param path Path to the URL * @param synapseHome synapse home parameter value to be used * @return Valid URL instance or null(if it is invalid or can not open a connection to it ) */ public static URL getURLFromPath(String path, String synapseHome) { if (path == null || "null".equals(path)) { if (log.isDebugEnabled()) { log.debug("Can not create a URL from 'null' "); } return null; } URL url = null; try { url = new URL(path); if ("file".equals(url.getProtocol())) { try { url.openStream(); } catch (MalformedURLException e) { handleException("Invalid URL reference : " + path, e); } catch (IOException ignored) { if (log.isDebugEnabled()) { log.debug("Can not open a connection to the URL with a path :" + path); } if (synapseHome != null) { if (synapseHome.endsWith("/")) { synapseHome = synapseHome.substring(0, synapseHome.lastIndexOf("/")); } if (log.isDebugEnabled()) { log.debug("Trying to resolve an absolute path of the " + " URL using the synapse.home : " + synapseHome); } try { url = new URL(url.getProtocol() + ":" + synapseHome + "/" + url.getPath()); url.openStream(); } catch (MalformedURLException e) { handleException("Invalid URL reference " + url.getPath() + e); } catch (IOException e) { if 
(log.isDebugEnabled()) { log.debug("Failed to resolve an absolute path of the " + " URL using the synapse.home : " + synapseHome); } log.warn("IO Error reading from URL : " + url.getPath() + e); } } } } } catch (MalformedURLException e) { handleException("Invalid URL reference : " + path, e); } catch (IOException e) { handleException("IO Error reading from URL : " + path, e); } return url; } /** * Resolve a relative URI. If the URI is a file and it's path is relative, the basePath will be * used as parent location. If the URI is null the basePath will be returned as URI. * * @param uri the URI to resolve * @param basePath the base path * @return the absolute URI */ public static URI resolveRelativeURI(URI uri, String basePath) { URI baseURI; if (uri != null) { if ("file".equals(uri.getScheme())) { String wsdlPath = uri.getSchemeSpecificPart(); if (!new File(wsdlPath).isAbsolute()) { baseURI = new File(new File(basePath), wsdlPath).toURI(); } else { baseURI = uri; } } else { baseURI = uri; } } else { baseURI = new File(basePath).toURI(); } return baseURI; } public static String resolveRelativeURI(String parentLocation, String relativeLocation) { if (relativeLocation == null) { throw new IllegalArgumentException("Import URI cannot be null"); } if (log.isDebugEnabled()) { log.debug("Resolving import URI ' " + parentLocation + " ' " + "against base URI ' " + relativeLocation + " ' "); } URI importUri = null; try { importUri = new URI(relativeLocation); if (importUri.isAbsolute()) { return importUri.toString(); } } catch (URISyntaxException e) { handleException("Invalid URI : " + relativeLocation, e); } if (parentLocation == null) { assert importUri != null; return importUri.toString(); } else { // if the import-uri is absolute if (relativeLocation.startsWith("/") || relativeLocation.startsWith("\\")) { if (importUri != null && !importUri.isAbsolute()) { try { importUri = new URI("file:" + relativeLocation); return importUri.toString(); } catch (URISyntaxException e) { 
handleException("Invalid URI ' " + importUri.getPath() + " '", e); } } } else { int index = parentLocation.lastIndexOf("/"); if (index == -1) { index = parentLocation.lastIndexOf("\\"); } if (index != -1) { String basepath = parentLocation.substring(0, index + 1); String resolvedPath = basepath + relativeLocation; try { URI resolvedUri = new URI(resolvedPath); if (!resolvedUri.isAbsolute()) { resolvedUri = new URI("file:" + resolvedPath); } return resolvedUri.toString(); } catch (URISyntaxException e) { handleException("Invalid URI ' " + resolvedPath + " '", e); } } else { assert importUri != null; return importUri.toString(); } } } return null; } public static int getConnectTimeout() { return Integer.parseInt(SynapsePropertiesLoader.getPropertyValue( SynapseConstants.CONNECTTIMEOUT, String.valueOf(SynapseConstants.DEFAULT_CONNECTTIMEOUT))); } public static int getReadTimeout() { return Integer.parseInt(SynapsePropertiesLoader.getPropertyValue( SynapseConstants.READTIMEOUT, String.valueOf(SynapseConstants.DEFAULT_READTIMEOUT))); } public static long getTimeoutHandlerInterval() { return Long.parseLong(SynapsePropertiesLoader.getPropertyValue( SynapseConstants.TIMEOUT_HANDLER_INTERVAL, String.valueOf(SynapseConstants.DEFAULT_TIMEOUT_HANDLER_INTERVAL))); } public static long getGlobalTimeoutInterval() { return Long.parseLong(SynapsePropertiesLoader.getPropertyValue( SynapseConstants.GLOBAL_TIMEOUT_INTERVAL, String.valueOf(SynapseConstants.DEFAULT_GLOBAL_TIMEOUT))); } public static SynapseEnvironment getSynapseEnvironment(AxisConfiguration axisCfg) { return axisCfg != null && axisCfg.getParameter(SynapseConstants.SYNAPSE_HOME) != null ? (SynapseEnvironment) axisCfg.getParameterValue(SynapseConstants.SYNAPSE_CONFIG) : null; } /** * Get the StatisticsCollector from synapse env. 
* * @param contextInfo server information * @return StatisticsCollector instance if there is any */ public static StatisticsCollector getStatisticsCollector(ServerContextInformation contextInfo) { if (contextInfo != null && contextInfo.getServerState() == ServerState.INITIALIZED) { Object o = contextInfo.getServerContext(); if (o instanceof ConfigurationContext) { ConfigurationContext context = (ConfigurationContext) o; SynapseEnvironment environment = (SynapseEnvironment) context.getAxisConfiguration().getParameterValue( SynapseConstants.SYNAPSE_ENV); if (environment != null) { return environment.getStatisticsCollector(); } } } return null; } public static OMElement stringToOM(String xml) { try { return AXIOMUtil.stringToOM(xml); // Just wrap to add logging for any errors } catch (XMLStreamException e) { handleException("Unable to convert a string to OM Node as the string " + "is malformed , String : " + xml, e); } return null; } /** * Construct a fresh SynapseConfiguration instance and registers the observers * with it as specified in the synapse.properties file. Use the initial.observers * property in the synapse.properties file to specify observers as a comma separated * list. * * @return a SynapseConfiguration instance */ public static SynapseConfiguration newConfiguration() { SynapseConfiguration synConfig = new SynapseConfiguration(); Properties synapseProps = SynapsePropertiesLoader.loadSynapseProperties(); String propValue = synapseProps.getProperty("synapse.observers"); if (propValue != null) { String[] observerNames = propValue.split(","); for (String observer : observerNames) { try { Class clazz = SynapseConfigUtils.class.getClassLoader(). loadClass(observer.trim()); SynapseObserver o = (SynapseObserver) clazz.newInstance(); synConfig.registerObserver(o); } catch (Exception e) { handleException("Error while initializing Synapse observers", e); } } } return synConfig; } /** * Return the main sequence if one is not defined. 
This implementation defaults to * a simple sequence with a <send/> * * @param config the configuration to be updated */ public static void setDefaultMainSequence(SynapseConfiguration config) { SequenceMediator main = new SequenceMediator(); main.setName(SynapseConstants.MAIN_SEQUENCE_KEY); main.addChild(new LogMediator()); main.addChild(new DropMediator()); config.addSequence(SynapseConstants.MAIN_SEQUENCE_KEY, main); // set the aspect configuration AspectConfiguration configuration = new AspectConfiguration(main.getName()); main.configure(configuration); } /** * Return the fault sequence if one is not defined. This implementation defaults to * a simple sequence : * <log level="full"> * <property name="MESSAGE" value="Executing default "fault" sequence"/> * <property name="ERROR_CODE" expression="get-property('ERROR_CODE')"/> * <property name="ERROR_MESSAGE" expression="get-property('ERROR_MESSAGE')"/> * </log> * <drop/> * * @param config the configuration to be updated */ public static void setDefaultFaultSequence(SynapseConfiguration config) { SequenceMediator fault = new SequenceMediator(); fault.setName(org.apache.synapse.SynapseConstants.FAULT_SEQUENCE_KEY); LogMediator log = new LogMediator(); log.setLogLevel(LogMediator.FULL); MediatorProperty mp = new MediatorProperty(); mp.setName("MESSAGE"); mp.setValue("Executing default \"fault\" sequence"); log.addProperty(mp); mp = new MediatorProperty(); mp.setName("ERROR_CODE"); try { mp.setExpression(new SynapseXPath("get-property('ERROR_CODE')")); } catch (JaxenException ignore) {} log.addProperty(mp); mp = new MediatorProperty(); mp.setName("ERROR_MESSAGE"); try { mp.setExpression(new SynapseXPath("get-property('ERROR_MESSAGE')")); } catch (JaxenException ignore) {} log.addProperty(mp); fault.addChild(log); fault.addChild(new DropMediator()); // set aspect configuration AspectConfiguration configuration = new AspectConfiguration(fault.getName()); fault.configure(configuration); 
config.addSequence(org.apache.synapse.SynapseConstants.FAULT_SEQUENCE_KEY, fault); } public static boolean isFailSafeEnabled(String componentName) { Properties synapseProps = SynapsePropertiesLoader.loadSynapseProperties(); String failSafeStr = synapseProps.getProperty(SynapseConstants.FAIL_SAFE_MODE_STATUS); if (failSafeStr != null) { String[] failSafeComponents = failSafeStr.split(","); List<String> componentList = Arrays.asList(failSafeComponents); if (componentList.indexOf(SynapseConstants.FAIL_SAFE_MODE_ALL) >= 0 || componentList.indexOf(componentName) >= 0) { return true; } } else { return true; // Enabled by default } return false; } /** * Retrieve injected environment variables when detecting parameters with "$" prefix * * @param parameter parameter to be populated with retrieved environment variable * @return string value in environment variables */ public static String fetchEnvironmentVariables(String parameter) { String injectedValue = parameter; if (parameter.startsWith(SynapseConstants.SYSTEM_VARIABLE_PREFIX)) { String extractedEnvVariableKey = parameter.substring(parameter.lastIndexOf(":") + 1); injectedValue = System.getenv(extractedEnvVariableKey); } return injectedValue; } }
googleapis/google-cloud-java
36,995
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/BuildOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; public interface BuildOrBuilder extends // @@protoc_insertion_point(interface_extends:google.devtools.cloudbuild.v1.Build) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Output only. The 'Build' name with format: * `projects/{project}/locations/{location}/builds/{build}`, where {build} * is a unique identifier generated by the service. * </pre> * * <code>string name = 45 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Output only. The 'Build' name with format: * `projects/{project}/locations/{location}/builds/{build}`, where {build} * is a unique identifier generated by the service. * </pre> * * <code>string name = 45 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * Output only. Unique identifier of the build. * </pre> * * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The id. */ java.lang.String getId(); /** * * * <pre> * Output only. Unique identifier of the build. * </pre> * * <code>string id = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for id. 
*/ com.google.protobuf.ByteString getIdBytes(); /** * * * <pre> * Output only. ID of the project. * </pre> * * <code>string project_id = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The projectId. */ java.lang.String getProjectId(); /** * * * <pre> * Output only. ID of the project. * </pre> * * <code>string project_id = 16 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for projectId. */ com.google.protobuf.ByteString getProjectIdBytes(); /** * * * <pre> * Output only. Status of the build. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Build.Status status = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for status. */ int getStatusValue(); /** * * * <pre> * Output only. Status of the build. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Build.Status status = 2 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The status. */ com.google.cloudbuild.v1.Build.Status getStatus(); /** * * * <pre> * Output only. Customer-readable message about the current status. * </pre> * * <code>string status_detail = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The statusDetail. */ java.lang.String getStatusDetail(); /** * * * <pre> * Output only. Customer-readable message about the current status. * </pre> * * <code>string status_detail = 24 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for statusDetail. */ com.google.protobuf.ByteString getStatusDetailBytes(); /** * * * <pre> * The location of the source files to build. * </pre> * * <code>.google.devtools.cloudbuild.v1.Source source = 3;</code> * * @return Whether the source field is set. */ boolean hasSource(); /** * * * <pre> * The location of the source files to build. * </pre> * * <code>.google.devtools.cloudbuild.v1.Source source = 3;</code> * * @return The source. 
*/ com.google.cloudbuild.v1.Source getSource(); /** * * * <pre> * The location of the source files to build. * </pre> * * <code>.google.devtools.cloudbuild.v1.Source source = 3;</code> */ com.google.cloudbuild.v1.SourceOrBuilder getSourceOrBuilder(); /** * * * <pre> * Required. The operations to be performed on the workspace. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.BuildStep steps = 11;</code> */ java.util.List<com.google.cloudbuild.v1.BuildStep> getStepsList(); /** * * * <pre> * Required. The operations to be performed on the workspace. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.BuildStep steps = 11;</code> */ com.google.cloudbuild.v1.BuildStep getSteps(int index); /** * * * <pre> * Required. The operations to be performed on the workspace. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.BuildStep steps = 11;</code> */ int getStepsCount(); /** * * * <pre> * Required. The operations to be performed on the workspace. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.BuildStep steps = 11;</code> */ java.util.List<? extends com.google.cloudbuild.v1.BuildStepOrBuilder> getStepsOrBuilderList(); /** * * * <pre> * Required. The operations to be performed on the workspace. * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.BuildStep steps = 11;</code> */ com.google.cloudbuild.v1.BuildStepOrBuilder getStepsOrBuilder(int index); /** * * * <pre> * Output only. Results of the build. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Results results = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the results field is set. */ boolean hasResults(); /** * * * <pre> * Output only. Results of the build. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Results results = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The results. */ com.google.cloudbuild.v1.Results getResults(); /** * * * <pre> * Output only. Results of the build. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.Results results = 10 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.ResultsOrBuilder getResultsOrBuilder(); /** * * * <pre> * Output only. Time at which the request to create the build was received. * </pre> * * <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the createTime field is set. */ boolean hasCreateTime(); /** * * * <pre> * Output only. Time at which the request to create the build was received. * </pre> * * <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The createTime. */ com.google.protobuf.Timestamp getCreateTime(); /** * * * <pre> * Output only. Time at which the request to create the build was received. * </pre> * * <code>.google.protobuf.Timestamp create_time = 6 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder(); /** * * * <pre> * Output only. Time at which execution of the build was started. * </pre> * * <code>.google.protobuf.Timestamp start_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the startTime field is set. */ boolean hasStartTime(); /** * * * <pre> * Output only. Time at which execution of the build was started. * </pre> * * <code>.google.protobuf.Timestamp start_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The startTime. */ com.google.protobuf.Timestamp getStartTime(); /** * * * <pre> * Output only. Time at which execution of the build was started. * </pre> * * <code>.google.protobuf.Timestamp start_time = 7 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder(); /** * * * <pre> * Output only. Time at which execution of the build was finished. 
* * The difference between finish_time and start_time is the duration of the * build's execution. * </pre> * * <code>.google.protobuf.Timestamp finish_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the finishTime field is set. */ boolean hasFinishTime(); /** * * * <pre> * Output only. Time at which execution of the build was finished. * * The difference between finish_time and start_time is the duration of the * build's execution. * </pre> * * <code>.google.protobuf.Timestamp finish_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The finishTime. */ com.google.protobuf.Timestamp getFinishTime(); /** * * * <pre> * Output only. Time at which execution of the build was finished. * * The difference between finish_time and start_time is the duration of the * build's execution. * </pre> * * <code>.google.protobuf.Timestamp finish_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getFinishTimeOrBuilder(); /** * * * <pre> * Amount of time that this build should be allowed to run, to second * granularity. If this amount of time elapses, work on the build will cease * and the build status will be `TIMEOUT`. * * `timeout` starts ticking from `startTime`. * * Default time is 60 minutes. * </pre> * * <code>.google.protobuf.Duration timeout = 12;</code> * * @return Whether the timeout field is set. */ boolean hasTimeout(); /** * * * <pre> * Amount of time that this build should be allowed to run, to second * granularity. If this amount of time elapses, work on the build will cease * and the build status will be `TIMEOUT`. * * `timeout` starts ticking from `startTime`. * * Default time is 60 minutes. * </pre> * * <code>.google.protobuf.Duration timeout = 12;</code> * * @return The timeout. */ com.google.protobuf.Duration getTimeout(); /** * * * <pre> * Amount of time that this build should be allowed to run, to second * granularity. 
If this amount of time elapses, work on the build will cease * and the build status will be `TIMEOUT`. * * `timeout` starts ticking from `startTime`. * * Default time is 60 minutes. * </pre> * * <code>.google.protobuf.Duration timeout = 12;</code> */ com.google.protobuf.DurationOrBuilder getTimeoutOrBuilder(); /** * * * <pre> * A list of images to be pushed upon the successful completion of all build * steps. * * The images are pushed using the builder service account's credentials. * * The digests of the pushed images will be stored in the `Build` resource's * results field. * * If any of the images fail to be pushed, the build status is marked * `FAILURE`. * </pre> * * <code>repeated string images = 13;</code> * * @return A list containing the images. */ java.util.List<java.lang.String> getImagesList(); /** * * * <pre> * A list of images to be pushed upon the successful completion of all build * steps. * * The images are pushed using the builder service account's credentials. * * The digests of the pushed images will be stored in the `Build` resource's * results field. * * If any of the images fail to be pushed, the build status is marked * `FAILURE`. * </pre> * * <code>repeated string images = 13;</code> * * @return The count of images. */ int getImagesCount(); /** * * * <pre> * A list of images to be pushed upon the successful completion of all build * steps. * * The images are pushed using the builder service account's credentials. * * The digests of the pushed images will be stored in the `Build` resource's * results field. * * If any of the images fail to be pushed, the build status is marked * `FAILURE`. * </pre> * * <code>repeated string images = 13;</code> * * @param index The index of the element to return. * @return The images at the given index. */ java.lang.String getImages(int index); /** * * * <pre> * A list of images to be pushed upon the successful completion of all build * steps. 
* * The images are pushed using the builder service account's credentials. * * The digests of the pushed images will be stored in the `Build` resource's * results field. * * If any of the images fail to be pushed, the build status is marked * `FAILURE`. * </pre> * * <code>repeated string images = 13;</code> * * @param index The index of the value to return. * @return The bytes of the images at the given index. */ com.google.protobuf.ByteString getImagesBytes(int index); /** * * * <pre> * TTL in queue for this build. If provided and the build is enqueued longer * than this value, the build will expire and the build status will be * `EXPIRED`. * * The TTL starts ticking from create_time. * </pre> * * <code>.google.protobuf.Duration queue_ttl = 40;</code> * * @return Whether the queueTtl field is set. */ boolean hasQueueTtl(); /** * * * <pre> * TTL in queue for this build. If provided and the build is enqueued longer * than this value, the build will expire and the build status will be * `EXPIRED`. * * The TTL starts ticking from create_time. * </pre> * * <code>.google.protobuf.Duration queue_ttl = 40;</code> * * @return The queueTtl. */ com.google.protobuf.Duration getQueueTtl(); /** * * * <pre> * TTL in queue for this build. If provided and the build is enqueued longer * than this value, the build will expire and the build status will be * `EXPIRED`. * * The TTL starts ticking from create_time. * </pre> * * <code>.google.protobuf.Duration queue_ttl = 40;</code> */ com.google.protobuf.DurationOrBuilder getQueueTtlOrBuilder(); /** * * * <pre> * Artifacts produced by the build that should be uploaded upon * successful completion of all build steps. * </pre> * * <code>.google.devtools.cloudbuild.v1.Artifacts artifacts = 37;</code> * * @return Whether the artifacts field is set. */ boolean hasArtifacts(); /** * * * <pre> * Artifacts produced by the build that should be uploaded upon * successful completion of all build steps. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.Artifacts artifacts = 37;</code> * * @return The artifacts. */ com.google.cloudbuild.v1.Artifacts getArtifacts(); /** * * * <pre> * Artifacts produced by the build that should be uploaded upon * successful completion of all build steps. * </pre> * * <code>.google.devtools.cloudbuild.v1.Artifacts artifacts = 37;</code> */ com.google.cloudbuild.v1.ArtifactsOrBuilder getArtifactsOrBuilder(); /** * * * <pre> * Cloud Storage bucket where logs should be written (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * Logs file names will be of the format `${logs_bucket}/log-${build_id}.txt`. * </pre> * * <code>string logs_bucket = 19;</code> * * @return The logsBucket. */ java.lang.String getLogsBucket(); /** * * * <pre> * Cloud Storage bucket where logs should be written (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * Logs file names will be of the format `${logs_bucket}/log-${build_id}.txt`. * </pre> * * <code>string logs_bucket = 19;</code> * * @return The bytes for logsBucket. */ com.google.protobuf.ByteString getLogsBucketBytes(); /** * * * <pre> * Output only. A permanent fixed identifier for source. * </pre> * * <code> * .google.devtools.cloudbuild.v1.SourceProvenance source_provenance = 21 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the sourceProvenance field is set. */ boolean hasSourceProvenance(); /** * * * <pre> * Output only. A permanent fixed identifier for source. * </pre> * * <code> * .google.devtools.cloudbuild.v1.SourceProvenance source_provenance = 21 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The sourceProvenance. */ com.google.cloudbuild.v1.SourceProvenance getSourceProvenance(); /** * * * <pre> * Output only. A permanent fixed identifier for source. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.SourceProvenance source_provenance = 21 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.SourceProvenanceOrBuilder getSourceProvenanceOrBuilder(); /** * * * <pre> * Output only. The ID of the `BuildTrigger` that triggered this build, if it * was triggered automatically. * </pre> * * <code>string build_trigger_id = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The buildTriggerId. */ java.lang.String getBuildTriggerId(); /** * * * <pre> * Output only. The ID of the `BuildTrigger` that triggered this build, if it * was triggered automatically. * </pre> * * <code>string build_trigger_id = 22 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for buildTriggerId. */ com.google.protobuf.ByteString getBuildTriggerIdBytes(); /** * * * <pre> * Special options for this build. * </pre> * * <code>.google.devtools.cloudbuild.v1.BuildOptions options = 23;</code> * * @return Whether the options field is set. */ boolean hasOptions(); /** * * * <pre> * Special options for this build. * </pre> * * <code>.google.devtools.cloudbuild.v1.BuildOptions options = 23;</code> * * @return The options. */ com.google.cloudbuild.v1.BuildOptions getOptions(); /** * * * <pre> * Special options for this build. * </pre> * * <code>.google.devtools.cloudbuild.v1.BuildOptions options = 23;</code> */ com.google.cloudbuild.v1.BuildOptionsOrBuilder getOptionsOrBuilder(); /** * * * <pre> * Output only. URL to logs for this build in Google Cloud Console. * </pre> * * <code>string log_url = 25 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The logUrl. */ java.lang.String getLogUrl(); /** * * * <pre> * Output only. URL to logs for this build in Google Cloud Console. * </pre> * * <code>string log_url = 25 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for logUrl. 
*/ com.google.protobuf.ByteString getLogUrlBytes(); /** * * * <pre> * Substitutions data for `Build` resource. * </pre> * * <code>map&lt;string, string&gt; substitutions = 29;</code> */ int getSubstitutionsCount(); /** * * * <pre> * Substitutions data for `Build` resource. * </pre> * * <code>map&lt;string, string&gt; substitutions = 29;</code> */ boolean containsSubstitutions(java.lang.String key); /** Use {@link #getSubstitutionsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getSubstitutions(); /** * * * <pre> * Substitutions data for `Build` resource. * </pre> * * <code>map&lt;string, string&gt; substitutions = 29;</code> */ java.util.Map<java.lang.String, java.lang.String> getSubstitutionsMap(); /** * * * <pre> * Substitutions data for `Build` resource. * </pre> * * <code>map&lt;string, string&gt; substitutions = 29;</code> */ /* nullable */ java.lang.String getSubstitutionsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Substitutions data for `Build` resource. * </pre> * * <code>map&lt;string, string&gt; substitutions = 29;</code> */ java.lang.String getSubstitutionsOrThrow(java.lang.String key); /** * * * <pre> * Tags for annotation of a `Build`. These are not docker tags. * </pre> * * <code>repeated string tags = 31;</code> * * @return A list containing the tags. */ java.util.List<java.lang.String> getTagsList(); /** * * * <pre> * Tags for annotation of a `Build`. These are not docker tags. * </pre> * * <code>repeated string tags = 31;</code> * * @return The count of tags. */ int getTagsCount(); /** * * * <pre> * Tags for annotation of a `Build`. These are not docker tags. * </pre> * * <code>repeated string tags = 31;</code> * * @param index The index of the element to return. * @return The tags at the given index. */ java.lang.String getTags(int index); /** * * * <pre> * Tags for annotation of a `Build`. These are not docker tags. 
* </pre> * * <code>repeated string tags = 31;</code> * * @param index The index of the value to return. * @return The bytes of the tags at the given index. */ com.google.protobuf.ByteString getTagsBytes(int index); /** * * * <pre> * Secrets to decrypt using Cloud Key Management Service. * Note: Secret Manager is the recommended technique * for managing sensitive data with Cloud Build. Use `available_secrets` to * configure builds to access secrets from Secret Manager. For instructions, * see: https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Secret secrets = 32;</code> */ java.util.List<com.google.cloudbuild.v1.Secret> getSecretsList(); /** * * * <pre> * Secrets to decrypt using Cloud Key Management Service. * Note: Secret Manager is the recommended technique * for managing sensitive data with Cloud Build. Use `available_secrets` to * configure builds to access secrets from Secret Manager. For instructions, * see: https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Secret secrets = 32;</code> */ com.google.cloudbuild.v1.Secret getSecrets(int index); /** * * * <pre> * Secrets to decrypt using Cloud Key Management Service. * Note: Secret Manager is the recommended technique * for managing sensitive data with Cloud Build. Use `available_secrets` to * configure builds to access secrets from Secret Manager. For instructions, * see: https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Secret secrets = 32;</code> */ int getSecretsCount(); /** * * * <pre> * Secrets to decrypt using Cloud Key Management Service. * Note: Secret Manager is the recommended technique * for managing sensitive data with Cloud Build. Use `available_secrets` to * configure builds to access secrets from Secret Manager. 
For instructions, * see: https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Secret secrets = 32;</code> */ java.util.List<? extends com.google.cloudbuild.v1.SecretOrBuilder> getSecretsOrBuilderList(); /** * * * <pre> * Secrets to decrypt using Cloud Key Management Service. * Note: Secret Manager is the recommended technique * for managing sensitive data with Cloud Build. Use `available_secrets` to * configure builds to access secrets from Secret Manager. For instructions, * see: https://cloud.google.com/cloud-build/docs/securing-builds/use-secrets * </pre> * * <code>repeated .google.devtools.cloudbuild.v1.Secret secrets = 32;</code> */ com.google.cloudbuild.v1.SecretOrBuilder getSecretsOrBuilder(int index); /** * * * <pre> * Output only. Stores timing information for phases of the build. Valid keys * are: * * * BUILD: time to execute all build steps. * * PUSH: time to push all artifacts including docker images and non docker * artifacts. * * FETCHSOURCE: time to fetch source. * * SETUPBUILD: time to set up build. * * If the build does not specify source or images, * these keys will not be included. * </pre> * * <code> * map&lt;string, .google.devtools.cloudbuild.v1.TimeSpan&gt; timing = 33 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getTimingCount(); /** * * * <pre> * Output only. Stores timing information for phases of the build. Valid keys * are: * * * BUILD: time to execute all build steps. * * PUSH: time to push all artifacts including docker images and non docker * artifacts. * * FETCHSOURCE: time to fetch source. * * SETUPBUILD: time to set up build. * * If the build does not specify source or images, * these keys will not be included. 
* </pre> * * <code> * map&lt;string, .google.devtools.cloudbuild.v1.TimeSpan&gt; timing = 33 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ boolean containsTiming(java.lang.String key); /** Use {@link #getTimingMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, com.google.cloudbuild.v1.TimeSpan> getTiming(); /** * * * <pre> * Output only. Stores timing information for phases of the build. Valid keys * are: * * * BUILD: time to execute all build steps. * * PUSH: time to push all artifacts including docker images and non docker * artifacts. * * FETCHSOURCE: time to fetch source. * * SETUPBUILD: time to set up build. * * If the build does not specify source or images, * these keys will not be included. * </pre> * * <code> * map&lt;string, .google.devtools.cloudbuild.v1.TimeSpan&gt; timing = 33 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.Map<java.lang.String, com.google.cloudbuild.v1.TimeSpan> getTimingMap(); /** * * * <pre> * Output only. Stores timing information for phases of the build. Valid keys * are: * * * BUILD: time to execute all build steps. * * PUSH: time to push all artifacts including docker images and non docker * artifacts. * * FETCHSOURCE: time to fetch source. * * SETUPBUILD: time to set up build. * * If the build does not specify source or images, * these keys will not be included. * </pre> * * <code> * map&lt;string, .google.devtools.cloudbuild.v1.TimeSpan&gt; timing = 33 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ /* nullable */ com.google.cloudbuild.v1.TimeSpan getTimingOrDefault( java.lang.String key, /* nullable */ com.google.cloudbuild.v1.TimeSpan defaultValue); /** * * * <pre> * Output only. Stores timing information for phases of the build. Valid keys * are: * * * BUILD: time to execute all build steps. * * PUSH: time to push all artifacts including docker images and non docker * artifacts. * * FETCHSOURCE: time to fetch source. * * SETUPBUILD: time to set up build. 
* * If the build does not specify source or images, * these keys will not be included. * </pre> * * <code> * map&lt;string, .google.devtools.cloudbuild.v1.TimeSpan&gt; timing = 33 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.TimeSpan getTimingOrThrow(java.lang.String key); /** * * * <pre> * Output only. Describes this build's approval configuration, status, * and result. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildApproval approval = 44 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the approval field is set. */ boolean hasApproval(); /** * * * <pre> * Output only. Describes this build's approval configuration, status, * and result. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildApproval approval = 44 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The approval. */ com.google.cloudbuild.v1.BuildApproval getApproval(); /** * * * <pre> * Output only. Describes this build's approval configuration, status, * and result. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildApproval approval = 44 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.BuildApprovalOrBuilder getApprovalOrBuilder(); /** * * * <pre> * IAM service account whose credentials will be used at build runtime. * Must be of the format `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. * ACCOUNT can be email address or uniqueId of the service account. * </pre> * * <code>string service_account = 42 [(.google.api.resource_reference) = { ... }</code> * * @return The serviceAccount. */ java.lang.String getServiceAccount(); /** * * * <pre> * IAM service account whose credentials will be used at build runtime. * Must be of the format `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. * ACCOUNT can be email address or uniqueId of the service account. * </pre> * * <code>string service_account = 42 [(.google.api.resource_reference) = { ... 
}</code> * * @return The bytes for serviceAccount. */ com.google.protobuf.ByteString getServiceAccountBytes(); /** * * * <pre> * Secrets and secret environment variables. * </pre> * * <code>.google.devtools.cloudbuild.v1.Secrets available_secrets = 47;</code> * * @return Whether the availableSecrets field is set. */ boolean hasAvailableSecrets(); /** * * * <pre> * Secrets and secret environment variables. * </pre> * * <code>.google.devtools.cloudbuild.v1.Secrets available_secrets = 47;</code> * * @return The availableSecrets. */ com.google.cloudbuild.v1.Secrets getAvailableSecrets(); /** * * * <pre> * Secrets and secret environment variables. * </pre> * * <code>.google.devtools.cloudbuild.v1.Secrets available_secrets = 47;</code> */ com.google.cloudbuild.v1.SecretsOrBuilder getAvailableSecretsOrBuilder(); /** * * * <pre> * Output only. Non-fatal problems encountered during the execution of the * build. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Build.Warning warnings = 49 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.cloudbuild.v1.Build.Warning> getWarningsList(); /** * * * <pre> * Output only. Non-fatal problems encountered during the execution of the * build. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Build.Warning warnings = 49 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.Build.Warning getWarnings(int index); /** * * * <pre> * Output only. Non-fatal problems encountered during the execution of the * build. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Build.Warning warnings = 49 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getWarningsCount(); /** * * * <pre> * Output only. Non-fatal problems encountered during the execution of the * build. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Build.Warning warnings = 49 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? 
extends com.google.cloudbuild.v1.Build.WarningOrBuilder> getWarningsOrBuilderList(); /** * * * <pre> * Output only. Non-fatal problems encountered during the execution of the * build. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Build.Warning warnings = 49 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.Build.WarningOrBuilder getWarningsOrBuilder(int index); /** * * * <pre> * Optional. Configuration for git operations. * </pre> * * <code> * .google.devtools.cloudbuild.v1.GitConfig git_config = 48 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the gitConfig field is set. */ boolean hasGitConfig(); /** * * * <pre> * Optional. Configuration for git operations. * </pre> * * <code> * .google.devtools.cloudbuild.v1.GitConfig git_config = 48 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The gitConfig. */ com.google.cloudbuild.v1.GitConfig getGitConfig(); /** * * * <pre> * Optional. Configuration for git operations. * </pre> * * <code> * .google.devtools.cloudbuild.v1.GitConfig git_config = 48 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloudbuild.v1.GitConfigOrBuilder getGitConfigOrBuilder(); /** * * * <pre> * Output only. Contains information about the build when status=FAILURE. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Build.FailureInfo failure_info = 51 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the failureInfo field is set. */ boolean hasFailureInfo(); /** * * * <pre> * Output only. Contains information about the build when status=FAILURE. * </pre> * * <code> * .google.devtools.cloudbuild.v1.Build.FailureInfo failure_info = 51 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The failureInfo. */ com.google.cloudbuild.v1.Build.FailureInfo getFailureInfo(); /** * * * <pre> * Output only. Contains information about the build when status=FAILURE. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.Build.FailureInfo failure_info = 51 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloudbuild.v1.Build.FailureInfoOrBuilder getFailureInfoOrBuilder(); /** * * * <pre> * Optional. Dependencies that the Cloud Build worker will fetch before * executing user steps. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Dependency dependencies = 56 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<com.google.cloudbuild.v1.Dependency> getDependenciesList(); /** * * * <pre> * Optional. Dependencies that the Cloud Build worker will fetch before * executing user steps. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Dependency dependencies = 56 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloudbuild.v1.Dependency getDependencies(int index); /** * * * <pre> * Optional. Dependencies that the Cloud Build worker will fetch before * executing user steps. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Dependency dependencies = 56 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ int getDependenciesCount(); /** * * * <pre> * Optional. Dependencies that the Cloud Build worker will fetch before * executing user steps. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Dependency dependencies = 56 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<? extends com.google.cloudbuild.v1.DependencyOrBuilder> getDependenciesOrBuilderList(); /** * * * <pre> * Optional. Dependencies that the Cloud Build worker will fetch before * executing user steps. * </pre> * * <code> * repeated .google.devtools.cloudbuild.v1.Dependency dependencies = 56 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloudbuild.v1.DependencyOrBuilder getDependenciesOrBuilder(int index); }
googleapis/google-cloud-java
37,497
java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1/MatchServiceClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.aiplatform.v1; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.aiplatform.v1.stub.MatchServiceStub; import com.google.cloud.aiplatform.v1.stub.MatchServiceStubSettings; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.util.concurrent.MoreExecutors; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: MatchService is a Google managed service for efficient vector similarity * search at scale. 
* * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * FindNeighborsRequest request = * FindNeighborsRequest.newBuilder() * .setIndexEndpoint( * IndexEndpointName.of("[PROJECT]", "[LOCATION]", "[INDEX_ENDPOINT]").toString()) * .setDeployedIndexId("deployedIndexId-1101212953") * .addAllQueries(new ArrayList<FindNeighborsRequest.Query>()) * .setReturnFullDatapoint(true) * .build(); * FindNeighborsResponse response = matchServiceClient.findNeighbors(request); * } * }</pre> * * <p>Note: close() needs to be called on the MatchServiceClient object to clean up resources such * as threads. In the example above, try-with-resources is used, which automatically calls close(). 
* * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> FindNeighbors</td> * <td><p> Finds the nearest neighbors of each vector within the request.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> findNeighbors(FindNeighborsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> findNeighborsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ReadIndexDatapoints</td> * <td><p> Reads the datapoints/vectors of the given IDs. A maximum of 1000 datapoints can be retrieved in a batch.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> readIndexDatapoints(ReadIndexDatapointsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> readIndexDatapointsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListLocations</td> * <td><p> Lists information about the supported locations for this service.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listLocations(ListLocationsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listLocationsPagedCallable() * <li><p> listLocationsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GetLocation</td> * <td><p> Gets information about a location.</td> * <td> * <p>Request object method variants only take one parameter, a request object, 
which must be constructed before the call.</p> * <ul> * <li><p> getLocation(GetLocationRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getLocationCallable() * </ul> * </td> * </tr> * <tr> * <td><p> SetIamPolicy</td> * <td><p> Sets the access control policy on the specified resource. Replacesany existing policy. * <p> Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED`errors.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> setIamPolicy(SetIamPolicyRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> setIamPolicyCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GetIamPolicy</td> * <td><p> Gets the access control policy for a resource. Returns an empty policyif the resource exists and does not have a policy set.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> getIamPolicy(GetIamPolicyRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getIamPolicyCallable() * </ul> * </td> * </tr> * <tr> * <td><p> TestIamPermissions</td> * <td><p> Returns permissions that a caller has on the specified resource. If theresource does not exist, this will return an empty set ofpermissions, not a `NOT_FOUND` error. * <p> Note: This operation is designed to be used for buildingpermission-aware UIs and command-line tools, not for authorizationchecking. 
This operation may "fail open" without warning.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> testIamPermissions(TestIamPermissionsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> testIamPermissionsCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of MatchServiceSettings to * create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MatchServiceSettings matchServiceSettings = * MatchServiceSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * MatchServiceClient matchServiceClient = MatchServiceClient.create(matchServiceSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MatchServiceSettings matchServiceSettings = * MatchServiceSettings.newBuilder().setEndpoint(myEndpoint).build(); * MatchServiceClient matchServiceClient = MatchServiceClient.create(matchServiceSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @Generated("by gapic-generator-java") public class MatchServiceClient implements BackgroundResource { private final MatchServiceSettings settings; private final MatchServiceStub stub; /** Constructs an instance of MatchServiceClient with default settings. */ public static final MatchServiceClient create() throws IOException { return create(MatchServiceSettings.newBuilder().build()); } /** * Constructs an instance of MatchServiceClient, using the given settings. The channels are * created based on the settings passed in, or defaults for any settings that are not set. */ public static final MatchServiceClient create(MatchServiceSettings settings) throws IOException { return new MatchServiceClient(settings); } /** * Constructs an instance of MatchServiceClient, using the given stub for making calls. This is * for advanced usage - prefer using create(MatchServiceSettings). */ public static final MatchServiceClient create(MatchServiceStub stub) { return new MatchServiceClient(stub); } /** * Constructs an instance of MatchServiceClient, using the given settings. This is protected so * that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. 
*/ protected MatchServiceClient(MatchServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((MatchServiceStubSettings) settings.getStubSettings()).createStub(); } protected MatchServiceClient(MatchServiceStub stub) { this.settings = null; this.stub = stub; } public final MatchServiceSettings getSettings() { return settings; } public MatchServiceStub getStub() { return stub; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finds the nearest neighbors of each vector within the request. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * FindNeighborsRequest request = * FindNeighborsRequest.newBuilder() * .setIndexEndpoint( * IndexEndpointName.of("[PROJECT]", "[LOCATION]", "[INDEX_ENDPOINT]").toString()) * .setDeployedIndexId("deployedIndexId-1101212953") * .addAllQueries(new ArrayList<FindNeighborsRequest.Query>()) * .setReturnFullDatapoint(true) * .build(); * FindNeighborsResponse response = matchServiceClient.findNeighbors(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final FindNeighborsResponse findNeighbors(FindNeighborsRequest request) { return findNeighborsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Finds the nearest neighbors of each vector within the request. 
* * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * FindNeighborsRequest request = * FindNeighborsRequest.newBuilder() * .setIndexEndpoint( * IndexEndpointName.of("[PROJECT]", "[LOCATION]", "[INDEX_ENDPOINT]").toString()) * .setDeployedIndexId("deployedIndexId-1101212953") * .addAllQueries(new ArrayList<FindNeighborsRequest.Query>()) * .setReturnFullDatapoint(true) * .build(); * ApiFuture<FindNeighborsResponse> future = * matchServiceClient.findNeighborsCallable().futureCall(request); * // Do something. * FindNeighborsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<FindNeighborsRequest, FindNeighborsResponse> findNeighborsCallable() { return stub.findNeighborsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads the datapoints/vectors of the given IDs. A maximum of 1000 datapoints can be retrieved in * a batch. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * ReadIndexDatapointsRequest request = * ReadIndexDatapointsRequest.newBuilder() * .setIndexEndpoint( * IndexEndpointName.of("[PROJECT]", "[LOCATION]", "[INDEX_ENDPOINT]").toString()) * .setDeployedIndexId("deployedIndexId-1101212953") * .addAllIds(new ArrayList<String>()) * .build(); * ReadIndexDatapointsResponse response = matchServiceClient.readIndexDatapoints(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ReadIndexDatapointsResponse readIndexDatapoints(ReadIndexDatapointsRequest request) { return readIndexDatapointsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Reads the datapoints/vectors of the given IDs. A maximum of 1000 datapoints can be retrieved in * a batch. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * ReadIndexDatapointsRequest request = * ReadIndexDatapointsRequest.newBuilder() * .setIndexEndpoint( * IndexEndpointName.of("[PROJECT]", "[LOCATION]", "[INDEX_ENDPOINT]").toString()) * .setDeployedIndexId("deployedIndexId-1101212953") * .addAllIds(new ArrayList<String>()) * .build(); * ApiFuture<ReadIndexDatapointsResponse> future = * matchServiceClient.readIndexDatapointsCallable().futureCall(request); * // Do something. * ReadIndexDatapointsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<ReadIndexDatapointsRequest, ReadIndexDatapointsResponse> readIndexDatapointsCallable() { return stub.readIndexDatapointsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * for (Location element : matchServiceClient.listLocations(request).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListLocationsPagedResponse listLocations(ListLocationsRequest request) { return listLocationsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * ApiFuture<Location> future = * matchServiceClient.listLocationsPagedCallable().futureCall(request); * // Do something. * for (Location element : future.get().iterateAll()) { * // doThingsWith(element); * } * } * }</pre> */ public final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return stub.listLocationsPagedCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists information about the supported locations for this service. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * ListLocationsRequest request = * ListLocationsRequest.newBuilder() * .setName("name3373707") * .setFilter("filter-1274492040") * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * while (true) { * ListLocationsResponse response = matchServiceClient.listLocationsCallable().call(request); * for (Location element : response.getLocationsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * }</pre> */ public final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return stub.listLocationsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets information about a location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); * Location response = matchServiceClient.getLocation(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Location getLocation(GetLocationRequest request) { return getLocationCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets information about a location. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build(); * ApiFuture<Location> future = matchServiceClient.getLocationCallable().futureCall(request); * // Do something. * Location response = future.get(); * } * }</pre> */ public final UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return stub.getLocationCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Sets the access control policy on the specified resource. Replacesany existing policy. * * <p>Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED`errors. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * SetIamPolicyRequest request = * SetIamPolicyRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .setPolicy(Policy.newBuilder().build()) * .setUpdateMask(FieldMask.newBuilder().build()) * .build(); * Policy response = matchServiceClient.setIamPolicy(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Policy setIamPolicy(SetIamPolicyRequest request) { return setIamPolicyCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Sets the access control policy on the specified resource. Replacesany existing policy. * * <p>Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED`errors. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * SetIamPolicyRequest request = * SetIamPolicyRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .setPolicy(Policy.newBuilder().build()) * .setUpdateMask(FieldMask.newBuilder().build()) * .build(); * ApiFuture<Policy> future = matchServiceClient.setIamPolicyCallable().futureCall(request); * // Do something. * Policy response = future.get(); * } * }</pre> */ public final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() { return stub.setIamPolicyCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the access control policy for a resource. Returns an empty policyif the resource exists * and does not have a policy set. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * GetIamPolicyRequest request = * GetIamPolicyRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .setOptions(GetPolicyOptions.newBuilder().build()) * .build(); * Policy response = matchServiceClient.getIamPolicy(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final Policy getIamPolicy(GetIamPolicyRequest request) { return getIamPolicyCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets the access control policy for a resource. Returns an empty policyif the resource exists * and does not have a policy set. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * GetIamPolicyRequest request = * GetIamPolicyRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .setOptions(GetPolicyOptions.newBuilder().build()) * .build(); * ApiFuture<Policy> future = matchServiceClient.getIamPolicyCallable().futureCall(request); * // Do something. * Policy response = future.get(); * } * }</pre> */ public final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() { return stub.getIamPolicyCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns permissions that a caller has on the specified resource. If theresource does not exist, * this will return an empty set ofpermissions, not a `NOT_FOUND` error. * * <p>Note: This operation is designed to be used for buildingpermission-aware UIs and * command-line tools, not for authorizationchecking. This operation may "fail open" without * warning. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. 
* // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * TestIamPermissionsRequest request = * TestIamPermissionsRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .addAllPermissions(new ArrayList<String>()) * .build(); * TestIamPermissionsResponse response = matchServiceClient.testIamPermissions(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final TestIamPermissionsResponse testIamPermissions(TestIamPermissionsRequest request) { return testIamPermissionsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns permissions that a caller has on the specified resource. If theresource does not exist, * this will return an empty set ofpermissions, not a `NOT_FOUND` error. * * <p>Note: This operation is designed to be used for buildingpermission-aware UIs and * command-line tools, not for authorizationchecking. This operation may "fail open" without * warning. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MatchServiceClient matchServiceClient = MatchServiceClient.create()) { * TestIamPermissionsRequest request = * TestIamPermissionsRequest.newBuilder() * .setResource( * EndpointName.ofProjectLocationEndpointName( * "[PROJECT]", "[LOCATION]", "[ENDPOINT]") * .toString()) * .addAllPermissions(new ArrayList<String>()) * .build(); * ApiFuture<TestIamPermissionsResponse> future = * matchServiceClient.testIamPermissionsCallable().futureCall(request); * // Do something. * TestIamPermissionsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsCallable() { return stub.testIamPermissionsCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } public static class ListLocationsPagedResponse extends AbstractPagedListResponse< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage, ListLocationsFixedSizeCollection> { public static ApiFuture<ListLocationsPagedResponse> createAsync( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ApiFuture<ListLocationsResponse> futureResponse) { ApiFuture<ListLocationsPage> futurePage = ListLocationsPage.createEmptyPage().createPageAsync(context, futureResponse); return ApiFutures.transform( futurePage, input -> new ListLocationsPagedResponse(input), MoreExecutors.directExecutor()); } private 
ListLocationsPagedResponse(ListLocationsPage page) { super(page, ListLocationsFixedSizeCollection.createEmptyCollection()); } } public static class ListLocationsPage extends AbstractPage< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage> { private ListLocationsPage( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ListLocationsResponse response) { super(context, response); } private static ListLocationsPage createEmptyPage() { return new ListLocationsPage(null, null); } @Override protected ListLocationsPage createPage( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ListLocationsResponse response) { return new ListLocationsPage(context, response); } @Override public ApiFuture<ListLocationsPage> createPageAsync( PageContext<ListLocationsRequest, ListLocationsResponse, Location> context, ApiFuture<ListLocationsResponse> futureResponse) { return super.createPageAsync(context, futureResponse); } } public static class ListLocationsFixedSizeCollection extends AbstractFixedSizeCollection< ListLocationsRequest, ListLocationsResponse, Location, ListLocationsPage, ListLocationsFixedSizeCollection> { private ListLocationsFixedSizeCollection(List<ListLocationsPage> pages, int collectionSize) { super(pages, collectionSize); } private static ListLocationsFixedSizeCollection createEmptyCollection() { return new ListLocationsFixedSizeCollection(null, 0); } @Override protected ListLocationsFixedSizeCollection createCollection( List<ListLocationsPage> pages, int collectionSize) { return new ListLocationsFixedSizeCollection(pages, collectionSize); } } }
apache/tomcat
37,697
test/org/apache/catalina/servlets/TestDefaultServletRfc9110Section13.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.servlets; import java.io.File; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import jakarta.servlet.http.HttpServletResponse; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameter; import org.apache.catalina.Context; import org.apache.catalina.Wrapper; import org.apache.catalina.startup.SimpleHttpClient; import org.apache.catalina.startup.Tomcat; import org.apache.catalina.startup.TomcatBaseTest; import org.apache.tomcat.util.buf.ByteChunk; import org.apache.tomcat.util.http.FastHttpDateFormat; /** * This test case is used to verify RFC 9110 Section 13. Conditional Requests. 
*/ @RunWith(Parameterized.class) public class TestDefaultServletRfc9110Section13 extends TomcatBaseTest { @Parameter(0) public boolean useStrongETags; @Parameter(1) public Task task; @Parameter(2) public EtagPrecondition ifMatchPrecondition; @Parameter(3) public DatePrecondition ifUnmodifiedSincePrecondition; @Parameter(4) public EtagPrecondition ifNoneMatchPrecondition; @Parameter(5) public DatePrecondition ifModifiedSincePrecondition; @Parameter(6) public EtagPrecondition ifRangeEtagPrecondition; @Parameter(7) public DatePrecondition ifRangeDatePrecondition; @Parameter(8) public boolean addRangeHeader; @Parameter(9) public Integer scExpected; @Parameterized.Parameters(name = "{index} resource-strong [{0}], matchHeader [{1}]") public static Collection<Object[]> parameters() { List<Object[]> parameterSets = new ArrayList<>(); for (Boolean useStrongEtag : booleans) { for (Task task : Arrays.asList(Task.HEAD_INDEX_HTML, Task.GET_INDEX_HTML, Task.POST_INDEX_HTML)) { // RFC 9110, Section 13.2.2, Step 1, HEAD: If-Match with and without If-Unmodified-Since for (DatePrecondition dateCondition : DatePrecondition.values()) { parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.ALL, dateCondition, null, null, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.EXACTLY, dateCondition, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? SC_200 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.IN, dateCondition, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? 
SC_200 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.NOT_IN, dateCondition, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.INVALID, dateCondition, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.INVALID_ALL_PLUS_OTHER, dateCondition, null, null, null, null, Boolean.FALSE, SC_400 }); } // RFC 9110, Section 13.2.2, Step 2, HEAD: If-Unmodified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.EQ, null, null, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.LT, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.GT, null, null, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.MULTI_IN, null, null, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.MULTI_IN_REV, null, null, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.INVALID, null, null, null, null, Boolean.FALSE, SC_200 }); // Ensure If-Unmodified-Since takes precedence over If-Modified-Since // If-Unmodified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.LT, null, null, null, null, Boolean.FALSE, SC_412 }); // If-Modified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.GT, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? 
SC_200 : SC_304 }); // Both parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.LT, null, DatePrecondition.GT, null, null, Boolean.FALSE, SC_412 }); // RFC 9110, Section 13.2.2, Step 3, HEAD: If-None-Match with and without If-Modified-Since for (DatePrecondition dateCondition : DatePrecondition.values()) { parameterSets .add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.ALL, dateCondition, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? SC_412 : SC_304 }); parameterSets.add( new Object[] { useStrongEtag, task, null, null, EtagPrecondition.EXACTLY, dateCondition, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? SC_412 : SC_304 }); parameterSets .add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.IN, dateCondition, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? SC_412 : SC_304 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.NOT_IN, dateCondition, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.INVALID, dateCondition, null, null, Boolean.FALSE, SC_400 }); parameterSets.add( new Object[] { useStrongEtag, task, null, null, EtagPrecondition.INVALID_ALL_PLUS_OTHER, dateCondition, null, null, Boolean.FALSE, SC_400 }); } // RFC 9110, Section 13.2.2, Step 4, HEAD: If-Modified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.EQ, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? SC_200 : SC_304 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.LT, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.GT, null, null, Boolean.FALSE, task.equals(Task.POST_INDEX_HTML) ? 
SC_200 : SC_304 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.MULTI_IN, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.MULTI_IN_REV, null, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.INVALID, null, null, Boolean.FALSE, SC_200 }); } for (Task task : Arrays.asList(Task.HEAD_404_HTML, Task.GET_404_HTML, Task.POST_404_HTML)) { // RFC 9110, Section 13.2.2, Step 1, HEAD: If-Match with and without If-Unmodified-Since for (DatePrecondition dateCondition : DatePrecondition.values()) { parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.ALL, dateCondition, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.IN, dateCondition, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.NOT_IN, dateCondition, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.INVALID, dateCondition, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, task, EtagPrecondition.INVALID_ALL_PLUS_OTHER, dateCondition, null, null, null, null, Boolean.FALSE, SC_400 }); } // RFC 9110, Section 13.2.2, Step 2, HEAD: If-Unmodified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.EQ, null, null, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.LT, null, null, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.GT, null, null, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.MULTI_IN, null, null, null, null, 
Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.MULTI_IN_REV, null, null, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, DatePrecondition.INVALID, null, null, null, null, Boolean.FALSE, SC_404 }); // RFC 9110, Section 13.2.2, Step 3, HEAD: If-None-Match with and without If-Modified-Since for (DatePrecondition dateCondition : DatePrecondition.values()) { parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.ALL, dateCondition, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.IN, dateCondition, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.NOT_IN, dateCondition, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, EtagPrecondition.INVALID, dateCondition, null, null, Boolean.FALSE, SC_400 }); parameterSets.add( new Object[] { useStrongEtag, task, null, null, EtagPrecondition.INVALID_ALL_PLUS_OTHER, dateCondition, null, null, Boolean.FALSE, SC_400 }); } // RFC 9110, Section 13.2.2, Step 4, HEAD: If-Modified-Since only parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.EQ, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.LT, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.GT, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.MULTI_IN, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, null, DatePrecondition.MULTI_IN_REV, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, task, null, null, 
null, DatePrecondition.INVALID, null, null, Boolean.FALSE, SC_404 }); } // RFC 9110, Section 13.2.2, Step 5, GET: If-Range only // entity-tag parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.ALL, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.EXACTLY, null, Boolean.TRUE, useStrongEtag.booleanValue() ? SC_206 : SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.IN, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.NOT_IN, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.INVALID, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, Boolean.TRUE, SC_400 }); // HTTP-date parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.EQ, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.LT, Boolean.TRUE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.GT, Boolean.TRUE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.MULTI_IN, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.MULTI_IN_REV, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.INVALID, Boolean.TRUE, SC_400 }); // Range 
header without If-Range parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.ALL, null, null, null, null, null, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.EXACTLY, null, null, null, null, null, Boolean.TRUE, useStrongEtag.booleanValue() ? SC_206 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.IN, null, null, null, null, null, Boolean.TRUE, useStrongEtag.booleanValue() ? SC_206 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.NOT_IN, null, null, null, null, null, Boolean.TRUE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.INVALID, null, null, null, null, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, null, null, null, null, Boolean.TRUE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.EQ, null, null, null, null, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.LT, null, null, null, null, Boolean.TRUE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.GT, null, null, null, null, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.MULTI_IN, null, null, null, null, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.MULTI_IN_REV, null, null, null, null, Boolean.TRUE, SC_206 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, DatePrecondition.INVALID, null, null, null, null, Boolean.TRUE, SC_206 }); // If-Range header without Range parameterSets.add(new Object[] { useStrongEtag, 
Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.ALL, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.EXACTLY, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.IN, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.NOT_IN, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.INVALID, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.EQ, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.LT, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.GT, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.MULTI_IN, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.MULTI_IN_REV, Boolean.FALSE, SC_200 }); parameterSets.add(new Object[] { useStrongEtag, Task.GET_INDEX_HTML, null, null, null, null, null, DatePrecondition.INVALID, Boolean.FALSE, SC_200 }); // PUT tests parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, null, null, null, null, null, null, Boolean.FALSE, SC_204 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.ALL, null, 
null, null, null, null, Boolean.FALSE, SC_204 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.EXACTLY, null, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? SC_204 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.IN, null, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? SC_204 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.NOT_IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.INVALID, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_EXIST_TXT, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, null, null, null, null, null, null, Boolean.FALSE, SC_201 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, EtagPrecondition.ALL, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, EtagPrecondition.IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, EtagPrecondition.NOT_IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, EtagPrecondition.INVALID, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.PUT_NEW_TXT, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, null, null, null, null, Boolean.FALSE, SC_400 }); // DELETE TESTS parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, null, null, null, null, null, null, Boolean.FALSE, SC_204 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, 
EtagPrecondition.ALL, null, null, null, null, null, Boolean.FALSE, SC_204 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, EtagPrecondition.EXACTLY, null, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? SC_204 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, EtagPrecondition.IN, null, null, null, null, null, Boolean.FALSE, useStrongEtag.booleanValue() ? SC_204 : SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, EtagPrecondition.NOT_IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, EtagPrecondition.INVALID, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_EXIST_TXT, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, null, null, null, null, null, null, Boolean.FALSE, SC_404 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, EtagPrecondition.ALL, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, EtagPrecondition.IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, EtagPrecondition.NOT_IN, null, null, null, null, null, Boolean.FALSE, SC_412 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, EtagPrecondition.INVALID, null, null, null, null, null, Boolean.FALSE, SC_400 }); parameterSets.add(new Object[] { useStrongEtag, Task.DELETE_NOT_EXIST_TXT, EtagPrecondition.INVALID_ALL_PLUS_OTHER, null, null, null, null, null, Boolean.FALSE, SC_400 }); } return parameterSets; } private static Integer SC_200 = Integer.valueOf(HttpServletResponse.SC_OK); private static Integer SC_201 = 
Integer.valueOf(HttpServletResponse.SC_CREATED); private static Integer SC_204 = Integer.valueOf(HttpServletResponse.SC_NO_CONTENT); private static Integer SC_206 = Integer.valueOf(HttpServletResponse.SC_PARTIAL_CONTENT); private static Integer SC_304 = Integer.valueOf(HttpServletResponse.SC_NOT_MODIFIED); private static Integer SC_400 = Integer.valueOf(HttpServletResponse.SC_BAD_REQUEST); private static Integer SC_404 = Integer.valueOf(HttpServletResponse.SC_NOT_FOUND); private static Integer SC_412 = Integer.valueOf(HttpServletResponse.SC_PRECONDITION_FAILED); private enum HTTP_METHOD { GET, PUT, DELETE, POST, HEAD } private enum Task { HEAD_INDEX_HTML(HTTP_METHOD.HEAD, "/index.html"), HEAD_404_HTML(HTTP_METHOD.HEAD, "/sc_404.html"), GET_INDEX_HTML(HTTP_METHOD.GET, "/index.html"), GET_404_HTML(HTTP_METHOD.GET, "/sc_404.html"), POST_INDEX_HTML(HTTP_METHOD.POST, "/index.html"), POST_404_HTML(HTTP_METHOD.POST, "/sc_404.html"), PUT_EXIST_TXT(HTTP_METHOD.PUT, "/put_exist.txt"), PUT_NEW_TXT(HTTP_METHOD.PUT, "/put_new.txt"), DELETE_EXIST_TXT(HTTP_METHOD.DELETE, "/delete_exist.txt"), DELETE_NOT_EXIST_TXT(HTTP_METHOD.DELETE, "/delete_404.txt"); HTTP_METHOD m; String uri; Task(HTTP_METHOD m, String uri) { this.m = m; this.uri = uri; } @Override public String toString() { return m.name() + " " + uri; } } private enum EtagPrecondition { EXACTLY, IN, ALL, NOT_IN, INVALID, INVALID_ALL_PLUS_OTHER } private enum DatePrecondition { /** * Condition header value of http date is equivalent to actual resource lastModified date */ EQ, /** * Condition header value of http date is greater(later) than actual resource lastModified date */ GT, /** * Condition header value of http date is less(earlier) than actual resource lastModified date */ LT, MULTI_IN, MULTI_IN_REV, /** * not a valid HTTP-date */ INVALID, /** * None. 
*/ NONE; } protected void genETagPrecondition(String strongETag, String weakETag, EtagPrecondition condition, String headerName, Map<String,List<String>> requestHeaders) { if (condition == null) { return; } List<String> headerValues = new ArrayList<>(); switch (condition) { case ALL: headerValues.add("*"); break; case EXACTLY: if (strongETag != null) { headerValues.add(strongETag); } else { // Should not happen throw new IllegalArgumentException("strong etag not found!"); } break; case IN: headerValues.add("\"1a2b3c4d\""); headerValues.add((weakETag != null ? weakETag + "," : "") + (strongETag != null ? strongETag + "," : "") + "W/\"*\""); headerValues.add("\"abcdefg\""); break; case NOT_IN: headerValues.add("\"1a2b3c4d\""); if (weakETag != null && weakETag.length() > 8) { headerValues.add(weakETag.substring(0, 3) + "XXXXX" + weakETag.substring(8)); } if (strongETag != null && strongETag.length() > 6) { headerValues.add(strongETag.substring(0, 1) + "XXXXX" + strongETag.substring(6)); } headerValues.add("\"abcdefg\""); break; case INVALID: headerValues.add("invalid-no-quotes"); break; case INVALID_ALL_PLUS_OTHER: headerValues.add("*"); headerValues.add("W/\"1abcd\""); break; } if (!headerValues.isEmpty()) { requestHeaders.put(headerName, headerValues); } } protected void genDatePrecondition(long lastModifiedTimestamp, DatePrecondition condition, String headerName, Map<String,List<String>> requestHeaders) { if (condition == null || lastModifiedTimestamp <= 0) { return; } List<String> headerValues = new ArrayList<>(); switch (condition) { case EQ: headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp)); break; case GT: headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp + 30000L)); break; case LT: headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp - 30000L)); break; case MULTI_IN: headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp - 30000L)); 
headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp)); headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp + 30000L)); break; case MULTI_IN_REV: headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp + 30000L)); headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp)); headerValues.add(FastHttpDateFormat.formatDate(lastModifiedTimestamp - 30000L)); break; case INVALID: headerValues.add("2024.12.09 GMT"); break; case NONE: // NO-OP break; } if (!headerValues.isEmpty()) { requestHeaders.put(headerName, headerValues); } } protected void addPreconditionHeaders(Map<String,List<String>> requestHeaders, String resourceETag, long lastModified) { String weakETag = resourceETag; String strongETag = resourceETag; if (resourceETag != null) { if (resourceETag.startsWith("W/")) { strongETag = resourceETag.substring(2); } else { weakETag = "W/" + resourceETag; } } genETagPrecondition(strongETag, weakETag, ifMatchPrecondition, "If-Match", requestHeaders); genDatePrecondition(lastModified, ifUnmodifiedSincePrecondition, "If-Unmodified-Since", requestHeaders); genETagPrecondition(strongETag, weakETag, ifNoneMatchPrecondition, "If-None-Match", requestHeaders); genDatePrecondition(lastModified, ifModifiedSincePrecondition, "If-Modified-Since", requestHeaders); genETagPrecondition(strongETag, weakETag, ifRangeEtagPrecondition, "If-Range", requestHeaders); genDatePrecondition(lastModified, ifRangeDatePrecondition, "If-Range", requestHeaders); } private File tempDocBase = null; @Override public void setUp() throws Exception { super.setUp(); tempDocBase = Files.createTempDirectory(getTemporaryDirectory().toPath(), "conditional").toFile(); long lastModified = FastHttpDateFormat.parseDate("Fri, 06 Dec 2024 00:00:00 GMT"); Files.write(Path.of(tempDocBase.getAbsolutePath(), "index.html"), "<html><body>Index</body></html>".getBytes(), StandardOpenOption.CREATE); Path.of(tempDocBase.getAbsolutePath(), 
"index.html").toFile().setLastModified(lastModified); if (task.m.equals(HTTP_METHOD.PUT)) { Files.write(Path.of(tempDocBase.getAbsolutePath(), "put_exist.txt"), "put_exist_v0".getBytes(), StandardOpenOption.CREATE); Path.of(tempDocBase.getAbsolutePath(), "put_exist.txt").toFile().setLastModified(lastModified); } if (task.m.equals(HTTP_METHOD.DELETE)) { Files.write(Path.of(tempDocBase.getAbsolutePath(), "delete_exist.txt"), "delete_exist_v0".getBytes(), StandardOpenOption.CREATE); Path.of(tempDocBase.getAbsolutePath(), "delete_exist.txt").toFile().setLastModified(lastModified); } } @Test public void testPreconditions() throws Exception { Tomcat tomcat = getTomcatInstance(); Context ctxt = tomcat.addContext("", tempDocBase.getAbsolutePath()); Wrapper w = Tomcat.addServlet(ctxt, "default", DefaultServlet.class.getName()); w.addInitParameter("readonly", "false"); w.addInitParameter("allowPartialPut", "true"); w.addInitParameter("allowPostAsGet", "true"); w.addInitParameter("useStrongETags", Boolean.toString(useStrongETags)); ctxt.addServletMappingDecoded("/", "default"); tomcat.start(); Assert.assertNotNull(task); Map<String,List<String>> requestHeaders = new HashMap<>(); Map<String,List<String>> responseHeaders = new HashMap<>(); String etag = null; long lastModified = -1; String uri = "http://localhost:" + getPort() + task.uri; // Try head to receives etag and lastModified Date int sc = headUrl(uri, new ByteChunk(), responseHeaders); if (sc == 200) { etag = getSingleHeader("ETag", responseHeaders); String dt = getSingleHeader("Last-Modified", responseHeaders); if (dt != null && dt.length() > 0) { lastModified = FastHttpDateFormat.parseDate(dt); } } addPreconditionHeaders(requestHeaders, etag, lastModified); responseHeaders.clear(); sc = 0; SimpleHttpClient client = null; client = new SimpleHttpClient() { @Override public boolean isResponseBodyOK() { return true; } }; client.setPort(getPort()); StringBuffer curl = new StringBuffer(); curl.append(task.m.name() + " " + 
task.uri + " HTTP/1.1" + SimpleHttpClient.CRLF + "Host: localhost" + SimpleHttpClient.CRLF + "Connection: Close" + SimpleHttpClient.CRLF); for (Entry<String,List<String>> e : requestHeaders.entrySet()) { for (String v : e.getValue()) { curl.append(e.getKey() + ": " + v + SimpleHttpClient.CRLF); } } if (addRangeHeader) { curl.append("Range: bytes=0-10" + SimpleHttpClient.CRLF); } curl.append("Content-Length: 6" + SimpleHttpClient.CRLF); curl.append(SimpleHttpClient.CRLF); curl.append("PUT_v2"); client.setRequest(new String[] { curl.toString() }); client.connect(); client.processRequest(); for (String e : client.getResponseHeaders()) { Assert.assertTrue("Separator ':' expected and not the last char of response header field `" + e + "`", e.contains(":") && e.indexOf(':') < e.length() - 1); String name = e.substring(0, e.indexOf(':')); String value = e.substring(e.indexOf(':') + 1); responseHeaders.computeIfAbsent(name, k -> new ArrayList<String>()).add(value); } sc = client.getStatusCode(); boolean test = scExpected.intValue() == sc; Assert.assertTrue( "Failure - sc expected:%d, sc actual:%d, task:%s, \ntarget resource:(%s,%s), \nreq headers: %s, \nresp headers: %s" .formatted(scExpected, Integer.valueOf(sc), task, etag, FastHttpDateFormat.formatDate(lastModified), requestHeaders.toString(), responseHeaders.toString()), test); } }
apache/datasketches-java
37,378
src/test/java/org/apache/datasketches/quantiles/HeapUpdateDoublesSketchTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.datasketches.quantiles; import static java.lang.Math.floor; import static org.apache.datasketches.common.Util.LS; import static org.apache.datasketches.common.Util.log2; import static org.apache.datasketches.quantiles.ClassicUtil.computeCombinedBufferItemCapacity; import static org.apache.datasketches.quantiles.ClassicUtil.computeNumLevelsNeeded; import static org.apache.datasketches.quantiles.HeapUpdateDoublesSketch.checkPreLongsFlagsSerVer; import static org.apache.datasketches.quantiles.PreambleUtil.COMPACT_FLAG_MASK; import static org.apache.datasketches.quantiles.PreambleUtil.EMPTY_FLAG_MASK; import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.EXCLUSIVE; import static org.apache.datasketches.quantilescommon.QuantileSearchCriteria.INCLUSIVE; import static org.apache.datasketches.quantilescommon.QuantilesUtil.equallySpacedDoubles; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertNull; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; import java.lang.foreign.MemorySegment; import java.nio.ByteOrder; import 
org.apache.datasketches.common.SketchesArgumentException; import org.apache.datasketches.quantilescommon.QuantilesUtil; import org.testng.Assert; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; public class HeapUpdateDoublesSketchTest { @BeforeMethod public void setUp() { DoublesSketch.rand.setSeed(32749); // make sketches deterministic for testing } // Please note that this is a randomized test that could probabilistically fail // if we didn't set the seed. (The probability of failure could be reduced by increasing k.) // Setting the seed has now made it deterministic. @Test public void checkEndToEnd() { final int k = 256; final UpdateDoublesSketch qs = DoublesSketch.builder().setK(k).build(); final UpdateDoublesSketch qs2 = DoublesSketch.builder().setK(k).build(); final int n = 1000000; for (int item = n; item >= 1; item--) { if ((item % 4) == 0) { qs.update(item); } else { qs2.update(item); } } assertEquals(qs.getN() + qs2.getN(), n); final DoublesUnion union = DoublesUnion.heapify(qs); union.union(qs2); final DoublesSketch result = union.getResult(); final int numPhiValues = 99; final double[] phiArr = new double[numPhiValues]; for (int q = 1; q <= 99; q++) { phiArr[q-1] = q / 100.0; } final double[] splitPoints = result.getQuantiles(phiArr); // for (int i = 0; i < 99; i++) { // String s = String.format("%d\t%.6f\t%.6f", i, phiArr[i], splitPoints[i]); // println(s); // } for (int q = 1; q <= 99; q++) { final double nominal = (1e6 * q) / 100.0; final double reported = splitPoints[q-1]; assertTrue(reported >= (nominal - 10000.0)); assertTrue(reported <= (nominal + 10000.0)); } final double[] pmfResult = result.getPMF(splitPoints); double subtotal = 0.0; for (int q = 1; q <= 100; q++) { final double phi = q / 100.0; subtotal += pmfResult[q-1]; assertTrue(subtotal >= (phi - 0.01)); assertTrue(subtotal <= (phi + 0.01)); } final double[] cdfResult = result.getCDF(splitPoints); for (int q = 1; q <= 100; q++) { final double phi = q / 
100.0; subtotal = cdfResult[q-1]; assertTrue(subtotal >= (phi - 0.01)); assertTrue(subtotal <= (phi + 0.01)); } assertEquals(result.getRank(500000), 0.5, 0.01); } @Test public void checkSmallMinMax () { final int k = 32; final int n = 8; final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(k).build(); final UpdateDoublesSketch qs2 = DoublesSketch.builder().setK(k).build(); final UpdateDoublesSketch qs3 = DoublesSketch.builder().setK(k).build(); for (int i = n; i >= 1; i--) { qs1.update(i); qs2.update(10+i); qs3.update(i); } assertEquals(qs1.getQuantile (0.0, EXCLUSIVE), 1.0); assertEquals(qs1.getQuantile (0.5, EXCLUSIVE), 5.0); assertEquals(qs1.getQuantile (1.0, EXCLUSIVE), 8.0); assertEquals(qs2.getQuantile (0.0, EXCLUSIVE), 11.0); assertEquals(qs2.getQuantile (0.5, EXCLUSIVE), 15.0); assertEquals(qs2.getQuantile (1.0, EXCLUSIVE), 18.0); assertEquals(qs3.getQuantile (0.0, EXCLUSIVE), 1.0); assertEquals(qs3.getQuantile (0.5, EXCLUSIVE), 5.0); assertEquals(qs3.getQuantile (1.0, EXCLUSIVE), 8.0); final double[] queries = {0.0, 0.5, 1.0}; final double[] resultsA = qs1.getQuantiles(queries, EXCLUSIVE); assertEquals(resultsA[0], 1.0); assertEquals(resultsA[1], 5.0); assertEquals(resultsA[2], 8.0); final DoublesUnion union1 = DoublesUnion.heapify(qs1); union1.union(qs2); final DoublesSketch result1 = union1.getResult(); final DoublesUnion union2 = DoublesUnion.heapify(qs2); union2.union(qs3); final DoublesSketch result2 = union2.getResult(); final double[] resultsB = result1.getQuantiles(queries, EXCLUSIVE); assertEquals(resultsB[0], 1.0); assertEquals(resultsB[1], 11.0); assertEquals(resultsB[2], 18.0); final double[] resultsC = result2.getQuantiles(queries, EXCLUSIVE); assertEquals(resultsC[0], 1.0); assertEquals(resultsC[1], 11.0); assertEquals(resultsC[2], 18.0); } @Test public void checkMisc() { final int k = PreambleUtil.DEFAULT_K; final int n = 10000; final UpdateDoublesSketch qs = buildAndLoadQS(k, n); qs.update(Double.NaN); //ignore final int n2 = 
(int)qs.getN(); assertEquals(n2, n); qs.reset(); assertEquals(qs.getN(), 0); } @SuppressWarnings("unused") @Test public void checkToStringDetail() { final int k = PreambleUtil.DEFAULT_K; final int n = 1000000; UpdateDoublesSketch qs = buildAndLoadQS(k, 0); String s = qs.toString(); s = qs.toString(false, true); //println(s); qs = buildAndLoadQS(k, n); s = qs.toString(); //println(s); s = qs.toString(false, true); //println(qs.toString(false, true)); final int n2 = (int)qs.getN(); assertEquals(n2, n); qs.update(Double.NaN); //ignore qs.reset(); assertEquals(qs.getN(), 0); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkConstructorException() { DoublesSketch.builder().setK(0).build(); } @Test public void checkPreLongsFlagsAndSize() { byte[] byteArr; final UpdateDoublesSketch ds = DoublesSketch.builder().build(); //k = 128 //empty byteArr = ds.toByteArray(true); // compact assertEquals(byteArr.length, 8); byteArr = ds.toByteArray(false); // not compact assertEquals(byteArr.length, 8); assertEquals(byteArr[3], EMPTY_FLAG_MASK); //not empty ds.update(1); byteArr = ds.toByteArray(true); // compact assertEquals(byteArr.length, 40); //compact, 1 value byteArr = ds.toByteArray(false); // not compact assertEquals(byteArr.length, 64); // 32 + MIN_K(=2) * 2 * 8 = 64 } @Test public void checkPreLongsFlagsSerVerB() { checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK, 1, 1); //38 checkPreLongsFlagsSerVer(0, 1, 5); //164 checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK, 2, 1); //42 checkPreLongsFlagsSerVer(0, 2, 2); //72 checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK | COMPACT_FLAG_MASK, 3, 1); //47 checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK | COMPACT_FLAG_MASK, 3, 2); //79 checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK, 3, 2); //78 checkPreLongsFlagsSerVer(COMPACT_FLAG_MASK, 3, 2);//77 checkPreLongsFlagsSerVer(0, 3, 2); //76 } @Test(expectedExceptions = SketchesArgumentException.class) public void checkPreLongsFlagsSerVer3() { checkPreLongsFlagsSerVer(EMPTY_FLAG_MASK, 1, 2); } 
@Test(expectedExceptions = SketchesArgumentException.class) public void checkGetQuantiles() { final int k = PreambleUtil.DEFAULT_K; final int n = 1000000; final DoublesSketch qs = buildAndLoadQS(k, n); final double[] frac = {-0.5}; qs.getQuantiles(frac); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkGetQuantile() { final int k = PreambleUtil.DEFAULT_K; final int n = 1000000; final DoublesSketch qs = buildAndLoadQS(k, n); final double frac = -0.5; //negative not allowed qs.getQuantile(frac); } //@Test //visual only public void summaryCheckViaMemory() { final DoublesSketch qs = buildAndLoadQS(256, 1000000); String s = qs.toString(); println(s); println(""); final MemorySegment srcSeg = MemorySegment.ofArray(qs.toByteArray()); final HeapUpdateDoublesSketch qs2 = HeapUpdateDoublesSketch.heapifyInstance(srcSeg); s = qs2.toString(); println(s); } @Test public void checkComputeNumLevelsNeeded() { final int n = 1 << 20; final int k = PreambleUtil.DEFAULT_K; final int lvls1 = computeNumLevelsNeeded(k, n); final int lvls2 = (int)Math.max(floor(log2((double)n/k)),0); assertEquals(lvls1, lvls2); } @Test public void checkComputeBitPattern() { final int n = 1 << 20; final int k = PreambleUtil.DEFAULT_K; final long bitP = ClassicUtil.computeBitPattern(k, n); assertEquals(bitP, n/(2L*k)); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkValidateSplitPointsOrder() { final double[] arr = {2, 1}; QuantilesUtil.checkDoublesSplitPointsOrder(arr); } @Test public void checkGetStorageBytes() { final int k = PreambleUtil.DEFAULT_K; //128 DoublesSketch qs = buildAndLoadQS(k, 0); //k, n int stor = qs.getCurrentCompactSerializedSizeBytes(); assertEquals(stor, 8); qs = buildAndLoadQS(k, 2*k); //forces one level stor = qs.getCurrentCompactSerializedSizeBytes(); int retItems = ClassicUtil.computeRetainedItems(k, 2*k); assertEquals(stor, 32 + (retItems << 3)); qs = buildAndLoadQS(k, (2*k)-1); //just Base Buffer stor = 
qs.getCurrentCompactSerializedSizeBytes(); retItems = ClassicUtil.computeRetainedItems(k, (2*k)-1); assertEquals(stor, 32 + (retItems << 3)); } @Test public void checkGetStorageBytes2() { final int k = PreambleUtil.DEFAULT_K; long v = 1; final UpdateDoublesSketch qs = DoublesSketch.builder().setK(k).build(); for (int i = 0; i< 1000; i++) { qs.update(v++); // for (int j = 0; j < 1000; j++) { // qs.update(v++); // } final byte[] byteArr = qs.toByteArray(false); assertEquals(byteArr.length, qs.getCurrentUpdatableSerializedSizeBytes()); } } @Test public void checkMerge() { final int k = PreambleUtil.DEFAULT_K; final int n = 1000000; final DoublesSketch qs1 = buildAndLoadQS(k,n,0); final DoublesSketch qs2 = buildAndLoadQS(k,0,0); //empty final DoublesUnion union = DoublesUnion.heapify(qs2); union.union(qs1); final DoublesSketch result = union.getResult(); final double med1 = qs1.getQuantile(0.5); final double med2 = result.getQuantile(0.5); assertEquals(med1, med2, 0.0); //println(med1+","+med2); } @Test public void checkReverseMerge() { final int k = PreambleUtil.DEFAULT_K; final DoublesSketch qs1 = buildAndLoadQS(k, 1000, 0); final DoublesSketch qs2 = buildAndLoadQS(2*k,1000, 1000); final DoublesUnion union = DoublesUnion.heapify(qs2); union.union(qs1); //attempt merge into larger k final DoublesSketch result = union.getResult(); assertEquals(result.getK(), k); } @Test public void checkInternalBuildHistogram() { final int k = PreambleUtil.DEFAULT_K; final int n = 1000000; final DoublesSketch qs = buildAndLoadQS(k,n,0); final double eps = qs.getNormalizedRankError(true); //println("EPS:"+eps); final double[] spts = {100000, 500000, 900000}; final double[] fracArr = qs.getPMF(spts); // println(fracArr[0]+", "+ (fracArr[0]-0.1)); // println(fracArr[1]+", "+ (fracArr[1]-0.4)); // println(fracArr[2]+", "+ (fracArr[2]-0.4)); // println(fracArr[3]+", "+ (fracArr[3]-0.1)); assertEquals(fracArr[0], .1, eps); assertEquals(fracArr[1], .4, eps); assertEquals(fracArr[2], .4, eps); 
assertEquals(fracArr[3], .1, eps); } @Test public void checkComputeBaseBufferCount() { final int n = 1 << 20; final int k = PreambleUtil.DEFAULT_K; final long bbCnt = ClassicUtil.computeBaseBufferItems(k, n); assertEquals(bbCnt, n % (2L*k)); } @Test public void checkToFromByteArray() { checkToFromByteArray2(128, 1300); //generates a pattern of 5 -> 101 checkToFromByteArray2(4, 7); checkToFromByteArray2(4, 8); checkToFromByteArray2(4, 9); } private static void checkToFromByteArray2(final int k, final int n) { final DoublesSketch qs = buildAndLoadQS(k, n); byte[] byteArr; MemorySegment seg; DoublesSketch qs2; // from compact byteArr = qs.toByteArray(true); seg = MemorySegment.ofArray(byteArr); qs2 = UpdateDoublesSketch.heapify(seg); for (double f = 0.1; f < 0.95; f += 0.1) { assertEquals(qs.getQuantile(f), qs2.getQuantile(f), 0.0); } // ordered, non-compact byteArr = qs.toByteArray(false); seg = MemorySegment.ofArray(byteArr); qs2 = DoublesSketch.heapify(seg); final DoublesSketchAccessor dsa = DoublesSketchAccessor.wrap(qs2, false); dsa.sort(); for (double f = 0.1; f < 0.95; f += 0.1) { assertEquals(qs.getQuantile(f), qs2.getQuantile(f), 0.0); } // not ordered, not compact byteArr = qs.toByteArray(false); seg = MemorySegment.ofArray(byteArr); qs2 = DoublesSketch.heapify(seg); for (double f = 0.1; f < 0.95; f += 0.1) { assertEquals(qs.getQuantile(f), qs2.getQuantile(f), 0.0); } } @Test public void checkEmpty() { final int k = PreambleUtil.DEFAULT_K; final DoublesSketch qs1 = buildAndLoadQS(k, 0); final byte[] byteArr = qs1.toByteArray(); final MemorySegment seg = MemorySegment.ofArray(byteArr); final DoublesSketch qs2 = DoublesSketch.heapify(seg); assertTrue(qs2.isEmpty()); final int expectedSizeBytes = 8; //COMBINED_BUFFER + ((2 * MIN_K) << 3); assertEquals(byteArr.length, expectedSizeBytes); try { qs2.getQuantile(0.5); fail(); } catch (final IllegalArgumentException e) { } try { qs2.getQuantiles(new double[] {0.0, 0.5, 1.0}); fail(); } catch (final 
IllegalArgumentException e) { } try { qs2.getRank(0); fail(); } catch (final IllegalArgumentException e) { } } @Test(expectedExceptions = SketchesArgumentException.class) public void checkSegTooSmall1() { final MemorySegment seg = MemorySegment.ofArray(new byte[7]); HeapUpdateDoublesSketch.heapifyInstance(seg); fail(); //qs2.getQuantile(0.5); } //Corruption tests @Test(expectedExceptions = SketchesArgumentException.class) public void checkSerVer() { DoublesUtil.checkDoublesSerVer(0, HeapUpdateDoublesSketch.MIN_HEAP_DOUBLES_SER_VER); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkFamilyID() { ClassicUtil.checkFamilyID(3); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkSegCapacityException() { final int k = PreambleUtil.DEFAULT_K; final long n = 1000; final int serVer = 3; final int combBufItemCap = computeCombinedBufferItemCapacity(k, n); final int segCapBytes = (combBufItemCap + 4) << 3; final int badCapBytes = segCapBytes - 1; //corrupt HeapUpdateDoublesSketch.checkHeapSegCapacity(k, n, false, serVer, badCapBytes); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkBufAllocAndCap() { final int k = PreambleUtil.DEFAULT_K; final long n = 1000; final int serVer = 3; final int combBufItemCap = computeCombinedBufferItemCapacity(k, n); //non-compact cap final int segCapBytes = (combBufItemCap + 4) << 3; final int segCapBytesV1 = (combBufItemCap + 5) << 3; HeapUpdateDoublesSketch.checkHeapSegCapacity(k, n, false, 1, segCapBytesV1); HeapUpdateDoublesSketch.checkHeapSegCapacity(k, n, false, serVer, segCapBytes - 1); //corrupt } @Test(expectedExceptions = SketchesArgumentException.class) public void checkPreLongsFlagsCap() { final int preLongs = 5; final int flags = EMPTY_FLAG_MASK; final int segCap = 8; ClassicUtil.checkPreLongsFlagsCap(preLongs, flags, segCap); //corrupt } @Test(expectedExceptions = SketchesArgumentException.class) public void checkPreLongsFlagsCap2() { final int 
preLongs = 5; final int flags = 0; final int segCap = 8; ClassicUtil.checkPreLongsFlagsCap(preLongs, flags, segCap); //corrupt } @Test(expectedExceptions = SketchesArgumentException.class) public void checkFlags() { final int flags = 1; ClassicUtil.checkHeapFlags(flags); } @Test public void checkZeroPatternReturn() { final int k = PreambleUtil.DEFAULT_K; final DoublesSketch qs1 = buildAndLoadQS(k, 64); final byte[] byteArr = qs1.toByteArray(); final MemorySegment seg = MemorySegment.ofArray(byteArr); HeapUpdateDoublesSketch.heapifyInstance(seg); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkBadDownSamplingRatio() { final int k1 = 64; final DoublesSketch qs1 = buildAndLoadQS(k1, k1); qs1.downSample(qs1, 2*k1, null, null);//should be smaller } @Test public void checkImproperKvalues() { checksForImproperK(0); checksForImproperK(1<<16); } //Primarily visual only tests static void testDownSampling(final int bigK, final int smallK) { final HeapUpdateDoublesSketch sketch1 = HeapUpdateDoublesSketch.newInstance(bigK); final HeapUpdateDoublesSketch sketch2 = HeapUpdateDoublesSketch.newInstance(smallK); for (int i = 127; i >= 1; i--) { sketch1.update (i); sketch2.update (i); } final HeapUpdateDoublesSketch downSketch = (HeapUpdateDoublesSketch)sketch1.downSample(sketch1, smallK, null, null); println (LS+"Sk1"+LS); String s1, s2, down; s1 = sketch1.toString(true, true); println(s1); println (LS+"Down"+LS); down = downSketch.toString(true, true); println(down); println(LS+"Sk2"+LS); s2 = sketch2.toString(true, true); println(s2); assertEquals(downSketch.getNumRetained(), sketch2.getNumRetained()); } @Test public void checkDownSampling() { testDownSampling(4,4); //no down sampling testDownSampling(16,4); //testDownSampling(12,3); } @Test public void testDownSampling2() { final HeapUpdateDoublesSketch sketch1 = HeapUpdateDoublesSketch.newInstance(8); final HeapUpdateDoublesSketch sketch2 = HeapUpdateDoublesSketch.newInstance(2); DoublesSketch 
downSketch; downSketch = sketch1.downSample(sketch1, 2, null, null); assertTrue(sameStructurePredicate(sketch2, downSketch)); for (int i = 0; i < 50; i++) { sketch1.update (i); sketch2.update (i); downSketch = sketch1.downSample(sketch1, 2, null, null); assertTrue (sameStructurePredicate(sketch2, downSketch)); } } @Test public void testDownSampling3() { final int k1 = 8; final int k2 = 2; final int n = 50; final UpdateDoublesSketch sketch1 = DoublesSketch.builder().setK(k1).build(); final UpdateDoublesSketch sketch2 = DoublesSketch.builder().setK(k2).build(); DoublesSketch downSketch; for (int i = 0; i < n; i++) { sketch1.update (i); sketch2.update (i); downSketch = sketch1.downSample(sketch1, k2, null, null); assertTrue (sameStructurePredicate(sketch2, downSketch)); } } @Test // public void testDownSampling3withSeg() { final int k1 = 8; final int k2 = 2; final int n = 50; final UpdateDoublesSketch sketch1 = DoublesSketch.builder().setK(k1).build(); final UpdateDoublesSketch sketch2 = DoublesSketch.builder().setK(k2).build(); DoublesSketch downSketch; final int bytes = DoublesSketch.getUpdatableStorageBytes(k2, n); final MemorySegment seg = MemorySegment.ofArray(new byte[bytes]); for (int i = 0; i < n; i++) { sketch1.update (i); sketch2.update (i); downSketch = sketch1.downSample(sketch1, k2, seg, null); assertTrue (sameStructurePredicate(sketch2, downSketch)); } } @Test public void testDownSampling4() { for (int n1 = 0; n1 < 50; n1++ ) { final HeapUpdateDoublesSketch bigSketch = HeapUpdateDoublesSketch.newInstance(8); for (int i1 = 1; i1 <= n1; i1++ ) { bigSketch.update(i1); } for (int n2 = 0; n2 < 50; n2++ ) { final HeapUpdateDoublesSketch directSketch = HeapUpdateDoublesSketch.newInstance(2); for (int i1 = 1; i1 <= n1; i1++ ) { directSketch.update(i1); } for (int i2 = 1; i2 <= n2; i2++ ) { directSketch.update(i2); } final HeapUpdateDoublesSketch smlSketch = HeapUpdateDoublesSketch.newInstance(2); for (int i2 = 1; i2 <= n2; i2++ ) { smlSketch.update(i2); } 
DoublesMergeImpl.downSamplingMergeInto(bigSketch, smlSketch); assertTrue (sameStructurePredicate(directSketch, smlSketch)); } } } @Test(expectedExceptions = SketchesArgumentException.class) public void testDownSamplingExceptions1() { final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(4).build(); // not smaller final DoublesSketch qs2 = DoublesSketch.builder().setK(3).build(); DoublesMergeImpl.mergeInto(qs2, qs1); } @Test(expectedExceptions = SketchesArgumentException.class) public void testDownSamplingExceptions2() { final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(4).build(); final DoublesSketch qs2 = DoublesSketch.builder().setK(7).build(); // 7/4 not pwr of 2 DoublesMergeImpl.mergeInto(qs2, qs1); } @Test(expectedExceptions = SketchesArgumentException.class) public void testDownSamplingExceptions3() { final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(4).build(); final DoublesSketch qs2 = DoublesSketch.builder().setK(12).build(); // 12/4 not pwr of 2 DoublesMergeImpl.mergeInto(qs2, qs1); } //@Test //visual only public void quantilesCheckViaMemory() { final int k = 256; final int n = 1000000; final DoublesSketch qs = buildAndLoadQS(k, n); final double[] ranks = {0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0}; final String s = getRanksTable(qs, ranks); println(s); println(""); final MemorySegment srcSeg = MemorySegment.ofArray(qs.toByteArray()); final HeapUpdateDoublesSketch qs2 = HeapUpdateDoublesSketch.heapifyInstance(srcSeg); println(getRanksTable(qs2, ranks)); } static String getRanksTable(final DoublesSketch qs, final double[] ranks) { final double rankError = qs.getNormalizedRankError(false); final double[] values = qs.getQuantiles(ranks); final double maxV = qs.getMaxItem(); final double minV = qs.getMinItem(); final double delta = maxV - minV; println("Note: This prints the relative value errors for illustration."); println("The quantiles sketch does not and can not guarantee relative value errors"); final 
StringBuilder sb = new StringBuilder(); sb.append(LS); sb.append("N = ").append(qs.getN()).append(LS); sb.append("K = ").append(qs.getK()).append(LS); final String formatStr1 = "%10s%15s%10s%15s%10s%10s"; final String formatStr2 = "%10.1f%15.5f%10.0f%15.5f%10.5f%10.5f"; final String hdr = String.format( formatStr1, "Rank", "ValueLB", "<= Value", "<= ValueUB", "RelErrLB", "RelErrUB"); sb.append(hdr).append(LS); for (int i=0; i<ranks.length; i++) { final double rank = ranks[i]; final double value = values[i]; if (rank == 0.0) { assertEquals(value, minV, 0.0); } else if (rank == 1.0) { assertEquals(value, maxV, 0.0); } else { final double rankUB = rank + rankError; final double valueUB = minV + (delta*rankUB); final double rankLB = Math.max(rank - rankError, 0.0); final double valueLB = minV + (delta*rankLB); assertTrue(value < valueUB); assertTrue(value > valueLB); final double valRelPctErrUB = (valueUB/ value) -1.0; final double valRelPctErrLB = (valueLB/ value) -1.0; final String row = String.format( formatStr2,rank, valueLB, value, valueUB, valRelPctErrLB, valRelPctErrUB); sb.append(row).append(LS); } } return sb.toString(); } @Test public void checkKisTwo() { final int k = 2; final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(k).build(); final double err = qs1.getNormalizedRankError(false); assertTrue(err < 1.0); byte[] arr = qs1.toByteArray(true); //8 assertEquals(arr.length, DoublesSketch.getCompactSerialiedSizeBytes(k, 0)); qs1.update(1.0); arr = qs1.toByteArray(true); //40 assertEquals(arr.length, DoublesSketch.getCompactSerialiedSizeBytes(k, 1)); } @Test public void checkKisTwoDeprecated() { final int k = 2; final UpdateDoublesSketch qs1 = DoublesSketch.builder().setK(k).build(); final double err = qs1.getNormalizedRankError(false); assertTrue(err < 1.0); byte[] arr = qs1.toByteArray(true); //8 assertEquals(arr.length, DoublesSketch.getCompactSerialiedSizeBytes(k, 0)); assertEquals(arr.length, qs1.getCurrentCompactSerializedSizeBytes()); 
qs1.update(1.0); arr = qs1.toByteArray(true); //40 assertEquals(arr.length, DoublesSketch.getCompactSerialiedSizeBytes(k, 1)); assertEquals(arr.length, qs1.getCurrentCompactSerializedSizeBytes()); } @Test public void checkPutMemory() { final UpdateDoublesSketch qs1 = DoublesSketch.builder().build(); //k = 128 for (int i=0; i<1000; i++) { qs1.update(i); } final int bytes = qs1.getCurrentUpdatableSerializedSizeBytes(); final MemorySegment dstSeg = MemorySegment.ofArray(new byte[bytes]); qs1.putIntoMemorySegment(dstSeg, false); final MemorySegment srcSeg = dstSeg; final DoublesSketch qs2 = DoublesSketch.heapify(srcSeg); assertEquals(qs1.getMinItem(), qs2.getMinItem(), 0.0); assertEquals(qs1.getMaxItem(), qs2.getMaxItem(), 0.0); } @Test(expectedExceptions = SketchesArgumentException.class) public void checkPutMemoryTooSmall() { final UpdateDoublesSketch qs1 = DoublesSketch.builder().build(); //k = 128 for (int i=0; i<1000; i++) { qs1.update(i); } final int bytes = qs1.getCurrentCompactSerializedSizeBytes(); final MemorySegment dstSeg = MemorySegment.ofArray(new byte[bytes-1]); //too small qs1.putIntoMemorySegment(dstSeg); } //Himanshu's case @Test public void testIt() { final java.nio.ByteBuffer bb = java.nio.ByteBuffer.allocate(1<<20).order(ByteOrder.nativeOrder()); final MemorySegment seg = MemorySegment.ofBuffer(bb); final int k = 1024; final DoublesSketch qsk = new DoublesSketchBuilder().setK(k).build(); final DoublesUnion u1 = DoublesUnion.heapify(qsk); u1.getResult().putIntoMemorySegment(seg); final DoublesUnion u2 = DoublesUnion.heapify(seg); final DoublesSketch qsk2 = u2.getResult(); assertTrue(qsk2.isEmpty()); } @Test public void checkEquallySpacedRanks() { final int n = 10; final double[] es = equallySpacedDoubles(n); final int len = es.length; for (int j=0; j<len; j++) { final double f = es[j]; assertEquals(f, j/10.0, (j/10.0) * 0.001); print(es[j]+", "); } println(""); } @Test public void checkPMFonEmpty() { final DoublesSketch qsk = buildAndLoadQS(32, 
1001); final double[] array = {}; final double[] qOut = qsk.getQuantiles(array); assertEquals(qOut.length, 0); println("qOut: "+qOut.length); final double[] cdfOut = qsk.getCDF(array); println("cdfOut: "+cdfOut.length); assertEquals(cdfOut[0], 1.0, 0.0); } @Test public void checkPuts() { final long n1 = 1001; final UpdateDoublesSketch qsk = buildAndLoadQS(32, (int)n1); final long n2 = qsk.getN(); assertEquals(n2, n1); final int bbCnt1 = qsk.getBaseBufferCount(); final long pat1 = qsk.getBitPattern(); qsk.putBitPattern(pat1 + 1); //corrupt the pattern final long pat2 = qsk.getBitPattern(); assertEquals(pat1 + 1, pat2); qsk.putBaseBufferCount(bbCnt1 + 1); //corrupt the bbCount final int bbCnt2 = qsk.getBaseBufferCount(); assertEquals(bbCnt1 + 1, bbCnt2); qsk.putN(n1 + 1); //corrupt N final long n3 = qsk.getN(); assertEquals(n1 + 1, n3); assertNull(qsk.getMemorySegment()); } @Test public void serializeDeserializeCompact() { final UpdateDoublesSketch sketch1 = DoublesSketch.builder().build(); for (int i = 0; i < 1000; i++) { sketch1.update(i); } UpdateDoublesSketch sketch2; sketch2 = (UpdateDoublesSketch) DoublesSketch.heapify(MemorySegment.ofArray(sketch1.toByteArray())); for (int i = 0; i < 1000; i++) { sketch2.update(i + 1000); } assertEquals(sketch2.getMinItem(), 0.0); assertEquals(sketch2.getMaxItem(), 1999.0); assertEquals(sketch2.getQuantile(0.5), 1000.0, 10.0); } @Test public void serializeDeserializeEmptyNonCompact() { final UpdateDoublesSketch sketch1 = DoublesSketch.builder().build(); final byte[] byteArr = sketch1.toByteArray(false); //Ordered, Not Compact, Empty assertEquals(byteArr.length, sketch1.getSerializedSizeBytes()); final MemorySegment seg = MemorySegment.ofArray(byteArr); final UpdateDoublesSketch sketch2 = (UpdateDoublesSketch) DoublesSketch.heapify(seg); for (int i = 0; i < 1000; i++) { sketch2.update(i); } assertEquals(sketch2.getMinItem(), 0.0); assertEquals(sketch2.getMaxItem(), 999.0); assertEquals(sketch2.getQuantile(0.5), 500.0, 4.0); } 
@Test public void getRankAndGetCdfConsistency() { final UpdateDoublesSketch sketch = DoublesSketch.builder().build(); final int n = 1_000_000; final double[] values = new double[n]; for (int i = 0; i < n; i++) { sketch.update(i); values[i] = i; } { // inclusive = false (default) final double[] ranks = sketch.getCDF(values); for (int i = 0; i < n; i++) { assertEquals(ranks[i], sketch.getRank(values[i]), 0.00001, "CDF vs rank for value " + i); } } { // inclusive = true final double[] ranks = sketch.getCDF(values, INCLUSIVE); for (int i = 0; i < n; i++) { assertEquals(ranks[i], sketch.getRank(values[i], INCLUSIVE), 0.00001, "CDF vs rank for value " + i); } } } @Test public void maxK() { final UpdateDoublesSketch sketch = DoublesSketch.builder().setK(32768).build(); Assert.assertEquals(sketch.getK(), 32768); } @Test public void checkBounds() { final UpdateDoublesSketch sketch = DoublesSketch.builder().build(); for (int i = 0; i < 1000; i++) { sketch.update(i); } final double eps = sketch.getNormalizedRankError(false); final double est = sketch.getQuantile(0.5); final double ub = sketch.getQuantileUpperBound(0.5); final double lb = sketch.getQuantileLowerBound(0.5); assertEquals(ub, sketch.getQuantile(.5 + eps)); assertEquals(lb, sketch.getQuantile(0.5 - eps)); println("Ext : " + est); println("UB : " + ub); println("LB : " + lb); } @Test public void checkGetKFromEqs() { final UpdateDoublesSketch sketch = DoublesSketch.builder().build(); final int k = sketch.getK(); final double eps = DoublesSketch.getNormalizedRankError(k, false); final double epsPmf = DoublesSketch.getNormalizedRankError(k, true); final int kEps = DoublesSketch.getKFromEpsilon(eps, false); final int kEpsPmf = DoublesSketch.getKFromEpsilon(epsPmf, true); assertEquals(kEps, k); assertEquals(kEpsPmf, k); } @Test public void tenItems() { final UpdateDoublesSketch sketch = DoublesSketch.builder().build(); for (int i = 1; i <= 10; i++) { sketch.update(i); } assertFalse(sketch.isEmpty()); 
assertEquals(sketch.getN(), 10); assertEquals(sketch.getNumRetained(), 10); for (int i = 1; i <= 10; i++) { assertEquals(sketch.getRank(i, EXCLUSIVE), (i - 1) / 10.0); assertEquals(sketch.getRank(i, INCLUSIVE), i / 10.0); } // inclusive = false assertEquals(sketch.getQuantile(0, EXCLUSIVE), 1); assertEquals(sketch.getQuantile(0.1, EXCLUSIVE), 2); assertEquals(sketch.getQuantile(0.2, EXCLUSIVE), 3); assertEquals(sketch.getQuantile(0.3, EXCLUSIVE), 4); assertEquals(sketch.getQuantile(0.4, EXCLUSIVE), 5); assertEquals(sketch.getQuantile(0.5, EXCLUSIVE), 6); assertEquals(sketch.getQuantile(0.6, EXCLUSIVE), 7); assertEquals(sketch.getQuantile(0.7, EXCLUSIVE), 8); assertEquals(sketch.getQuantile(0.8, EXCLUSIVE), 9); assertEquals(sketch.getQuantile(0.9, EXCLUSIVE), 10); assertEquals(sketch.getQuantile(1, EXCLUSIVE), 10); // inclusive = true assertEquals(sketch.getQuantile(0, INCLUSIVE), 1); assertEquals(sketch.getQuantile(0.1, INCLUSIVE), 1); assertEquals(sketch.getQuantile(0.2, INCLUSIVE), 2); assertEquals(sketch.getQuantile(0.3, INCLUSIVE), 3); assertEquals(sketch.getQuantile(0.4, INCLUSIVE), 4); assertEquals(sketch.getQuantile(0.5, INCLUSIVE), 5); assertEquals(sketch.getQuantile(0.6, INCLUSIVE), 6); assertEquals(sketch.getQuantile(0.7, INCLUSIVE), 7); assertEquals(sketch.getQuantile(0.8, INCLUSIVE), 8); assertEquals(sketch.getQuantile(0.9, INCLUSIVE), 9); assertEquals(sketch.getQuantile(1, INCLUSIVE), 10); // getQuantile() and getQuantiles() equivalence { // inclusive = false (default) final double[] quantiles = sketch.getQuantiles(new double[] {0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1}); for (int i = 0; i <= 10; i++) { assertEquals(sketch.getQuantile(i / 10.0), quantiles[i]); } } { // inclusive = true final double[] quantiles = sketch.getQuantiles(new double[] {0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1}, INCLUSIVE); for (int i = 0; i <= 10; i++) { assertEquals(sketch.getQuantile(i / 10.0, INCLUSIVE), quantiles[i]); } } } //private methods private 
static void checksForImproperK(final int k) { final String s = "Did not catch improper k: " + k; try { DoublesSketch.builder().setK(k); fail(s); } catch (final SketchesArgumentException e) { //pass } try { DoublesSketch.builder().setK(k).build(); fail(s); } catch (final SketchesArgumentException e) { //pass } try { HeapUpdateDoublesSketch.newInstance(k); fail(s); } catch (final SketchesArgumentException e) { //pass } } private static boolean sameStructurePredicate(final DoublesSketch mq1, final DoublesSketch mq2) { final boolean b1 = ( (mq1.getK() == mq2.getK()) && (mq1.getN() == mq2.getN()) && (mq1.getCombinedBufferItemCapacity() >= ClassicUtil.computeCombinedBufferItemCapacity(mq1.getK(), mq1.getN())) && (mq2.getCombinedBufferItemCapacity() >= ClassicUtil.computeCombinedBufferItemCapacity(mq2.getK(), mq2.getN())) && (mq1.getBaseBufferCount() == mq2.getBaseBufferCount()) && (mq1.getBitPattern() == mq2.getBitPattern()) ); final boolean b2; if (mq1.isEmpty()) { b2 = mq2.isEmpty(); } else { b2 = (mq1.getMinItem() == mq2.getMinItem()) && (mq1.getMaxItem() == mq2.getMaxItem()); } return b1 && b2; } static UpdateDoublesSketch buildAndLoadQS(final int k, final int n) { return buildAndLoadQS(k, n, 0); } static UpdateDoublesSketch buildAndLoadQS(final int k, final int n, final int startV) { final UpdateDoublesSketch qs = DoublesSketch.builder().setK(k).build(); for (int i=1; i<=n; i++) { qs.update(startV + i); } return qs; } @Test public void printlnTest() { println("PRINTING: "+this.getClass().getName()); print("PRINTING: "+this.getClass().getName() + LS); } /** * @param s value to print */ static void println(final Object o) { print(o.toString() + LS); } /** * @param s value to print */ static void print(final Object o) { //System.out.print(o.toString()); //disable here } }
apache/lucene
36,414
lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.benchmark.byTask; import java.io.BufferedReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.text.Collator; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.concurrent.TimeUnit; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.benchmark.BenchmarkTestCase; import org.apache.lucene.benchmark.byTask.feeds.DocMaker; import org.apache.lucene.benchmark.byTask.stats.TaskStats; import org.apache.lucene.benchmark.byTask.tasks.CountingSearchTestTask; import org.apache.lucene.benchmark.byTask.tasks.WriteLineDocTask; import org.apache.lucene.collation.CollationKeyAnalyzer; import org.apache.lucene.facet.taxonomy.TaxonomyReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import 
org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.index.LogMergePolicy; import org.apache.lucene.index.MultiTerms; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.analysis.BaseTokenStreamTestCase; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; /** Test very simply that perf tasks - simple algorithms - are doing what they should. */ @LuceneTestCase.SuppressCodecs({"SimpleText", "Direct"}) public class TestPerfTasksLogic extends BenchmarkTestCase { @Override public void setUp() throws Exception { super.setUp(); copyToWorkDir("reuters.first20.lines.txt"); copyToWorkDir("test-mapping-ISOLatin1Accent-partial.txt"); } /** Test index creation logic */ public void testIndexAndSearchTasks() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "ResetSystemErase", "CreateIndex", "{ AddDoc } : 1000", "ForceMerge(1)", "CloseIndex", "OpenReader", "{ CountingSearchTest } : 200", "CloseReader", "[ CountingSearchTest > : 70", "[ CountingSearchTest > : 9", }; // 2. we test this value later CountingSearchTestTask.numSearches = 0; // 3. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 4. test specific checks after the benchmark run completed. 
assertEquals( "TestSearchTask was supposed to be called!", 279, CountingSearchTestTask.numSearches); assertTrue( "Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory())); // now we should be able to open the index for write. IndexWriter iw = new IndexWriter( benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); iw.close(); IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); assertEquals( "1000 docs were added to the index, this is what we expect to find!", 1000, ir.numDocs()); ir.close(); } /** Test timed sequence task. */ public void testTimedSearchTask() throws Exception { String[] algLines = { "log.step=100000", "ResetSystemErase", "CreateIndex", "{ AddDoc } : 100", "ForceMerge(1)", "CloseIndex", "OpenReader", "{ CountingSearchTest } : .5s", "CloseReader", }; CountingSearchTestTask.numSearches = 0; execBenchmark(algLines); assertTrue(CountingSearchTestTask.numSearches > 0); long elapsed = TimeUnit.NANOSECONDS.toMillis( CountingSearchTestTask.prevLastNanos - CountingSearchTestTask.startNanos); assertTrue("elapsed time was " + elapsed + " ms", elapsed <= 1500); } // disabled until we fix BG thread prio -- this test // causes build to hang public void testBGSearchTaskThreads() throws Exception { String[] algLines = { "log.time.step.msec = 100", "log.step=100000", "ResetSystemErase", "CreateIndex", "{ AddDoc } : 1000", "ForceMerge(1)", "CloseIndex", "OpenReader", "{", " [ \"XSearch\" { CountingSearchTest > : * ] : 2 &-1", " Wait(0.5)", "}", "CloseReader", "RepSumByPref X" }; CountingSearchTestTask.numSearches = 0; execBenchmark(algLines); // NOTE: cannot assert this, because on a super-slow // system, it could be after waiting 0.5 seconds that // the search threads hadn't yet succeeded in starting // up and then they start up and do no searching: // assertTrue(CountingSearchTestTask.numSearches > 0); } /** Test Exhasting Doc Maker logic 
*/ public void testExhaustContentSource() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.SingleDocSource", "content.source.log.step=1", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "doc.tokenized=false", "# ----- alg ", "CreateIndex", "{ AddDoc } : * ", "ForceMerge(1)", "CloseIndex", "OpenReader", "{ CountingSearchTest } : 100", "CloseReader", "[ CountingSearchTest > : 30", "[ CountingSearchTest > : 9", }; // 2. we test this value later CountingSearchTestTask.numSearches = 0; // 3. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 4. test specific checks after the benchmark run completed. assertEquals( "TestSearchTask was supposed to be called!", 139, CountingSearchTestTask.numSearches); assertTrue( "Index does not exist?...!", DirectoryReader.indexExists(benchmark.getRunData().getDirectory())); // now we should be able to open the index for write. IndexWriter iw = new IndexWriter( benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); iw.close(); IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); assertEquals( "1 docs were added to the index, this is what we expect to find!", 1, ir.numDocs()); ir.close(); } // LUCENE-1994: test thread safety of SortableSingleDocMaker public void testDocMakerThreadSafety() throws Exception { // 1. 
alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.SortableSingleDocSource", "doc.term.vector=false", "log.step.AddDoc=10000", "content.source.forever=true", "directory=ByteBuffersDirectory", "doc.reuse.fields=false", "doc.stored=true", "doc.tokenized=false", "doc.index.props=true", "# ----- alg ", "CreateIndex", "[ { AddDoc > : 250 ] : 4", "CloseIndex", }; // 2. we test this value later CountingSearchTestTask.numSearches = 0; // 3. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); DirectoryReader r = DirectoryReader.open(benchmark.getRunData().getDirectory()); final int maxDoc = r.maxDoc(); assertEquals(1000, maxDoc); StoredFields storedFields = r.storedFields(); for (int i = 0; i < 1000; i++) { assertNotNull("doc " + i + " has null country", storedFields.document(i).getField("country")); } r.close(); } /** Test Parallel Doc Maker logic (for LUCENE-940) */ public void testParallelDocMaker() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "doc.term.vector=false", "content.source.forever=false", "directory=FSDirectory", "doc.stored=false", "doc.tokenized=false", "# ----- alg ", "CreateIndex", "[ { AddDoc } : * ] : 4 ", "CloseIndex", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } /** Test WriteLineDoc and LineDocSource. 
*/ public void testLineDocFile() throws Exception { Path lineFile = createTempFile("test.reuters.lines", ".txt"); // We will call WriteLineDocs this many times final int NUM_TRY_DOCS = 50; // Creates a line file with first 50 docs from SingleDocSource String[] algLines1 = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.SingleDocSource", "content.source.forever=true", "line.file.out=" + lineFile.toAbsolutePath().toString().replace('\\', '/'), "# ----- alg ", "{WriteLineDoc()}:" + NUM_TRY_DOCS, }; // Run algo Benchmark benchmark = execBenchmark(algLines1); BufferedReader r = Files.newBufferedReader(lineFile, StandardCharsets.UTF_8); int numLines = 0; String line; while ((line = r.readLine()) != null) { if (numLines == 0 && line.startsWith(WriteLineDocTask.FIELDS_HEADER_INDICATOR)) { continue; // do not count the header line as a doc } numLines++; } r.close(); assertEquals( "did not see the right number of docs; should be " + NUM_TRY_DOCS + " but was " + numLines, NUM_TRY_DOCS, numLines); // Index the line docs String[] algLines2 = { "# ----- properties ", "analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + lineFile.toAbsolutePath().toString().replace('\\', '/'), "content.source.forever=false", "doc.reuse.fields=false", "ram.flush.mb=4", "# ----- alg ", "ResetSystemErase", "CreateIndex", "{AddDoc}: *", "CloseIndex", }; // Run algo benchmark = execBenchmark(algLines2); // now we should be able to open the index for write. 
IndexWriter iw = new IndexWriter( benchmark.getRunData().getDirectory(), new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND)); iw.close(); IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); assertEquals( numLines + " lines were created but " + ir.numDocs() + " docs are in the index", numLines, ir.numDocs()); ir.close(); } /** Test ReadTokensTask */ public void testReadTokens() throws Exception { // We will call ReadTokens on this many docs final int NUM_DOCS = 20; // Read tokens from first NUM_DOCS docs from Reuters and // then build index from the same docs String[] algLines1 = { "# ----- properties ", "analyzer=org.apache.lucene.analysis.core.WhitespaceAnalyzer", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "# ----- alg ", "{ReadTokens}: " + NUM_DOCS, "ResetSystemErase", "CreateIndex", "{AddDoc}: " + NUM_DOCS, "CloseIndex", }; // Run algo Benchmark benchmark = execBenchmark(algLines1); List<TaskStats> stats = benchmark.getRunData().getPoints().taskStats(); // Count how many tokens all ReadTokens saw int totalTokenCount1 = 0; for (final TaskStats stat : stats) { if (stat.getTask().getName().equals("ReadTokens")) { totalTokenCount1 += stat.getCount(); } } // Separately count how many tokens are actually in the index: IndexReader reader = DirectoryReader.open(benchmark.getRunData().getDirectory()); assertEquals(NUM_DOCS, reader.numDocs()); int totalTokenCount2 = 0; Collection<String> fields = FieldInfos.getIndexedFields(reader); for (String fieldName : fields) { if (fieldName.equals(DocMaker.ID_FIELD) || fieldName.equals(DocMaker.DATE_MSEC_FIELD) || fieldName.equals(DocMaker.TIME_SEC_FIELD)) { continue; } Terms terms = MultiTerms.getTerms(reader, fieldName); if (terms == null) { continue; } TermsEnum termsEnum = terms.iterator(); PostingsEnum docs = null; while (termsEnum.next() != null) { docs = TestUtil.docs(random(), termsEnum, docs, 
PostingsEnum.FREQS); while (docs.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { totalTokenCount2 += docs.freq(); } } } reader.close(); // Make sure they are the same assertEquals(totalTokenCount1, totalTokenCount2); } /** Test that " {[AddDoc(4000)]: 4} : * " works corrcetly (for LUCENE-941) */ public void testParallelExhausted() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "doc.tokenized=false", "task.max.depth.log=1", "# ----- alg ", "CreateIndex", "{ [ AddDoc]: 4} : * ", "ResetInputs ", "{ [ AddDoc]: 4} : * ", "CloseIndex", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 2 * 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } /** Test that exhaust in loop works as expected (LUCENE-1115). */ public void testExhaustedLooped() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "doc.tokenized=false", "task.max.depth.log=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " CloseIndex", "} : 2", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. 
test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } /** Test that we can close IndexWriter with argument "false". */ public void testCloseIndexFalse() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "ram.flush.mb=-1", "max.buffered=2", "content.source.log.step=3", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " CloseIndex(false)", "} : 2", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } public static class MyMergeScheduler extends SerialMergeScheduler { boolean called; public MyMergeScheduler() { super(); called = true; } } /** Test that we can set merge scheduler". */ public void testMergeScheduler() throws Exception { // 1. 
alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "merge.scheduler=" + MyMergeScheduler.class.getName(), "doc.stored=false", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", "} : 2", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); assertTrue( "did not use the specified MergeScheduler", ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getConfig().getMergeScheduler()) .called); benchmark.getRunData().getIndexWriter().close(); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } public static class MyMergePolicy extends LogDocMergePolicy { boolean called; public MyMergePolicy() { called = true; } } /** Test that we can set merge policy". */ public void testMergePolicy() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "ram.flush.mb=-1", "max.buffered=2", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "merge.policy=" + MyMergePolicy.class.getName(), "doc.stored=false", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", "} : 2", }; // 2. 
execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); assertTrue( "did not use the specified MergePolicy", ((MyMergePolicy) benchmark.getRunData().getIndexWriter().getConfig().getMergePolicy()) .called); benchmark.getRunData().getIndexWriter().close(); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); } /** Test that IndexWriter settings stick. */ public void testIndexWriterSettings() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "ram.flush.mb=-1", "max.buffered=2", "compound=cmpnd:true:false", "doc.term.vector=vector:false:true", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "merge.factor=3", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " NewRound", "} : 2", }; // 2. 
execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); final IndexWriter writer = benchmark.getRunData().getIndexWriter(); assertEquals(2, writer.getConfig().getMaxBufferedDocs()); assertEquals( IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB()); assertEquals(3, ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor()); assertEquals(0.0d, writer.getConfig().getMergePolicy().getNoCFSRatio(), 0.0); writer.close(); Directory dir = benchmark.getRunData().getDirectory(); IndexReader reader = DirectoryReader.open(dir); Fields tfv = reader.termVectors().get(0); assertNotNull(tfv); assertTrue(tfv.size() > 0); reader.close(); } /** Test indexing with facets tasks. */ public void testIndexingWithFacets() throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=100", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "merge.factor=3", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "ResetSystemErase", "CreateIndex", "CreateTaxonomyIndex", "{ \"AddDocs\" AddFacetedDoc > : * ", "CloseIndex", "CloseTaxonomyIndex", "OpenTaxonomyReader", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); PerfRunData runData = benchmark.getRunData(); assertNull("taxo writer was not properly closed", runData.getTaxonomyWriter()); TaxonomyReader taxoReader = runData.getTaxonomyReader(); assertNotNull("taxo reader was not opened", taxoReader); assertTrue( "nothing was added to the taxnomy (expecting root and at least one addtional category)", taxoReader.getSize() > 1); taxoReader.close(); } /** Test that we can call forceMerge(maxNumSegments). */ public void testForceMerge() throws Exception { // 1. 
alg definition (required in every "logic" test) String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "ram.flush.mb=-1", "max.buffered=3", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "merge.policy=org.apache.lucene.index.LogDocMergePolicy", "doc.stored=false", "doc.tokenized=false", "debug.level=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " ForceMerge(3)", " CloseIndex()", "} : 2", }; // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. test number of docs in the index IndexReader ir = DirectoryReader.open(benchmark.getRunData().getDirectory()); int ndocsExpected = 20; // first 20 reuters docs. assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs()); ir.close(); // Make sure we have 3 segments: SegmentInfos infos = SegmentInfos.readLatestCommit(benchmark.getRunData().getDirectory()); assertEquals(3, infos.size()); } /** Test disabling task count (LUCENE-1136). */ public void testDisableCounting() throws Exception { doTestDisableCounting(true); doTestDisableCounting(false); } private void doTestDisableCounting(boolean disable) throws Exception { // 1. alg definition (required in every "logic" test) String[] algLines = disableCountingLines(disable); // 2. execute the algorithm (required in every "logic" test) Benchmark benchmark = execBenchmark(algLines); // 3. test counters int n = disable ? 
0 : 1; int nChecked = 0; for (final TaskStats stats : benchmark.getRunData().getPoints().taskStats()) { String taskName = stats.getTask().getName(); if (taskName.equals("Rounds")) { assertEquals("Wrong total count!", 20 + 2 * n, stats.getCount()); nChecked++; } else if (taskName.equals("CreateIndex")) { assertEquals("Wrong count for CreateIndex!", n, stats.getCount()); nChecked++; } else if (taskName.equals("CloseIndex")) { assertEquals("Wrong count for CloseIndex!", n, stats.getCount()); nChecked++; } } assertEquals("Missing some tasks to check!", 3, nChecked); } private String[] disableCountingLines(boolean disable) { String dis = disable ? "-" : ""; return new String[] { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=30", "doc.term.vector=false", "content.source.forever=false", "directory=ByteBuffersDirectory", "doc.stored=false", "doc.tokenized=false", "task.max.depth.log=1", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " " + dis + "CreateIndex", // optionally disable counting here " { \"AddDocs\" AddDoc > : * ", " " + dis + " CloseIndex", // optionally disable counting here (with extra blanks) "}", "RepSumByName", }; } /** Test that we can change the Locale in the runData, that it is parsed as we expect. 
*/ public void testLocale() throws Exception { // empty Locale: clear it (null) Benchmark benchmark = execBenchmark(getLocaleConfig("")); assertNull(benchmark.getRunData().getLocale()); // ROOT locale benchmark = execBenchmark(getLocaleConfig("ROOT")); assertEquals(Locale.ROOT, benchmark.getRunData().getLocale()); // specify just a language benchmark = execBenchmark(getLocaleConfig("de")); assertEquals( new Locale.Builder().setLanguageTag("de").build(), benchmark.getRunData().getLocale()); // specify language + country benchmark = execBenchmark(getLocaleConfig("en-US")); assertEquals( new Locale.Builder().setLanguageTag("en-US").build(), benchmark.getRunData().getLocale()); } private String[] getLocaleConfig(String localeParam) { String[] algLines = { "# ----- properties ", "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "content.source.forever=false", "directory=ByteBuffersDirectory", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " NewLocale(" + localeParam + ")", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " NewRound", "} : 1", }; return algLines; } /** Test that we can create CollationAnalyzers. 
*/ public void testCollator() throws Exception { // ROOT locale Benchmark benchmark = execBenchmark(getCollatorConfig("ROOT", "impl:jdk")); CollationKeyAnalyzer expected = new CollationKeyAnalyzer(Collator.getInstance(Locale.ROOT)); assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar"); // specify just a language benchmark = execBenchmark(getCollatorConfig("de", "impl:jdk")); expected = new CollationKeyAnalyzer( Collator.getInstance(new Locale.Builder().setLanguageTag("de").build())); assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar"); // specify language + country benchmark = execBenchmark(getCollatorConfig("en-US", "impl:jdk")); expected = new CollationKeyAnalyzer( Collator.getInstance(new Locale.Builder().setLanguageTag("en-US").build())); assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar"); // specify language + country + variant benchmark = execBenchmark(getCollatorConfig("nn-NO", "impl:jdk")); expected = new CollationKeyAnalyzer( Collator.getInstance(new Locale.Builder().setLanguageTag("nn-NO").build())); assertEqualCollation(expected, benchmark.getRunData().getAnalyzer(), "foobar"); } private void assertEqualCollation(Analyzer a1, Analyzer a2, String text) throws Exception { TokenStream ts1 = a1.tokenStream("bogus", text); TokenStream ts2 = a2.tokenStream("bogus", text); ts1.reset(); ts2.reset(); TermToBytesRefAttribute termAtt1 = ts1.addAttribute(TermToBytesRefAttribute.class); TermToBytesRefAttribute termAtt2 = ts2.addAttribute(TermToBytesRefAttribute.class); assertTrue(ts1.incrementToken()); assertTrue(ts2.incrementToken()); BytesRef bytes1 = termAtt1.getBytesRef(); BytesRef bytes2 = termAtt2.getBytesRef(); assertEquals(bytes1, bytes2); assertFalse(ts1.incrementToken()); assertFalse(ts2.incrementToken()); ts1.close(); ts2.close(); } private String[] getCollatorConfig(String localeParam, String collationParam) { String[] algLines = { "# ----- properties ", 
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "content.source.log.step=3", "content.source.forever=false", "directory=ByteBuffersDirectory", "# ----- alg ", "{ \"Rounds\"", " ResetSystemErase", " NewLocale(" + localeParam + ")", " NewCollationAnalyzer(" + collationParam + ")", " CreateIndex", " { \"AddDocs\" AddDoc > : * ", " NewRound", "} : 1", }; return algLines; } /** Test that we can create shingle analyzers using AnalyzerFactory. */ public void testShingleAnalyzer() throws Exception { String text = "one,two,three, four five six"; // StandardTokenizer, maxShingleSize, and outputUnigrams Benchmark benchmark = execBenchmark( getAnalyzerFactoryConfig("shingle-analyzer", "StandardTokenizer,ShingleFilter")); benchmark.getRunData().getAnalyzer().tokenStream("bogus", text).close(); BaseTokenStreamTestCase.assertAnalyzesTo( benchmark.getRunData().getAnalyzer(), text, new String[] { "one", "one two", "two", "two three", "three", "three four", "four", "four five", "five", "five six", "six" }); // StandardTokenizer, maxShingleSize = 3, and outputUnigrams = false benchmark = execBenchmark( getAnalyzerFactoryConfig( "shingle-analyzer", "StandardTokenizer,ShingleFilter(maxShingleSize:3,outputUnigrams:false)")); BaseTokenStreamTestCase.assertAnalyzesTo( benchmark.getRunData().getAnalyzer(), text, new String[] { "one two", "one two three", "two three", "two three four", "three four", "three four five", "four five", "four five six", "five six" }); // WhitespaceTokenizer, default maxShingleSize and outputUnigrams benchmark = execBenchmark( getAnalyzerFactoryConfig("shingle-analyzer", "WhitespaceTokenizer,ShingleFilter")); BaseTokenStreamTestCase.assertAnalyzesTo( benchmark.getRunData().getAnalyzer(), text, new String[] { "one,two,three,", "one,two,three, four", "four", "four five", "five", "five six", "six" }); // WhitespaceTokenizer, maxShingleSize=3 and outputUnigrams=false benchmark = execBenchmark( 
getAnalyzerFactoryConfig( "shingle-factory", "WhitespaceTokenizer,ShingleFilter(outputUnigrams:false,maxShingleSize:3)")); BaseTokenStreamTestCase.assertAnalyzesTo( benchmark.getRunData().getAnalyzer(), text, new String[] { "one,two,three, four", "one,two,three, four five", "four five", "four five six", "five six" }); } private String[] getAnalyzerFactoryConfig(String name, String params) { final String singleQuoteEscapedName = name.replace("'", "\\'"); String[] algLines = { "content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource", "docs.file=" + getReuters20LinesFile(), "work.dir=" + getWorkDir().toAbsolutePath().toString().replace('\\', '/'), // Fix Windows path "content.source.forever=false", "directory=ByteBuffersDirectory", "AnalyzerFactory(name:'" + singleQuoteEscapedName + "', " + params + ")", "NewAnalyzer('" + singleQuoteEscapedName + "')", "CreateIndex", "{ \"AddDocs\" AddDoc > : * " }; return algLines; } public void testAnalyzerFactory() throws Exception { String text = "Fortieth, Quarantième, Cuadragésimo"; Benchmark benchmark = execBenchmark( getAnalyzerFactoryConfig( "ascii folded, pattern replaced, standard tokenized, downcased, bigrammed.'analyzer'", "positionIncrementGap:100,offsetGap:1111," + "MappingCharFilter(mapping:'test-mapping-ISOLatin1Accent-partial.txt')," + "PatternReplaceCharFilterFactory(pattern:'e(\\\\\\\\S*)m',replacement:\"$1xxx$1\")," + "StandardTokenizer,LowerCaseFilter,NGramTokenFilter(minGramSize:2,maxGramSize:2)")); BaseTokenStreamTestCase.assertAnalyzesTo( benchmark.getRunData().getAnalyzer(), text, new String[] { "fo", "or", "rt", "ti", "ie", "et", "th", "qu", "ua", "ar", "ra", "an", "nt", "ti", "ix", "xx", "xx", "xe", "cu", "ua", "ad", "dr", "ra", "ag", "gs", "si", "ix", "xx", "xx", "xs", "si", "io" }); } private String getReuters20LinesFile() { return getWorkDirResourcePath("reuters.first20.lines.txt"); } }
googleapis/google-cloud-java
37,168
java-admanager/proto-ad-manager-v1/src/main/java/com/google/ads/admanager/v1/AdUnitOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/admanager/v1/ad_unit_messages.proto // Protobuf Java Version: 3.25.8 package com.google.ads.admanager.v1; public interface AdUnitOrBuilder extends // @@protoc_insertion_point(interface_extends:google.ads.admanager.v1.AdUnit) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Identifier. The resource name of the AdUnit. * Format: `networks/{network_code}/adUnits/{ad_unit_id}` * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Identifier. The resource name of the AdUnit. * Format: `networks/{network_code}/adUnits/{ad_unit_id}` * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = IDENTIFIER];</code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * Output only. AdUnit ID. * </pre> * * <code>int64 ad_unit_id = 15 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The adUnitId. */ long getAdUnitId(); /** * * * <pre> * Required. Immutable. The AdUnit's parent. Every ad unit has a parent except * for the root ad unit, which is created by Google. 
Format: * "networks/{network_code}/adUnits/{ad_unit_id}" * </pre> * * <code> * optional string parent_ad_unit = 10 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... } * </code> * * @return Whether the parentAdUnit field is set. */ boolean hasParentAdUnit(); /** * * * <pre> * Required. Immutable. The AdUnit's parent. Every ad unit has a parent except * for the root ad unit, which is created by Google. Format: * "networks/{network_code}/adUnits/{ad_unit_id}" * </pre> * * <code> * optional string parent_ad_unit = 10 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... } * </code> * * @return The parentAdUnit. */ java.lang.String getParentAdUnit(); /** * * * <pre> * Required. Immutable. The AdUnit's parent. Every ad unit has a parent except * for the root ad unit, which is created by Google. Format: * "networks/{network_code}/adUnits/{ad_unit_id}" * </pre> * * <code> * optional string parent_ad_unit = 10 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parentAdUnit. */ com.google.protobuf.ByteString getParentAdUnitBytes(); /** * * * <pre> * Output only. The path to this AdUnit in the ad unit hierarchy represented * as a list from the root to this ad unit's parent. For root ad units, this * list is empty. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitParent parent_path = 11 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.ads.admanager.v1.AdUnitParent> getParentPathList(); /** * * * <pre> * Output only. The path to this AdUnit in the ad unit hierarchy represented * as a list from the root to this ad unit's parent. For root ad units, this * list is empty. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitParent parent_path = 11 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.AdUnitParent getParentPath(int index); /** * * * <pre> * Output only. The path to this AdUnit in the ad unit hierarchy represented * as a list from the root to this ad unit's parent. For root ad units, this * list is empty. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitParent parent_path = 11 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getParentPathCount(); /** * * * <pre> * Output only. The path to this AdUnit in the ad unit hierarchy represented * as a list from the root to this ad unit's parent. For root ad units, this * list is empty. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitParent parent_path = 11 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.AdUnitParentOrBuilder> getParentPathOrBuilderList(); /** * * * <pre> * Output only. The path to this AdUnit in the ad unit hierarchy represented * as a list from the root to this ad unit's parent. For root ad units, this * list is empty. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitParent parent_path = 11 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.AdUnitParentOrBuilder getParentPathOrBuilder(int index); /** * * * <pre> * Required. The display name of the ad unit. Its maximum length is 255 * characters. * </pre> * * <code>optional string display_name = 9 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the displayName field is set. */ boolean hasDisplayName(); /** * * * <pre> * Required. The display name of the ad unit. Its maximum length is 255 * characters. * </pre> * * <code>optional string display_name = 9 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The displayName. */ java.lang.String getDisplayName(); /** * * * <pre> * Required. 
The display name of the ad unit. Its maximum length is 255 * characters. * </pre> * * <code>optional string display_name = 9 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for displayName. */ com.google.protobuf.ByteString getDisplayNameBytes(); /** * * * <pre> * Optional. Immutable. A string used to uniquely identify the ad unit for the * purposes of serving the ad. This attribute can be set during ad unit * creation. If it is not provided, it will be assigned by Google based on the * ad unit ID. * </pre> * * <code> * optional string ad_unit_code = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return Whether the adUnitCode field is set. */ boolean hasAdUnitCode(); /** * * * <pre> * Optional. Immutable. A string used to uniquely identify the ad unit for the * purposes of serving the ad. This attribute can be set during ad unit * creation. If it is not provided, it will be assigned by Google based on the * ad unit ID. * </pre> * * <code> * optional string ad_unit_code = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return The adUnitCode. */ java.lang.String getAdUnitCode(); /** * * * <pre> * Optional. Immutable. A string used to uniquely identify the ad unit for the * purposes of serving the ad. This attribute can be set during ad unit * creation. If it is not provided, it will be assigned by Google based on the * ad unit ID. * </pre> * * <code> * optional string ad_unit_code = 2 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = IMMUTABLE]; * </code> * * @return The bytes for adUnitCode. */ com.google.protobuf.ByteString getAdUnitCodeBytes(); /** * * * <pre> * Output only. The status of this ad unit. It defaults to ACTIVE. 
* </pre> * * <code> * optional .google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus status = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the status field is set. */ boolean hasStatus(); /** * * * <pre> * Output only. The status of this ad unit. It defaults to ACTIVE. * </pre> * * <code> * optional .google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus status = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for status. */ int getStatusValue(); /** * * * <pre> * Output only. The status of this ad unit. It defaults to ACTIVE. * </pre> * * <code> * optional .google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus status = 13 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The status. */ com.google.ads.admanager.v1.AdUnitStatusEnum.AdUnitStatus getStatus(); /** * * * <pre> * Optional. The target window directly applied to this AdUnit. * If this field is not set, this AdUnit uses the target window specified in * effectiveTargetWindow. * </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow applied_target_window = 44 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the appliedTargetWindow field is set. */ boolean hasAppliedTargetWindow(); /** * * * <pre> * Optional. The target window directly applied to this AdUnit. * If this field is not set, this AdUnit uses the target window specified in * effectiveTargetWindow. * </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow applied_target_window = 44 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The enum numeric value on the wire for appliedTargetWindow. */ int getAppliedTargetWindowValue(); /** * * * <pre> * Optional. The target window directly applied to this AdUnit. * If this field is not set, this AdUnit uses the target window specified in * effectiveTargetWindow. 
* </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow applied_target_window = 44 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The appliedTargetWindow. */ com.google.ads.admanager.v1.TargetWindowEnum.TargetWindow getAppliedTargetWindow(); /** * * * <pre> * Output only. Non-empty default. The target window of this AdUnit. This * value is inherited from ancestor AdUnits and defaults to TOP if no AdUnit * in the hierarchy specifies it. * </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow effective_target_window = 45 [(.google.api.field_behavior) = NON_EMPTY_DEFAULT, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the effectiveTargetWindow field is set. */ boolean hasEffectiveTargetWindow(); /** * * * <pre> * Output only. Non-empty default. The target window of this AdUnit. This * value is inherited from ancestor AdUnits and defaults to TOP if no AdUnit * in the hierarchy specifies it. * </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow effective_target_window = 45 [(.google.api.field_behavior) = NON_EMPTY_DEFAULT, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for effectiveTargetWindow. */ int getEffectiveTargetWindowValue(); /** * * * <pre> * Output only. Non-empty default. The target window of this AdUnit. This * value is inherited from ancestor AdUnits and defaults to TOP if no AdUnit * in the hierarchy specifies it. * </pre> * * <code> * optional .google.ads.admanager.v1.TargetWindowEnum.TargetWindow effective_target_window = 45 [(.google.api.field_behavior) = NON_EMPTY_DEFAULT, (.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The effectiveTargetWindow. */ com.google.ads.admanager.v1.TargetWindowEnum.TargetWindow getEffectiveTargetWindow(); /** * * * <pre> * Optional. The resource names of Teams directly applied to this AdUnit. 
* Format: "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string applied_teams = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the appliedTeams. */ java.util.List<java.lang.String> getAppliedTeamsList(); /** * * * <pre> * Optional. The resource names of Teams directly applied to this AdUnit. * Format: "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string applied_teams = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... } * </code> * * @return The count of appliedTeams. */ int getAppliedTeamsCount(); /** * * * <pre> * Optional. The resource names of Teams directly applied to this AdUnit. * Format: "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string applied_teams = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the element to return. * @return The appliedTeams at the given index. */ java.lang.String getAppliedTeams(int index); /** * * * <pre> * Optional. The resource names of Teams directly applied to this AdUnit. * Format: "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string applied_teams = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the appliedTeams at the given index. */ com.google.protobuf.ByteString getAppliedTeamsBytes(int index); /** * * * <pre> * Output only. The resource names of all Teams that this AdUnit is on as well * as those inherited from parent AdUnits. Format: * "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string teams = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return A list containing the teams. 
*/ java.util.List<java.lang.String> getTeamsList(); /** * * * <pre> * Output only. The resource names of all Teams that this AdUnit is on as well * as those inherited from parent AdUnits. Format: * "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string teams = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @return The count of teams. */ int getTeamsCount(); /** * * * <pre> * Output only. The resource names of all Teams that this AdUnit is on as well * as those inherited from parent AdUnits. Format: * "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string teams = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the element to return. * @return The teams at the given index. */ java.lang.String getTeams(int index); /** * * * <pre> * Output only. The resource names of all Teams that this AdUnit is on as well * as those inherited from parent AdUnits. Format: * "networks/{network_code}/teams/{team_id}" * </pre> * * <code> * repeated string teams = 4 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.resource_reference) = { ... } * </code> * * @param index The index of the value to return. * @return The bytes of the teams at the given index. */ com.google.protobuf.ByteString getTeamsBytes(int index); /** * * * <pre> * Optional. A description of the ad unit. The maximum length is 65,535 * characters. * </pre> * * <code>optional string description = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the description field is set. */ boolean hasDescription(); /** * * * <pre> * Optional. A description of the ad unit. The maximum length is 65,535 * characters. * </pre> * * <code>optional string description = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The description. */ java.lang.String getDescription(); /** * * * <pre> * Optional. 
A description of the ad unit. The maximum length is 65,535 * characters. * </pre> * * <code>optional string description = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for description. */ com.google.protobuf.ByteString getDescriptionBytes(); /** * * * <pre> * Optional. If this field is set to true, then the AdUnit will not be * implicitly targeted when its parent is. Traffickers must explicitly * target such an AdUnit or else no line items will serve to it. This * feature is only available for Ad Manager 360 accounts. * </pre> * * <code>optional bool explicitly_targeted = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return Whether the explicitlyTargeted field is set. */ boolean hasExplicitlyTargeted(); /** * * * <pre> * Optional. If this field is set to true, then the AdUnit will not be * implicitly targeted when its parent is. Traffickers must explicitly * target such an AdUnit or else no line items will serve to it. This * feature is only available for Ad Manager 360 accounts. * </pre> * * <code>optional bool explicitly_targeted = 6 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The explicitlyTargeted. */ boolean getExplicitlyTargeted(); /** * * * <pre> * Output only. This field is set to true if the ad unit has any children. * </pre> * * <code>optional bool has_children = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return Whether the hasChildren field is set. */ boolean hasHasChildren(); /** * * * <pre> * Output only. This field is set to true if the ad unit has any children. * </pre> * * <code>optional bool has_children = 7 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The hasChildren. */ boolean getHasChildren(); /** * * * <pre> * Output only. The time this AdUnit was last modified. * </pre> * * <code> * optional .google.protobuf.Timestamp update_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the updateTime field is set. 
*/ boolean hasUpdateTime(); /** * * * <pre> * Output only. The time this AdUnit was last modified. * </pre> * * <code> * optional .google.protobuf.Timestamp update_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The updateTime. */ com.google.protobuf.Timestamp getUpdateTime(); /** * * * <pre> * Output only. The time this AdUnit was last modified. * </pre> * * <code> * optional .google.protobuf.Timestamp update_time = 8 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder(); /** * * * <pre> * Optional. The sizes that can be served inside this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitSize ad_unit_sizes = 14 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<com.google.ads.admanager.v1.AdUnitSize> getAdUnitSizesList(); /** * * * <pre> * Optional. The sizes that can be served inside this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitSize ad_unit_sizes = 14 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.AdUnitSize getAdUnitSizes(int index); /** * * * <pre> * Optional. The sizes that can be served inside this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitSize ad_unit_sizes = 14 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ int getAdUnitSizesCount(); /** * * * <pre> * Optional. The sizes that can be served inside this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitSize ad_unit_sizes = 14 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.AdUnitSizeOrBuilder> getAdUnitSizesOrBuilderList(); /** * * * <pre> * Optional. The sizes that can be served inside this ad unit. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.AdUnitSize ad_unit_sizes = 14 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.AdUnitSizeOrBuilder getAdUnitSizesOrBuilder(int index); /** * * * <pre> * Optional. Determines what set top box video on demand channel this ad unit * corresponds to in an external set top box ad campaign system. * </pre> * * <code> * optional string external_set_top_box_channel_id = 17 [deprecated = true, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @deprecated google.ads.admanager.v1.AdUnit.external_set_top_box_channel_id is deprecated. See * google/ads/admanager/v1/ad_unit_messages.proto;l=139 * @return Whether the externalSetTopBoxChannelId field is set. */ @java.lang.Deprecated boolean hasExternalSetTopBoxChannelId(); /** * * * <pre> * Optional. Determines what set top box video on demand channel this ad unit * corresponds to in an external set top box ad campaign system. * </pre> * * <code> * optional string external_set_top_box_channel_id = 17 [deprecated = true, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @deprecated google.ads.admanager.v1.AdUnit.external_set_top_box_channel_id is deprecated. See * google/ads/admanager/v1/ad_unit_messages.proto;l=139 * @return The externalSetTopBoxChannelId. */ @java.lang.Deprecated java.lang.String getExternalSetTopBoxChannelId(); /** * * * <pre> * Optional. Determines what set top box video on demand channel this ad unit * corresponds to in an external set top box ad campaign system. * </pre> * * <code> * optional string external_set_top_box_channel_id = 17 [deprecated = true, (.google.api.field_behavior) = OPTIONAL]; * </code> * * @deprecated google.ads.admanager.v1.AdUnit.external_set_top_box_channel_id is deprecated. See * google/ads/admanager/v1/ad_unit_messages.proto;l=139 * @return The bytes for externalSetTopBoxChannelId. 
*/ @java.lang.Deprecated com.google.protobuf.ByteString getExternalSetTopBoxChannelIdBytes(); /** * * * <pre> * Optional. The duration after which an Ad Unit will automatically refresh. * This is only valid for ad units in mobile apps. If not set, the ad unit * will not refresh. * </pre> * * <code> * optional .google.protobuf.Duration refresh_delay = 19 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the refreshDelay field is set. */ boolean hasRefreshDelay(); /** * * * <pre> * Optional. The duration after which an Ad Unit will automatically refresh. * This is only valid for ad units in mobile apps. If not set, the ad unit * will not refresh. * </pre> * * <code> * optional .google.protobuf.Duration refresh_delay = 19 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The refreshDelay. */ com.google.protobuf.Duration getRefreshDelay(); /** * * * <pre> * Optional. The duration after which an Ad Unit will automatically refresh. * This is only valid for ad units in mobile apps. If not set, the ad unit * will not refresh. * </pre> * * <code> * optional .google.protobuf.Duration refresh_delay = 19 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.protobuf.DurationOrBuilder getRefreshDelayOrBuilder(); /** * * * <pre> * Optional. The set of labels applied directly to this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel applied_labels = 21 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<com.google.ads.admanager.v1.AppliedLabel> getAppliedLabelsList(); /** * * * <pre> * Optional. The set of labels applied directly to this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel applied_labels = 21 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.AppliedLabel getAppliedLabels(int index); /** * * * <pre> * Optional. The set of labels applied directly to this ad unit. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel applied_labels = 21 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ int getAppliedLabelsCount(); /** * * * <pre> * Optional. The set of labels applied directly to this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel applied_labels = 21 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.AppliedLabelOrBuilder> getAppliedLabelsOrBuilderList(); /** * * * <pre> * Optional. The set of labels applied directly to this ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel applied_labels = 21 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.AppliedLabelOrBuilder getAppliedLabelsOrBuilder(int index); /** * * * <pre> * Output only. Contains the set of labels applied directly to the ad unit as * well as those inherited from the parent ad units. If a label has been * negated, only the negated label is returned. This attribute is assigned by * Google. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel effective_applied_labels = 22 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.ads.admanager.v1.AppliedLabel> getEffectiveAppliedLabelsList(); /** * * * <pre> * Output only. Contains the set of labels applied directly to the ad unit as * well as those inherited from the parent ad units. If a label has been * negated, only the negated label is returned. This attribute is assigned by * Google. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel effective_applied_labels = 22 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.AppliedLabel getEffectiveAppliedLabels(int index); /** * * * <pre> * Output only. Contains the set of labels applied directly to the ad unit as * well as those inherited from the parent ad units. 
If a label has been * negated, only the negated label is returned. This attribute is assigned by * Google. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel effective_applied_labels = 22 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getEffectiveAppliedLabelsCount(); /** * * * <pre> * Output only. Contains the set of labels applied directly to the ad unit as * well as those inherited from the parent ad units. If a label has been * negated, only the negated label is returned. This attribute is assigned by * Google. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel effective_applied_labels = 22 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.AppliedLabelOrBuilder> getEffectiveAppliedLabelsOrBuilderList(); /** * * * <pre> * Output only. Contains the set of labels applied directly to the ad unit as * well as those inherited from the parent ad units. If a label has been * negated, only the negated label is returned. This attribute is assigned by * Google. * </pre> * * <code> * repeated .google.ads.admanager.v1.AppliedLabel effective_applied_labels = 22 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.AppliedLabelOrBuilder getEffectiveAppliedLabelsOrBuilder(int index); /** * * * <pre> * Optional. The set of label frequency caps applied directly to this ad unit. * There is a limit of 10 label frequency caps per ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap applied_label_frequency_caps = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<com.google.ads.admanager.v1.LabelFrequencyCap> getAppliedLabelFrequencyCapsList(); /** * * * <pre> * Optional. The set of label frequency caps applied directly to this ad unit. * There is a limit of 10 label frequency caps per ad unit. 
* </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap applied_label_frequency_caps = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.LabelFrequencyCap getAppliedLabelFrequencyCaps(int index); /** * * * <pre> * Optional. The set of label frequency caps applied directly to this ad unit. * There is a limit of 10 label frequency caps per ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap applied_label_frequency_caps = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ int getAppliedLabelFrequencyCapsCount(); /** * * * <pre> * Optional. The set of label frequency caps applied directly to this ad unit. * There is a limit of 10 label frequency caps per ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap applied_label_frequency_caps = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.LabelFrequencyCapOrBuilder> getAppliedLabelFrequencyCapsOrBuilderList(); /** * * * <pre> * Optional. The set of label frequency caps applied directly to this ad unit. * There is a limit of 10 label frequency caps per ad unit. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap applied_label_frequency_caps = 23 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.ads.admanager.v1.LabelFrequencyCapOrBuilder getAppliedLabelFrequencyCapsOrBuilder( int index); /** * * * <pre> * Output only. The label frequency caps applied directly to the ad unit as * well as those inherited from parent ad units. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap effective_label_frequency_caps = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<com.google.ads.admanager.v1.LabelFrequencyCap> getEffectiveLabelFrequencyCapsList(); /** * * * <pre> * Output only. 
The label frequency caps applied directly to the ad unit as * well as those inherited from parent ad units. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap effective_label_frequency_caps = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.LabelFrequencyCap getEffectiveLabelFrequencyCaps(int index); /** * * * <pre> * Output only. The label frequency caps applied directly to the ad unit as * well as those inherited from parent ad units. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap effective_label_frequency_caps = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ int getEffectiveLabelFrequencyCapsCount(); /** * * * <pre> * Output only. The label frequency caps applied directly to the ad unit as * well as those inherited from parent ad units. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap effective_label_frequency_caps = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ java.util.List<? extends com.google.ads.admanager.v1.LabelFrequencyCapOrBuilder> getEffectiveLabelFrequencyCapsOrBuilderList(); /** * * * <pre> * Output only. The label frequency caps applied directly to the ad unit as * well as those inherited from parent ad units. * </pre> * * <code> * repeated .google.ads.admanager.v1.LabelFrequencyCap effective_label_frequency_caps = 24 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.ads.admanager.v1.LabelFrequencyCapOrBuilder getEffectiveLabelFrequencyCapsOrBuilder( int index); /** * * * <pre> * Optional. Non-empty default. The smart size mode for this ad unit. This * attribute defaults to SmartSizeMode.NONE for fixed sizes. * </pre> * * <code> * optional .google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode smart_size_mode = 25 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = NON_EMPTY_DEFAULT]; * </code> * * @return Whether the smartSizeMode field is set. 
*/ boolean hasSmartSizeMode(); /** * * * <pre> * Optional. Non-empty default. The smart size mode for this ad unit. This * attribute defaults to SmartSizeMode.NONE for fixed sizes. * </pre> * * <code> * optional .google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode smart_size_mode = 25 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = NON_EMPTY_DEFAULT]; * </code> * * @return The enum numeric value on the wire for smartSizeMode. */ int getSmartSizeModeValue(); /** * * * <pre> * Optional. Non-empty default. The smart size mode for this ad unit. This * attribute defaults to SmartSizeMode.NONE for fixed sizes. * </pre> * * <code> * optional .google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode smart_size_mode = 25 [(.google.api.field_behavior) = OPTIONAL, (.google.api.field_behavior) = NON_EMPTY_DEFAULT]; * </code> * * @return The smartSizeMode. */ com.google.ads.admanager.v1.SmartSizeModeEnum.SmartSizeMode getSmartSizeMode(); /** * * * <pre> * Optional. The value of AdSense enabled directly applied to this ad unit. If * not specified this ad unit will inherit the value of * effectiveAdsenseEnabled from its ancestors. * </pre> * * <code>optional bool applied_adsense_enabled = 26 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the appliedAdsenseEnabled field is set. */ boolean hasAppliedAdsenseEnabled(); /** * * * <pre> * Optional. The value of AdSense enabled directly applied to this ad unit. If * not specified this ad unit will inherit the value of * effectiveAdsenseEnabled from its ancestors. * </pre> * * <code>optional bool applied_adsense_enabled = 26 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The appliedAdsenseEnabled. */ boolean getAppliedAdsenseEnabled(); /** * * * <pre> * Output only. Specifies whether or not the AdUnit is enabled for serving ads * from the AdSense content network. This attribute defaults to the ad unit's * parent or ancestor's setting if one has been set. 
If no ancestor of the ad * unit has set appliedAdsenseEnabled, the attribute is defaulted to true. * </pre> * * <code> * optional bool effective_adsense_enabled = 27 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the effectiveAdsenseEnabled field is set. */ boolean hasEffectiveAdsenseEnabled(); /** * * * <pre> * Output only. Specifies whether or not the AdUnit is enabled for serving ads * from the AdSense content network. This attribute defaults to the ad unit's * parent or ancestor's setting if one has been set. If no ancestor of the ad * unit has set appliedAdsenseEnabled, the attribute is defaulted to true. * </pre> * * <code> * optional bool effective_adsense_enabled = 27 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The effectiveAdsenseEnabled. */ boolean getEffectiveAdsenseEnabled(); }
// ---- File boundary artifact (dataset concatenation metadata, preserved as a comment) ----
// repo_id:   googleapis/google-cloud-java
// size:      37236
// file_path: java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListMetadataSchemasRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/metadata_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListMetadataSchemasRequest} */ public final class ListMetadataSchemasRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListMetadataSchemasRequest) ListMetadataSchemasRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListMetadataSchemasRequest.newBuilder() to construct. 
private ListMetadataSchemasRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListMetadataSchemasRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListMetadataSchemasRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_ListMetadataSchemasRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_ListMetadataSchemasRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.class, com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. * Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. 
* Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * The maximum number of MetadataSchemas to return. The service may return * fewer. * Must be in range 1-1000, inclusive. Defaults to 100. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. 
*/ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest other = (com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListMetadataSchemasRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListMetadataSchemasRequest) com.google.cloud.aiplatform.v1.ListMetadataSchemasRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_ListMetadataSchemasRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_ListMetadataSchemasRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.class, com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.MetadataServiceProto .internal_static_google_cloud_aiplatform_v1_ListMetadataSchemasRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest build() { com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest 
result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest buildPartial() { com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest result = new com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest) 
other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest other) { if (other == com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. 
The MetadataStore whose MetadataSchemas should be listed. * Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. * Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. * Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. 
* Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The MetadataStore whose MetadataSchemas should be listed. * Format: * `projects/{project}/locations/{location}/metadataStores/{metadatastore}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of MetadataSchemas to return. The service may return * fewer. * Must be in range 1-1000, inclusive. Defaults to 100. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of MetadataSchemas to return. The service may return * fewer. * Must be in range 1-1000, inclusive. Defaults to 100. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The maximum number of MetadataSchemas to return. The service may return * fewer. * Must be in range 1-1000, inclusive. Defaults to 100. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. 
* * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token, received from a previous * [MetadataService.ListMetadataSchemas][google.cloud.aiplatform.v1.MetadataService.ListMetadataSchemas] * call. Provide this to retrieve the next page. * * When paginating, all other provided parameters must match the call that * provided the page token. (Otherwise the request will fail with * INVALID_ARGUMENT error.) * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * A query to filter available MetadataSchemas for matching results. 
* </pre> * * <code>string filter = 4;</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * A query to filter available MetadataSchemas for matching results. * </pre> * * <code>string filter = 4;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListMetadataSchemasRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListMetadataSchemasRequest) private static final com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest(); } public static com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListMetadataSchemasRequest> PARSER = new com.google.protobuf.AbstractParser<ListMetadataSchemasRequest>() { @java.lang.Override public ListMetadataSchemasRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); 
} }; public static com.google.protobuf.Parser<ListMetadataSchemasRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListMetadataSchemasRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListMetadataSchemasRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,246
java-servicedirectory/proto-google-cloud-servicedirectory-v1beta1/src/main/java/com/google/cloud/servicedirectory/v1beta1/ListEndpointsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/servicedirectory/v1beta1/registration_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.servicedirectory.v1beta1; /** * * * <pre> * The response message for * [RegistrationService.ListEndpoints][google.cloud.servicedirectory.v1beta1.RegistrationService.ListEndpoints]. * </pre> * * Protobuf type {@code google.cloud.servicedirectory.v1beta1.ListEndpointsResponse} */ public final class ListEndpointsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) ListEndpointsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListEndpointsResponse.newBuilder() to construct. 
private ListEndpointsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListEndpointsResponse() { endpoints_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListEndpointsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1beta1_ListEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1beta1_ListEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.class, com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.Builder.class); } public static final int ENDPOINTS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.servicedirectory.v1beta1.Endpoint> endpoints_; /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.servicedirectory.v1beta1.Endpoint> getEndpointsList() { return endpoints_; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder> getEndpointsOrBuilderList() { return endpoints_; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ @java.lang.Override public int getEndpointsCount() { return endpoints_.size(); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.Endpoint getEndpoints(int index) { return endpoints_.get(index); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder getEndpointsOrBuilder( int index) { return endpoints_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < endpoints_.size(); i++) { output.writeMessage(1, endpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < endpoints_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, endpoints_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse)) { return super.equals(obj); } com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse other = (com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) obj; if (!getEndpointsList().equals(other.getEndpointsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getEndpointsCount() > 0) { hash = (37 * hash) + ENDPOINTS_FIELD_NUMBER; hash = (53 * hash) + getEndpointsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for * [RegistrationService.ListEndpoints][google.cloud.servicedirectory.v1beta1.RegistrationService.ListEndpoints]. * </pre> * * Protobuf type {@code google.cloud.servicedirectory.v1beta1.ListEndpointsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1beta1_ListEndpointsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1beta1_ListEndpointsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.class, com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.Builder.class); } // Construct using com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; if (endpointsBuilder_ == null) { endpoints_ = java.util.Collections.emptyList(); } else { endpoints_ = null; endpointsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto .internal_static_google_cloud_servicedirectory_v1beta1_ListEndpointsResponse_descriptor; } @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse getDefaultInstanceForType() { return com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse build() { com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse buildPartial() { com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse result = new com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse result) { if (endpointsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { endpoints_ = java.util.Collections.unmodifiableList(endpoints_); bitField0_ = (bitField0_ & ~0x00000001); } result.endpoints_ = endpoints_; } else { result.endpoints_ = endpointsBuilder_.build(); } } private void buildPartial0( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } 
@java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) { return mergeFrom((com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse other) { if (other == com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse.getDefaultInstance()) return this; if (endpointsBuilder_ == null) { if (!other.endpoints_.isEmpty()) { if (endpoints_.isEmpty()) { endpoints_ = other.endpoints_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureEndpointsIsMutable(); endpoints_.addAll(other.endpoints_); } onChanged(); } } else { if (!other.endpoints_.isEmpty()) { if (endpointsBuilder_.isEmpty()) { endpointsBuilder_.dispose(); endpointsBuilder_ = null; endpoints_ = other.endpoints_; bitField0_ = (bitField0_ & ~0x00000001); endpointsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getEndpointsFieldBuilder() : null; } else { endpointsBuilder_.addAllMessages(other.endpoints_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.servicedirectory.v1beta1.Endpoint m = input.readMessage( com.google.cloud.servicedirectory.v1beta1.Endpoint.parser(), extensionRegistry); if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(m); } else { endpointsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.servicedirectory.v1beta1.Endpoint> endpoints_ = java.util.Collections.emptyList(); private void ensureEndpointsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { endpoints_ = new java.util.ArrayList<com.google.cloud.servicedirectory.v1beta1.Endpoint>(endpoints_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1beta1.Endpoint, 
com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder, com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder> endpointsBuilder_; /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public java.util.List<com.google.cloud.servicedirectory.v1beta1.Endpoint> getEndpointsList() { if (endpointsBuilder_ == null) { return java.util.Collections.unmodifiableList(endpoints_); } else { return endpointsBuilder_.getMessageList(); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public int getEndpointsCount() { if (endpointsBuilder_ == null) { return endpoints_.size(); } else { return endpointsBuilder_.getCount(); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1beta1.Endpoint getEndpoints(int index) { if (endpointsBuilder_ == null) { return endpoints_.get(index); } else { return endpointsBuilder_.getMessage(index); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder setEndpoints( int index, com.google.cloud.servicedirectory.v1beta1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.set(index, value); onChanged(); } else { endpointsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder setEndpoints( int index, com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.set(index, builderForValue.build()); onChanged(); } else { endpointsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints(com.google.cloud.servicedirectory.v1beta1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.add(value); onChanged(); } else { endpointsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints( int index, com.google.cloud.servicedirectory.v1beta1.Endpoint value) { if (endpointsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureEndpointsIsMutable(); endpoints_.add(index, value); onChanged(); } else { endpointsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints( com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(builderForValue.build()); onChanged(); } else { endpointsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder addEndpoints( int index, com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder builderForValue) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.add(index, builderForValue.build()); onChanged(); } else { endpointsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder addAllEndpoints( java.lang.Iterable<? extends com.google.cloud.servicedirectory.v1beta1.Endpoint> values) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, endpoints_); onChanged(); } else { endpointsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder clearEndpoints() { if (endpointsBuilder_ == null) { endpoints_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { endpointsBuilder_.clear(); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public Builder removeEndpoints(int index) { if (endpointsBuilder_ == null) { ensureEndpointsIsMutable(); endpoints_.remove(index); onChanged(); } else { endpointsBuilder_.remove(index); } return this; } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder getEndpointsBuilder( int index) { return getEndpointsFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder getEndpointsOrBuilder( int index) { if (endpointsBuilder_ == null) { return endpoints_.get(index); } else { return endpointsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public java.util.List<? extends com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder> getEndpointsOrBuilderList() { if (endpointsBuilder_ != null) { return endpointsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(endpoints_); } } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder addEndpointsBuilder() { return getEndpointsFieldBuilder() .addBuilder(com.google.cloud.servicedirectory.v1beta1.Endpoint.getDefaultInstance()); } /** * * * <pre> * The list of endpoints. * </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder addEndpointsBuilder( int index) { return getEndpointsFieldBuilder() .addBuilder( index, com.google.cloud.servicedirectory.v1beta1.Endpoint.getDefaultInstance()); } /** * * * <pre> * The list of endpoints. 
* </pre> * * <code>repeated .google.cloud.servicedirectory.v1beta1.Endpoint endpoints = 1;</code> */ public java.util.List<com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder> getEndpointsBuilderList() { return getEndpointsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1beta1.Endpoint, com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder, com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder> getEndpointsFieldBuilder() { if (endpointsBuilder_ == null) { endpointsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.servicedirectory.v1beta1.Endpoint, com.google.cloud.servicedirectory.v1beta1.Endpoint.Builder, com.google.cloud.servicedirectory.v1beta1.EndpointOrBuilder>( endpoints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); endpoints_ = null; } return endpointsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no * more results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.servicedirectory.v1beta1.ListEndpointsResponse) private static final com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse(); } public static com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListEndpointsResponse> PARSER = new com.google.protobuf.AbstractParser<ListEndpointsResponse>() { @java.lang.Override public ListEndpointsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListEndpointsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListEndpointsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.servicedirectory.v1beta1.ListEndpointsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,267
java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/ListRepositoriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securesourcemanager/v1/secure_source_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securesourcemanager.v1; /** Protobuf type {@code google.cloud.securesourcemanager.v1.ListRepositoriesResponse} */ public final class ListRepositoriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.ListRepositoriesResponse) ListRepositoriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListRepositoriesResponse.newBuilder() to construct. 
private ListRepositoriesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListRepositoriesResponse() { repositories_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListRepositoriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.class, com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.Builder.class); } public static final int REPOSITORIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securesourcemanager.v1.Repository> repositories_; /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.securesourcemanager.v1.Repository> getRepositoriesList() { return repositories_; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder> getRepositoriesOrBuilderList() { return repositories_; } /** * * * <pre> * The list of repositories. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ @java.lang.Override public int getRepositoriesCount() { return repositories_.size(); } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.Repository getRepositories(int index) { return repositories_.get(index); } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder getRepositoriesOrBuilder( int index) { return repositories_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < repositories_.size(); i++) { output.writeMessage(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < repositories_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, repositories_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse)) { return super.equals(obj); } com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse other = (com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse) obj; if (!getRepositoriesList().equals(other.getRepositoriesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getRepositoriesCount() > 0) { hash = (37 * hash) + REPOSITORIES_FIELD_NUMBER; hash = (53 * hash) + getRepositoriesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** Protobuf type {@code google.cloud.securesourcemanager.v1.ListRepositoriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.ListRepositoriesResponse) com.google.cloud.securesourcemanager.v1.ListRepositoriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListRepositoriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListRepositoriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.class, com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.Builder.class); } // Construct using com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); } 
else { repositories_ = null; repositoriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListRepositoriesResponse_descriptor; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse getDefaultInstanceForType() { return com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse build() { com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse buildPartial() { com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse result = new com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse result) { if (repositoriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { repositories_ = java.util.Collections.unmodifiableList(repositories_); bitField0_ = (bitField0_ & ~0x00000001); } result.repositories_ = repositories_; } else { result.repositories_ = repositoriesBuilder_.build(); } } private void buildPartial0( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } 
@java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse) { return mergeFrom((com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse other) { if (other == com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse.getDefaultInstance()) return this; if (repositoriesBuilder_ == null) { if (!other.repositories_.isEmpty()) { if (repositories_.isEmpty()) { repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureRepositoriesIsMutable(); repositories_.addAll(other.repositories_); } onChanged(); } } else { if (!other.repositories_.isEmpty()) { if (repositoriesBuilder_.isEmpty()) { repositoriesBuilder_.dispose(); repositoriesBuilder_ = null; repositories_ = other.repositories_; bitField0_ = (bitField0_ & ~0x00000001); repositoriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getRepositoriesFieldBuilder() : null; } else { repositoriesBuilder_.addAllMessages(other.repositories_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securesourcemanager.v1.Repository m = input.readMessage( com.google.cloud.securesourcemanager.v1.Repository.parser(), extensionRegistry); if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(m); } else { repositoriesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securesourcemanager.v1.Repository> repositories_ = java.util.Collections.emptyList(); private void ensureRepositoriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { repositories_ = new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.Repository>( repositories_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Repository, 
com.google.cloud.securesourcemanager.v1.Repository.Builder, com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder> repositoriesBuilder_; /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.Repository> getRepositoriesList() { if (repositoriesBuilder_ == null) { return java.util.Collections.unmodifiableList(repositories_); } else { return repositoriesBuilder_.getMessageList(); } } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public int getRepositoriesCount() { if (repositoriesBuilder_ == null) { return repositories_.size(); } else { return repositoriesBuilder_.getCount(); } } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public com.google.cloud.securesourcemanager.v1.Repository getRepositories(int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessage(index); } } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder setRepositories( int index, com.google.cloud.securesourcemanager.v1.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.set(index, value); onChanged(); } else { repositoriesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of repositories. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder setRepositories( int index, com.google.cloud.securesourcemanager.v1.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.set(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder addRepositories(com.google.cloud.securesourcemanager.v1.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(value); onChanged(); } else { repositoriesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder addRepositories( int index, com.google.cloud.securesourcemanager.v1.Repository value) { if (repositoriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureRepositoriesIsMutable(); repositories_.add(index, value); onChanged(); } else { repositoriesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder addRepositories( com.google.cloud.securesourcemanager.v1.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of repositories. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder addRepositories( int index, com.google.cloud.securesourcemanager.v1.Repository.Builder builderForValue) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.add(index, builderForValue.build()); onChanged(); } else { repositoriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder addAllRepositories( java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.Repository> values) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, repositories_); onChanged(); } else { repositoriesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder clearRepositories() { if (repositoriesBuilder_ == null) { repositories_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { repositoriesBuilder_.clear(); } return this; } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public Builder removeRepositories(int index) { if (repositoriesBuilder_ == null) { ensureRepositoriesIsMutable(); repositories_.remove(index); onChanged(); } else { repositoriesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of repositories. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public com.google.cloud.securesourcemanager.v1.Repository.Builder getRepositoriesBuilder( int index) { return getRepositoriesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder getRepositoriesOrBuilder( int index) { if (repositoriesBuilder_ == null) { return repositories_.get(index); } else { return repositoriesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public java.util.List<? extends com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder> getRepositoriesOrBuilderList() { if (repositoriesBuilder_ != null) { return repositoriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(repositories_); } } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public com.google.cloud.securesourcemanager.v1.Repository.Builder addRepositoriesBuilder() { return getRepositoriesFieldBuilder() .addBuilder(com.google.cloud.securesourcemanager.v1.Repository.getDefaultInstance()); } /** * * * <pre> * The list of repositories. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public com.google.cloud.securesourcemanager.v1.Repository.Builder addRepositoriesBuilder( int index) { return getRepositoriesFieldBuilder() .addBuilder( index, com.google.cloud.securesourcemanager.v1.Repository.getDefaultInstance()); } /** * * * <pre> * The list of repositories. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.Repository repositories = 1;</code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.Repository.Builder> getRepositoriesBuilderList() { return getRepositoriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Repository, com.google.cloud.securesourcemanager.v1.Repository.Builder, com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder> getRepositoriesFieldBuilder() { if (repositoriesBuilder_ == null) { repositoriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.Repository, com.google.cloud.securesourcemanager.v1.Repository.Builder, com.google.cloud.securesourcemanager.v1.RepositoryOrBuilder>( repositories_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); repositories_ = null; } return repositoriesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.ListRepositoriesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.ListRepositoriesResponse) private static final com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse(); } public static com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListRepositoriesResponse> PARSER = new com.google.protobuf.AbstractParser<ListRepositoriesResponse>() { @java.lang.Override public ListRepositoriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListRepositoriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListRepositoriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListRepositoriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/nifi
37,587
nifi-framework-api/src/main/java/org/apache/nifi/authorization/AbstractPolicyBasedAuthorizer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.authorization; import org.apache.nifi.authorization.exception.AuthorizationAccessException; import org.apache.nifi.authorization.exception.AuthorizerCreationException; import org.apache.nifi.authorization.exception.AuthorizerDestructionException; import org.apache.nifi.authorization.exception.UninheritableAuthorizationsException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.stream.XMLOutputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamWriter; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Set; /** * An Authorizer that provides management of users, 
groups, and policies. */ public abstract class AbstractPolicyBasedAuthorizer implements ManagedAuthorizer { private static final Logger logger = LoggerFactory.getLogger(AbstractPolicyBasedAuthorizer.class); private static final String DISALLOW_DOCTYPES = "http://apache.org/xml/features/disallow-doctype-decl"; private static final String ALLOW_EXTERNAL_GENERAL_ENTITIES = "http://xml.org/sax/features/external-general-entities"; private static final String ALLOW_EXTERNAL_PARAM_ENTITIES = "http://xml.org/sax/features/external-parameter-entities"; private static final String ALLOW_EXTERNAL_DTD = "http://apache.org/xml/features/nonvalidating/load-external-dtd"; static final XMLOutputFactory XML_OUTPUT_FACTORY = XMLOutputFactory.newInstance(); static final String USER_ELEMENT = "user"; static final String GROUP_USER_ELEMENT = "groupUser"; static final String GROUP_ELEMENT = "group"; static final String POLICY_ELEMENT = "policy"; static final String POLICY_USER_ELEMENT = "policyUser"; static final String POLICY_GROUP_ELEMENT = "policyGroup"; static final String IDENTIFIER_ATTR = "identifier"; static final String IDENTITY_ATTR = "identity"; static final String NAME_ATTR = "name"; static final String RESOURCE_ATTR = "resource"; static final String ACTIONS_ATTR = "actions"; @Override public final void onConfigured(final AuthorizerConfigurationContext configurationContext) throws AuthorizerCreationException { doOnConfigured(configurationContext); } /** * Allows sub-classes to take action when onConfigured is called. 
* * @param configurationContext the configuration context * @throws AuthorizerCreationException if an error occurs during onConfigured process */ protected abstract void doOnConfigured(final AuthorizerConfigurationContext configurationContext) throws AuthorizerCreationException; @Override public final AuthorizationResult authorize(AuthorizationRequest request) throws AuthorizationAccessException { final UsersAndAccessPolicies usersAndAccessPolicies = getUsersAndAccessPolicies(); final String resourceIdentifier = request.getResource().getIdentifier(); final AccessPolicy policy = usersAndAccessPolicies.getAccessPolicy(resourceIdentifier, request.getAction()); if (policy == null) { return AuthorizationResult.resourceNotFound(); } final User user = usersAndAccessPolicies.getUser(request.getIdentity()); if (user == null) { return AuthorizationResult.denied(String.format("Unknown user with identity '%s'.", request.getIdentity())); } final Set<Group> userGroups = usersAndAccessPolicies.getGroups(user.getIdentity()); if (policy.getUsers().contains(user.getIdentifier()) || containsGroup(userGroups, policy)) { return AuthorizationResult.approved(); } return AuthorizationResult.denied(request.getExplanationSupplier().get()); } /** * Determines if the policy contains one of the user's groups. * * @param userGroups the set of the user's groups * @param policy the policy * @return true if one of the Groups in userGroups is contained in the policy */ private boolean containsGroup(final Set<Group> userGroups, final AccessPolicy policy) { if (userGroups.isEmpty() || policy.getGroups().isEmpty()) { return false; } for (Group userGroup : userGroups) { if (policy.getGroups().contains(userGroup.getIdentifier())) { return true; } } return false; } /** * Adds a new group. 
* * @param group the Group to add * @return the added Group * @throws AuthorizationAccessException if there was an unexpected error performing the operation * @throws IllegalStateException if a group with the same name already exists */ public final synchronized Group addGroup(Group group) throws AuthorizationAccessException { return doAddGroup(group); } /** * Adds a new group. * * @param group the Group to add * @return the added Group * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Group doAddGroup(Group group) throws AuthorizationAccessException; /** * Retrieves a Group by id. * * @param identifier the identifier of the Group to retrieve * @return the Group with the given identifier, or null if no matching group was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Group getGroup(String identifier) throws AuthorizationAccessException; /** * Retrieves a group by name. * * @param name the name of the group to retrieve * @return the group with the given name, or null if no matching group was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Group getGroupByName(String name) throws AuthorizationAccessException; protected abstract void purgePoliciesUsersAndGroups(); protected abstract void backupPoliciesUsersAndGroups(); /** * The group represented by the provided instance will be updated based on the provided instance. 
* * @param group an updated group instance * @return the updated group instance, or null if no matching group was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation * @throws IllegalStateException if there is already a group with the same name */ public final synchronized Group updateGroup(Group group) throws AuthorizationAccessException { return doUpdateGroup(group); } /** * The group represented by the provided instance will be updated based on the provided instance. * * @param group an updated group instance * @return the updated group instance, or null if no matching group was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Group doUpdateGroup(Group group) throws AuthorizationAccessException; /** * Deletes the given group. * * @param group the group to delete * @return the deleted group, or null if no matching group was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Group deleteGroup(Group group) throws AuthorizationAccessException; /** * Retrieves all groups. * * @return a list of groups * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Set<Group> getGroups() throws AuthorizationAccessException; /** * Adds the given user. * * @param user the user to add * @return the user that was added * @throws AuthorizationAccessException if there was an unexpected error performing the operation * @throws IllegalStateException if there is already a user with the same identity */ public final synchronized User addUser(User user) throws AuthorizationAccessException { return doAddUser(user); } /** * Adds the given user. 
* * @param user the user to add * @return the user that was added * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract User doAddUser(User user) throws AuthorizationAccessException; /** * Retrieves the user with the given identifier. * * @param identifier the id of the user to retrieve * @return the user with the given id, or null if no matching user was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract User getUser(String identifier) throws AuthorizationAccessException; /** * Retrieves the user with the given identity. * * @param identity the identity of the user to retrieve * @return the user with the given identity, or null if no matching user was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract User getUserByIdentity(String identity) throws AuthorizationAccessException; /** * The user represented by the provided instance will be updated based on the provided instance. * * @param user an updated user instance * @return the updated user instance, or null if no matching user was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation * @throws IllegalStateException if there is already a user with the same identity */ public final synchronized User updateUser(final User user) throws AuthorizationAccessException { return doUpdateUser(user); } /** * The user represented by the provided instance will be updated based on the provided instance. * * @param user an updated user instance * @return the updated user instance, or null if no matching user was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract User doUpdateUser(User user) throws AuthorizationAccessException; /** * Deletes the given user. 
* * @param user the user to delete * @return the user that was deleted, or null if no matching user was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract User deleteUser(User user) throws AuthorizationAccessException; /** * Retrieves all users. * * @return a list of users * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Set<User> getUsers() throws AuthorizationAccessException; /** * Adds the given policy ensuring that multiple policies can not be added for the same resource and action. * * @param accessPolicy the policy to add * @return the policy that was added * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public final synchronized AccessPolicy addAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException { return doAddAccessPolicy(accessPolicy); } /** * Adds the given policy. * * @param accessPolicy the policy to add * @return the policy that was added * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ protected abstract AccessPolicy doAddAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException; /** * Retrieves the policy with the given identifier. * * @param identifier the id of the policy to retrieve * @return the policy with the given id, or null if no matching policy exists * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract AccessPolicy getAccessPolicy(String identifier) throws AuthorizationAccessException; /** * The policy represented by the provided instance will be updated based on the provided instance. 
* * @param accessPolicy an updated policy * @return the updated policy, or null if no matching policy was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract AccessPolicy updateAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException; /** * Deletes the given policy. * * @param policy the policy to delete * @return the deleted policy, or null if no matching policy was found * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract AccessPolicy deleteAccessPolicy(AccessPolicy policy) throws AuthorizationAccessException; /** * Retrieves all access policies. * * @return a list of policies * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract Set<AccessPolicy> getAccessPolicies() throws AuthorizationAccessException; /** * Returns the UserAccessPolicies instance. * * @return the UserAccessPolicies instance * @throws AuthorizationAccessException if there was an unexpected error performing the operation */ public abstract UsersAndAccessPolicies getUsersAndAccessPolicies() throws AuthorizationAccessException; /** * Returns whether the proposed fingerprint is inheritable. 
* * @param proposedFingerprint the proposed fingerprint * @throws AuthorizationAccessException if there was an unexpected error performing the operation * @throws UninheritableAuthorizationsException if the proposed fingerprint was uninheritable */ @Override public final void checkInheritability(String proposedFingerprint) throws AuthorizationAccessException, UninheritableAuthorizationsException { final PoliciesUsersAndGroups policiesUsersAndGroups; try { // ensure we understand the proposed fingerprint policiesUsersAndGroups = parsePoliciesUsersAndGroups(proposedFingerprint); } catch (final AuthorizationAccessException e) { throw new UninheritableAuthorizationsException("Unable to parse proposed fingerprint: " + e); } if (!isInheritable(policiesUsersAndGroups)) { throw new UninheritableAuthorizationsException("Proposed fingerprint is not inheritable because the current Authorizations is not empty."); } } private boolean isInheritable(final PoliciesUsersAndGroups policiesUsersAndGroups) { return getUsers().isEmpty() && getGroups().isEmpty() && getAccessPolicies().isEmpty(); } /** * Parses the fingerprint and adds any users, groups, and policies to the current Authorizer. * * @param fingerprint the fingerprint that was obtained from calling getFingerprint() on another Authorizer. 
*/ @Override public final void inheritFingerprint(final String fingerprint) throws AuthorizationAccessException { if (fingerprint == null || fingerprint.isBlank()) { return; } final PoliciesUsersAndGroups policiesUsersAndGroups = parsePoliciesUsersAndGroups(fingerprint); inheritPoliciesUsersAndGroups(policiesUsersAndGroups); } private void inheritPoliciesUsersAndGroups(final PoliciesUsersAndGroups policiesUsersAndGroups) { addPoliciesUsersAndGroups(policiesUsersAndGroups); } private void addPoliciesUsersAndGroups(final PoliciesUsersAndGroups policiesUsersAndGroups) { policiesUsersAndGroups.getUsers().forEach(this::addUser); policiesUsersAndGroups.getGroups().forEach(this::addGroup); policiesUsersAndGroups.getAccessPolicies().forEach(this::addAccessPolicy); } @Override public void forciblyInheritFingerprint(final String fingerprint) throws AuthorizationAccessException { if (fingerprint == null || fingerprint.isBlank()) { logger.info("Inheriting Empty Policies, Users & Groups. Will backup existing Policies, Users & Groups first."); backupPoliciesUsersAndGroups(); purgePoliciesUsersAndGroups(); return; } final PoliciesUsersAndGroups policiesUsersAndGroups = parsePoliciesUsersAndGroups(fingerprint); if (isInheritable(policiesUsersAndGroups)) { logger.debug("Inheriting Policies, Users & Groups"); inheritPoliciesUsersAndGroups(policiesUsersAndGroups); } else { logger.info("Cannot directly inherit Policies, Users & Groups. 
Will backup existing Policies, Users & Groups, and then replace with proposed configuration"); backupPoliciesUsersAndGroups(); purgePoliciesUsersAndGroups(); addPoliciesUsersAndGroups(policiesUsersAndGroups); } } private PoliciesUsersAndGroups parsePoliciesUsersAndGroups(final String fingerprint) { final List<AccessPolicy> accessPolicies = new ArrayList<>(); final List<User> users = new ArrayList<>(); final List<Group> groups = new ArrayList<>(); final byte[] fingerprintBytes = fingerprint.getBytes(StandardCharsets.UTF_8); try (final ByteArrayInputStream in = new ByteArrayInputStream(fingerprintBytes)) { final Document document = parseFingerprint(in); final Element rootElement = document.getDocumentElement(); // parse all the users and add them to the current authorizer NodeList userNodes = rootElement.getElementsByTagName(USER_ELEMENT); for (int i = 0; i < userNodes.getLength(); i++) { Node userNode = userNodes.item(i); users.add(parseUser((Element) userNode)); } // parse all the groups and add them to the current authorizer NodeList groupNodes = rootElement.getElementsByTagName(GROUP_ELEMENT); for (int i = 0; i < groupNodes.getLength(); i++) { Node groupNode = groupNodes.item(i); groups.add(parseGroup((Element) groupNode)); } // parse all the policies and add them to the current authorizer NodeList policyNodes = rootElement.getElementsByTagName(POLICY_ELEMENT); for (int i = 0; i < policyNodes.getLength(); i++) { Node policyNode = policyNodes.item(i); accessPolicies.add(parsePolicy((Element) policyNode)); } } catch (final IOException e) { throw new AuthorizationAccessException("Unable to parse fingerprint", e); } return new PoliciesUsersAndGroups(accessPolicies, users, groups); } private Document parseFingerprint(final InputStream inputStream) throws IOException { final DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); docFactory.setSchema(null); docFactory.setNamespaceAware(true); // Disable DTDs and external entities to protect against 
XXE docFactory.setAttribute(DISALLOW_DOCTYPES, true); docFactory.setAttribute(ALLOW_EXTERNAL_DTD, false); docFactory.setAttribute(ALLOW_EXTERNAL_GENERAL_ENTITIES, false); docFactory.setAttribute(ALLOW_EXTERNAL_PARAM_ENTITIES, false); docFactory.setXIncludeAware(false); docFactory.setExpandEntityReferences(false); try { docFactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true); final DocumentBuilder documentBuilder = docFactory.newDocumentBuilder(); return documentBuilder.parse(inputStream); } catch (final ParserConfigurationException | SAXException e) { throw new IOException("Fingerprint parsing failed", e); } } private User parseUser(final Element element) { final User.Builder builder = new User.Builder() .identifier(element.getAttribute(IDENTIFIER_ATTR)) .identity(element.getAttribute(IDENTITY_ATTR)); return builder.build(); } private Group parseGroup(final Element element) { final Group.Builder builder = new Group.Builder() .identifier(element.getAttribute(IDENTIFIER_ATTR)) .name(element.getAttribute(NAME_ATTR)); NodeList groupUsers = element.getElementsByTagName(GROUP_USER_ELEMENT); for (int i = 0; i < groupUsers.getLength(); i++) { Element groupUserNode = (Element) groupUsers.item(i); builder.addUser(groupUserNode.getAttribute(IDENTIFIER_ATTR)); } return builder.build(); } private AccessPolicy parsePolicy(final Element element) { final AccessPolicy.Builder builder = new AccessPolicy.Builder() .identifier(element.getAttribute(IDENTIFIER_ATTR)) .resource(element.getAttribute(RESOURCE_ATTR)); final String actions = element.getAttribute(ACTIONS_ATTR); if (actions.equals(RequestAction.READ.name())) { builder.action(RequestAction.READ); } else if (actions.equals(RequestAction.WRITE.name())) { builder.action(RequestAction.WRITE); } else { throw new IllegalStateException("Unknown Policy Action: " + actions); } NodeList policyUsers = element.getElementsByTagName(POLICY_USER_ELEMENT); for (int i = 0; i < policyUsers.getLength(); i++) { Element policyUserNode = 
(Element) policyUsers.item(i); builder.addUser(policyUserNode.getAttribute(IDENTIFIER_ATTR)); } NodeList policyGroups = element.getElementsByTagName(POLICY_GROUP_ELEMENT); for (int i = 0; i < policyGroups.getLength(); i++) { Element policyGroupNode = (Element) policyGroups.item(i); builder.addGroup(policyGroupNode.getAttribute(IDENTIFIER_ATTR)); } return builder.build(); } @Override public final AccessPolicyProvider getAccessPolicyProvider() { return new ConfigurableAccessPolicyProvider() { @Override public Set<AccessPolicy> getAccessPolicies() throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getAccessPolicies(); } @Override public AccessPolicy getAccessPolicy(String identifier) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getAccessPolicy(identifier); } @Override public AccessPolicy addAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.addAccessPolicy(accessPolicy); } @Override public AccessPolicy updateAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.updateAccessPolicy(accessPolicy); } @Override public AccessPolicy deleteAccessPolicy(AccessPolicy accessPolicy) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.deleteAccessPolicy(accessPolicy); } @Override public AccessPolicy getAccessPolicy(String resourceIdentifier, RequestAction action) throws AuthorizationAccessException { final UsersAndAccessPolicies usersAndAccessPolicies = AbstractPolicyBasedAuthorizer.this.getUsersAndAccessPolicies(); return usersAndAccessPolicies.getAccessPolicy(resourceIdentifier, action); } @Override public String getFingerprint() throws AuthorizationAccessException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void inheritFingerprint(String fingerprint) throws 
AuthorizationAccessException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void forciblyInheritFingerprint(final String fingerprint) throws AuthorizationAccessException { throw new UnsupportedOperationException(); } @Override public void checkInheritability(String proposedFingerprint) throws AuthorizationAccessException, UninheritableAuthorizationsException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public UserGroupProvider getUserGroupProvider() { return new ConfigurableUserGroupProvider() { @Override public User addUser(User user) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.addUser(user); } @Override public User updateUser(User user) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.updateUser(user); } @Override public User deleteUser(User user) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.deleteUser(user); } @Override public Group addGroup(Group group) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.addGroup(group); } @Override public Group updateGroup(Group group) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.updateGroup(group); } @Override public Group deleteGroup(Group group) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.deleteGroup(group); } @Override public Set<User> getUsers() throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getUsers(); } @Override public User getUser(String identifier) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getUser(identifier); } @Override public User getUserByIdentity(String identity) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getUserByIdentity(identity); } @Override public Set<Group> getGroups() 
throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getGroups(); } @Override public Group getGroup(String identifier) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getGroup(identifier); } @Override public Group getGroupByName(String name) throws AuthorizationAccessException { return AbstractPolicyBasedAuthorizer.this.getGroupByName(name); } @Override public UserAndGroups getUserAndGroups(String identity) throws AuthorizationAccessException { final UsersAndAccessPolicies usersAndAccessPolicies = AbstractPolicyBasedAuthorizer.this.getUsersAndAccessPolicies(); final User user = usersAndAccessPolicies.getUser(identity); final Set<Group> groups = usersAndAccessPolicies.getGroups(identity); return new UserAndGroups() { @Override public User getUser() { return user; } @Override public Set<Group> getGroups() { return groups; } }; } @Override public String getFingerprint() throws AuthorizationAccessException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void inheritFingerprint(String fingerprint) throws AuthorizationAccessException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void forciblyInheritFingerprint(final String fingerprint) throws AuthorizationAccessException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void checkInheritability(String proposedFingerprint) throws AuthorizationAccessException, UninheritableAuthorizationsException { // fingerprint is managed by the encapsulating class throw new UnsupportedOperationException(); } @Override public void initialize(UserGroupProviderInitializationContext initializationContext) throws AuthorizerCreationException { } @Override public void onConfigured(AuthorizerConfigurationContext configurationContext) throws AuthorizerCreationException { } @Override 
public void preDestruction() throws AuthorizerDestructionException { } }; } @Override public void initialize(AccessPolicyProviderInitializationContext initializationContext) throws AuthorizerCreationException { } @Override public void onConfigured(AuthorizerConfigurationContext configurationContext) throws AuthorizerCreationException { } @Override public void preDestruction() throws AuthorizerDestructionException { } }; } /** * Returns a fingerprint representing the authorizations managed by this authorizer. The fingerprint will be * used for comparison to determine if two policy-based authorizers represent a compatible set of users, * groups, and policies. * * @return the fingerprint for this Authorizer */ @Override public final String getFingerprint() throws AuthorizationAccessException { final List<User> users = getSortedUsers(); final List<Group> groups = getSortedGroups(); final List<AccessPolicy> policies = getSortedAccessPolicies(); XMLStreamWriter writer = null; final StringWriter out = new StringWriter(); try { writer = XML_OUTPUT_FACTORY.createXMLStreamWriter(out); writer.writeStartDocument(); writer.writeStartElement("authorizations"); for (User user : users) { writeUser(writer, user); } for (Group group : groups) { writeGroup(writer, group); } for (AccessPolicy policy : policies) { writePolicy(writer, policy); } writer.writeEndElement(); writer.writeEndDocument(); writer.flush(); } catch (XMLStreamException e) { throw new AuthorizationAccessException("Unable to generate fingerprint", e); } finally { if (writer != null) { try { writer.close(); } catch (XMLStreamException ignored) { // nothing to do here } } } return out.toString(); } private void writeUser(final XMLStreamWriter writer, final User user) throws XMLStreamException { writer.writeStartElement(USER_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, user.getIdentifier()); writer.writeAttribute(IDENTITY_ATTR, user.getIdentity()); writer.writeEndElement(); } private void writeGroup(final 
XMLStreamWriter writer, final Group group) throws XMLStreamException { List<String> users = new ArrayList<>(group.getUsers()); Collections.sort(users); writer.writeStartElement(GROUP_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, group.getIdentifier()); writer.writeAttribute(NAME_ATTR, group.getName()); for (String user : users) { writer.writeStartElement(GROUP_USER_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, user); writer.writeEndElement(); } writer.writeEndElement(); } private void writePolicy(final XMLStreamWriter writer, final AccessPolicy policy) throws XMLStreamException { // sort the users for the policy List<String> policyUsers = new ArrayList<>(policy.getUsers()); Collections.sort(policyUsers); // sort the groups for this policy List<String> policyGroups = new ArrayList<>(policy.getGroups()); Collections.sort(policyGroups); writer.writeStartElement(POLICY_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, policy.getIdentifier()); writer.writeAttribute(RESOURCE_ATTR, policy.getResource()); writer.writeAttribute(ACTIONS_ATTR, policy.getAction().name()); for (String policyUser : policyUsers) { writer.writeStartElement(POLICY_USER_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, policyUser); writer.writeEndElement(); } for (String policyGroup : policyGroups) { writer.writeStartElement(POLICY_GROUP_ELEMENT); writer.writeAttribute(IDENTIFIER_ATTR, policyGroup); writer.writeEndElement(); } writer.writeEndElement(); } private List<AccessPolicy> getSortedAccessPolicies() { final List<AccessPolicy> policies = new ArrayList<>(getAccessPolicies()); policies.sort(Comparator.comparing(AccessPolicy::getIdentifier)); return policies; } private List<Group> getSortedGroups() { final List<Group> groups = new ArrayList<>(getGroups()); groups.sort(Comparator.comparing(Group::getIdentifier)); return groups; } private List<User> getSortedUsers() { final List<User> users = new ArrayList<>(getUsers()); users.sort(Comparator.comparing(User::getIdentifier)); return users; } 
private static class PoliciesUsersAndGroups { final List<AccessPolicy> accessPolicies; final List<User> users; final List<Group> groups; public PoliciesUsersAndGroups(List<AccessPolicy> accessPolicies, List<User> users, List<Group> groups) { this.accessPolicies = accessPolicies; this.users = users; this.groups = groups; } public List<AccessPolicy> getAccessPolicies() { return accessPolicies; } public List<User> getUsers() { return users; } public List<Group> getGroups() { return groups; } } }
googleapis/google-cloud-java
37,166
java-grafeas/src/main/java/io/grafeas/v1/CloudRepoSourceContext.java
/* * Copyright 2025 The Grafeas Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: grafeas/v1/provenance.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1; /** * * * <pre> * A CloudRepoSourceContext denotes a particular revision in a Google Cloud * Source Repo. * </pre> * * Protobuf type {@code grafeas.v1.CloudRepoSourceContext} */ public final class CloudRepoSourceContext extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1.CloudRepoSourceContext) CloudRepoSourceContextOrBuilder { private static final long serialVersionUID = 0L; // Use CloudRepoSourceContext.newBuilder() to construct. 
private CloudRepoSourceContext(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CloudRepoSourceContext() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CloudRepoSourceContext(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_CloudRepoSourceContext_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.Provenance .internal_static_grafeas_v1_CloudRepoSourceContext_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.CloudRepoSourceContext.class, io.grafeas.v1.CloudRepoSourceContext.Builder.class); } private int bitField0_; private int revisionCase_ = 0; @SuppressWarnings("serial") private java.lang.Object revision_; public enum RevisionCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { REVISION_ID(2), ALIAS_CONTEXT(3), REVISION_NOT_SET(0); private final int value; private RevisionCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static RevisionCase valueOf(int value) { return forNumber(value); } public static RevisionCase forNumber(int value) { switch (value) { case 2: return REVISION_ID; case 3: return ALIAS_CONTEXT; case 0: return REVISION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public RevisionCase getRevisionCase() { return RevisionCase.forNumber(revisionCase_); } public static final int REPO_ID_FIELD_NUMBER = 1; private io.grafeas.v1.RepoId repoId_; /** * * * <pre> * The ID of the repo. 
* </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> * * @return Whether the repoId field is set. */ @java.lang.Override public boolean hasRepoId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> * * @return The repoId. */ @java.lang.Override public io.grafeas.v1.RepoId getRepoId() { return repoId_ == null ? io.grafeas.v1.RepoId.getDefaultInstance() : repoId_; } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ @java.lang.Override public io.grafeas.v1.RepoIdOrBuilder getRepoIdOrBuilder() { return repoId_ == null ? io.grafeas.v1.RepoId.getDefaultInstance() : repoId_; } public static final int REVISION_ID_FIELD_NUMBER = 2; /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return Whether the revisionId field is set. */ public boolean hasRevisionId() { return revisionCase_ == 2; } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return The revisionId. */ public java.lang.String getRevisionId() { java.lang.Object ref = ""; if (revisionCase_ == 2) { ref = revision_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (revisionCase_ == 2) { revision_ = s; } return s; } } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return The bytes for revisionId. 
*/ public com.google.protobuf.ByteString getRevisionIdBytes() { java.lang.Object ref = ""; if (revisionCase_ == 2) { ref = revision_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (revisionCase_ == 2) { revision_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ALIAS_CONTEXT_FIELD_NUMBER = 3; /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> * * @return Whether the aliasContext field is set. */ @java.lang.Override public boolean hasAliasContext() { return revisionCase_ == 3; } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> * * @return The aliasContext. */ @java.lang.Override public io.grafeas.v1.AliasContext getAliasContext() { if (revisionCase_ == 3) { return (io.grafeas.v1.AliasContext) revision_; } return io.grafeas.v1.AliasContext.getDefaultInstance(); } /** * * * <pre> * An alias, which may be a branch or tag. 
* </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ @java.lang.Override public io.grafeas.v1.AliasContextOrBuilder getAliasContextOrBuilder() { if (revisionCase_ == 3) { return (io.grafeas.v1.AliasContext) revision_; } return io.grafeas.v1.AliasContext.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getRepoId()); } if (revisionCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, revision_); } if (revisionCase_ == 3) { output.writeMessage(3, (io.grafeas.v1.AliasContext) revision_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRepoId()); } if (revisionCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, revision_); } if (revisionCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 3, (io.grafeas.v1.AliasContext) revision_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1.CloudRepoSourceContext)) { return super.equals(obj); } io.grafeas.v1.CloudRepoSourceContext other = (io.grafeas.v1.CloudRepoSourceContext) obj; if (hasRepoId() != other.hasRepoId()) return false; if (hasRepoId()) { if (!getRepoId().equals(other.getRepoId())) return false; } if 
(!getRevisionCase().equals(other.getRevisionCase())) return false; switch (revisionCase_) { case 2: if (!getRevisionId().equals(other.getRevisionId())) return false; break; case 3: if (!getAliasContext().equals(other.getAliasContext())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasRepoId()) { hash = (37 * hash) + REPO_ID_FIELD_NUMBER; hash = (53 * hash) + getRepoId().hashCode(); } switch (revisionCase_) { case 2: hash = (37 * hash) + REVISION_ID_FIELD_NUMBER; hash = (53 * hash) + getRevisionId().hashCode(); break; case 3: hash = (37 * hash) + ALIAS_CONTEXT_FIELD_NUMBER; hash = (53 * hash) + getAliasContext().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1.CloudRepoSourceContext parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.CloudRepoSourceContext parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1.CloudRepoSourceContext parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.CloudRepoSourceContext parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1.CloudRepoSourceContext prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A CloudRepoSourceContext denotes a particular revision in a Google Cloud * Source Repo. * </pre> * * Protobuf type {@code grafeas.v1.CloudRepoSourceContext} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1.CloudRepoSourceContext) io.grafeas.v1.CloudRepoSourceContextOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_CloudRepoSourceContext_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.Provenance .internal_static_grafeas_v1_CloudRepoSourceContext_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.CloudRepoSourceContext.class, io.grafeas.v1.CloudRepoSourceContext.Builder.class); } // Construct using io.grafeas.v1.CloudRepoSourceContext.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getRepoIdFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; repoId_ = null; if (repoIdBuilder_ != null) { repoIdBuilder_.dispose(); 
repoIdBuilder_ = null; } if (aliasContextBuilder_ != null) { aliasContextBuilder_.clear(); } revisionCase_ = 0; revision_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1.Provenance.internal_static_grafeas_v1_CloudRepoSourceContext_descriptor; } @java.lang.Override public io.grafeas.v1.CloudRepoSourceContext getDefaultInstanceForType() { return io.grafeas.v1.CloudRepoSourceContext.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1.CloudRepoSourceContext build() { io.grafeas.v1.CloudRepoSourceContext result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public io.grafeas.v1.CloudRepoSourceContext buildPartial() { io.grafeas.v1.CloudRepoSourceContext result = new io.grafeas.v1.CloudRepoSourceContext(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(io.grafeas.v1.CloudRepoSourceContext result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.repoId_ = repoIdBuilder_ == null ? 
repoId_ : repoIdBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } private void buildPartialOneofs(io.grafeas.v1.CloudRepoSourceContext result) { result.revisionCase_ = revisionCase_; result.revision_ = this.revision_; if (revisionCase_ == 3 && aliasContextBuilder_ != null) { result.revision_ = aliasContextBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1.CloudRepoSourceContext) { return mergeFrom((io.grafeas.v1.CloudRepoSourceContext) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1.CloudRepoSourceContext other) { if (other == io.grafeas.v1.CloudRepoSourceContext.getDefaultInstance()) return this; if (other.hasRepoId()) { mergeRepoId(other.getRepoId()); } switch (other.getRevisionCase()) { case REVISION_ID: { revisionCase_ = 2; revision_ = other.revision_; onChanged(); break; } case ALIAS_CONTEXT: { mergeAliasContext(other.getAliasContext()); break; } case REVISION_NOT_SET: { break; } } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getRepoIdFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); revisionCase_ = 2; revision_ = s; break; } // case 18 case 26: { input.readMessage(getAliasContextFieldBuilder().getBuilder(), extensionRegistry); revisionCase_ = 3; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int revisionCase_ = 0; private java.lang.Object revision_; public RevisionCase getRevisionCase() { return RevisionCase.forNumber(revisionCase_); } public Builder clearRevision() { revisionCase_ = 0; revision_ = null; onChanged(); return this; } private int bitField0_; private io.grafeas.v1.RepoId repoId_; private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.RepoId, io.grafeas.v1.RepoId.Builder, io.grafeas.v1.RepoIdOrBuilder> repoIdBuilder_; /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> * * @return Whether the repoId field is set. */ public boolean hasRepoId() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The ID of the repo. 
* </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> * * @return The repoId. */ public io.grafeas.v1.RepoId getRepoId() { if (repoIdBuilder_ == null) { return repoId_ == null ? io.grafeas.v1.RepoId.getDefaultInstance() : repoId_; } else { return repoIdBuilder_.getMessage(); } } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public Builder setRepoId(io.grafeas.v1.RepoId value) { if (repoIdBuilder_ == null) { if (value == null) { throw new NullPointerException(); } repoId_ = value; } else { repoIdBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public Builder setRepoId(io.grafeas.v1.RepoId.Builder builderForValue) { if (repoIdBuilder_ == null) { repoId_ = builderForValue.build(); } else { repoIdBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public Builder mergeRepoId(io.grafeas.v1.RepoId value) { if (repoIdBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && repoId_ != null && repoId_ != io.grafeas.v1.RepoId.getDefaultInstance()) { getRepoIdBuilder().mergeFrom(value); } else { repoId_ = value; } } else { repoIdBuilder_.mergeFrom(value); } if (repoId_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public Builder clearRepoId() { bitField0_ = (bitField0_ & ~0x00000001); repoId_ = null; if (repoIdBuilder_ != null) { repoIdBuilder_.dispose(); repoIdBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The ID of the repo. 
* </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public io.grafeas.v1.RepoId.Builder getRepoIdBuilder() { bitField0_ |= 0x00000001; onChanged(); return getRepoIdFieldBuilder().getBuilder(); } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ public io.grafeas.v1.RepoIdOrBuilder getRepoIdOrBuilder() { if (repoIdBuilder_ != null) { return repoIdBuilder_.getMessageOrBuilder(); } else { return repoId_ == null ? io.grafeas.v1.RepoId.getDefaultInstance() : repoId_; } } /** * * * <pre> * The ID of the repo. * </pre> * * <code>.grafeas.v1.RepoId repo_id = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.RepoId, io.grafeas.v1.RepoId.Builder, io.grafeas.v1.RepoIdOrBuilder> getRepoIdFieldBuilder() { if (repoIdBuilder_ == null) { repoIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.RepoId, io.grafeas.v1.RepoId.Builder, io.grafeas.v1.RepoIdOrBuilder>( getRepoId(), getParentForChildren(), isClean()); repoId_ = null; } return repoIdBuilder_; } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return Whether the revisionId field is set. */ @java.lang.Override public boolean hasRevisionId() { return revisionCase_ == 2; } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return The revisionId. */ @java.lang.Override public java.lang.String getRevisionId() { java.lang.Object ref = ""; if (revisionCase_ == 2) { ref = revision_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (revisionCase_ == 2) { revision_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return The bytes for revisionId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRevisionIdBytes() { java.lang.Object ref = ""; if (revisionCase_ == 2) { ref = revision_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (revisionCase_ == 2) { revision_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @param value The revisionId to set. * @return This builder for chaining. */ public Builder setRevisionId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } revisionCase_ = 2; revision_ = value; onChanged(); return this; } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @return This builder for chaining. */ public Builder clearRevisionId() { if (revisionCase_ == 2) { revisionCase_ = 0; revision_ = null; onChanged(); } return this; } /** * * * <pre> * A revision ID. * </pre> * * <code>string revision_id = 2;</code> * * @param value The bytes for revisionId to set. * @return This builder for chaining. */ public Builder setRevisionIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); revisionCase_ = 2; revision_ = value; onChanged(); return this; } private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.AliasContext, io.grafeas.v1.AliasContext.Builder, io.grafeas.v1.AliasContextOrBuilder> aliasContextBuilder_; /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> * * @return Whether the aliasContext field is set. */ @java.lang.Override public boolean hasAliasContext() { return revisionCase_ == 3; } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> * * @return The aliasContext. 
*/ @java.lang.Override public io.grafeas.v1.AliasContext getAliasContext() { if (aliasContextBuilder_ == null) { if (revisionCase_ == 3) { return (io.grafeas.v1.AliasContext) revision_; } return io.grafeas.v1.AliasContext.getDefaultInstance(); } else { if (revisionCase_ == 3) { return aliasContextBuilder_.getMessage(); } return io.grafeas.v1.AliasContext.getDefaultInstance(); } } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ public Builder setAliasContext(io.grafeas.v1.AliasContext value) { if (aliasContextBuilder_ == null) { if (value == null) { throw new NullPointerException(); } revision_ = value; onChanged(); } else { aliasContextBuilder_.setMessage(value); } revisionCase_ = 3; return this; } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ public Builder setAliasContext(io.grafeas.v1.AliasContext.Builder builderForValue) { if (aliasContextBuilder_ == null) { revision_ = builderForValue.build(); onChanged(); } else { aliasContextBuilder_.setMessage(builderForValue.build()); } revisionCase_ = 3; return this; } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ public Builder mergeAliasContext(io.grafeas.v1.AliasContext value) { if (aliasContextBuilder_ == null) { if (revisionCase_ == 3 && revision_ != io.grafeas.v1.AliasContext.getDefaultInstance()) { revision_ = io.grafeas.v1.AliasContext.newBuilder((io.grafeas.v1.AliasContext) revision_) .mergeFrom(value) .buildPartial(); } else { revision_ = value; } onChanged(); } else { if (revisionCase_ == 3) { aliasContextBuilder_.mergeFrom(value); } else { aliasContextBuilder_.setMessage(value); } } revisionCase_ = 3; return this; } /** * * * <pre> * An alias, which may be a branch or tag. 
* </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ public Builder clearAliasContext() { if (aliasContextBuilder_ == null) { if (revisionCase_ == 3) { revisionCase_ = 0; revision_ = null; onChanged(); } } else { if (revisionCase_ == 3) { revisionCase_ = 0; revision_ = null; } aliasContextBuilder_.clear(); } return this; } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ public io.grafeas.v1.AliasContext.Builder getAliasContextBuilder() { return getAliasContextFieldBuilder().getBuilder(); } /** * * * <pre> * An alias, which may be a branch or tag. * </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ @java.lang.Override public io.grafeas.v1.AliasContextOrBuilder getAliasContextOrBuilder() { if ((revisionCase_ == 3) && (aliasContextBuilder_ != null)) { return aliasContextBuilder_.getMessageOrBuilder(); } else { if (revisionCase_ == 3) { return (io.grafeas.v1.AliasContext) revision_; } return io.grafeas.v1.AliasContext.getDefaultInstance(); } } /** * * * <pre> * An alias, which may be a branch or tag. 
* </pre> * * <code>.grafeas.v1.AliasContext alias_context = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.AliasContext, io.grafeas.v1.AliasContext.Builder, io.grafeas.v1.AliasContextOrBuilder> getAliasContextFieldBuilder() { if (aliasContextBuilder_ == null) { if (!(revisionCase_ == 3)) { revision_ = io.grafeas.v1.AliasContext.getDefaultInstance(); } aliasContextBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.AliasContext, io.grafeas.v1.AliasContext.Builder, io.grafeas.v1.AliasContextOrBuilder>( (io.grafeas.v1.AliasContext) revision_, getParentForChildren(), isClean()); revision_ = null; } revisionCase_ = 3; onChanged(); return aliasContextBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1.CloudRepoSourceContext) } // @@protoc_insertion_point(class_scope:grafeas.v1.CloudRepoSourceContext) private static final io.grafeas.v1.CloudRepoSourceContext DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1.CloudRepoSourceContext(); } public static io.grafeas.v1.CloudRepoSourceContext getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CloudRepoSourceContext> PARSER = new com.google.protobuf.AbstractParser<CloudRepoSourceContext>() { @java.lang.Override public CloudRepoSourceContext parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CloudRepoSourceContext> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CloudRepoSourceContext> getParserForType() { return PARSER; } @java.lang.Override public io.grafeas.v1.CloudRepoSourceContext getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,231
java-shopping-merchant-reports/proto-google-shopping-merchant-reports-v1alpha/src/main/java/com/google/shopping/merchant/reports/v1alpha/SearchRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/reports/v1alpha/reports.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.reports.v1alpha; /** * * * <pre> * Request message for the `ReportService.Search` method. * </pre> * * Protobuf type {@code google.shopping.merchant.reports.v1alpha.SearchRequest} */ public final class SearchRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.reports.v1alpha.SearchRequest) SearchRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SearchRequest.newBuilder() to construct. 
private SearchRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchRequest() { parent_ = ""; query_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.reports.v1alpha.ReportsProto .internal_static_google_shopping_merchant_reports_v1alpha_SearchRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.reports.v1alpha.ReportsProto .internal_static_google_shopping_merchant_reports_v1alpha_SearchRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.reports.v1alpha.SearchRequest.class, com.google.shopping.merchant.reports.v1alpha.SearchRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int QUERY_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object query_ = ""; /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The query. */ @java.lang.Override public java.lang.String getQuery() { java.lang.Object ref = query_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); query_ = s; return s; } } /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for query. */ @java.lang.Override public com.google.protobuf.ByteString getQueryBytes() { java.lang.Object ref = query_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); query_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * Optional. Number of `ReportRows` to retrieve in a single page. Defaults to * 1000. 
Values above 5000 are coerced to 5000. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, query_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, query_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; 
return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.reports.v1alpha.SearchRequest)) { return super.equals(obj); } com.google.shopping.merchant.reports.v1alpha.SearchRequest other = (com.google.shopping.merchant.reports.v1alpha.SearchRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getQuery().equals(other.getQuery())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + QUERY_FIELD_NUMBER; hash = (53 * hash) + getQuery().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public 
static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException 
{ return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.reports.v1alpha.SearchRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the `ReportService.Search` method. 
* </pre> * * Protobuf type {@code google.shopping.merchant.reports.v1alpha.SearchRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.reports.v1alpha.SearchRequest) com.google.shopping.merchant.reports.v1alpha.SearchRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.reports.v1alpha.ReportsProto .internal_static_google_shopping_merchant_reports_v1alpha_SearchRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.reports.v1alpha.ReportsProto .internal_static_google_shopping_merchant_reports_v1alpha_SearchRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.reports.v1alpha.SearchRequest.class, com.google.shopping.merchant.reports.v1alpha.SearchRequest.Builder.class); } // Construct using com.google.shopping.merchant.reports.v1alpha.SearchRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; query_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.reports.v1alpha.ReportsProto .internal_static_google_shopping_merchant_reports_v1alpha_SearchRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.reports.v1alpha.SearchRequest getDefaultInstanceForType() { return com.google.shopping.merchant.reports.v1alpha.SearchRequest.getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.reports.v1alpha.SearchRequest build() { 
com.google.shopping.merchant.reports.v1alpha.SearchRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.reports.v1alpha.SearchRequest buildPartial() { com.google.shopping.merchant.reports.v1alpha.SearchRequest result = new com.google.shopping.merchant.reports.v1alpha.SearchRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.merchant.reports.v1alpha.SearchRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.query_ = query_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.reports.v1alpha.SearchRequest) { return 
mergeFrom((com.google.shopping.merchant.reports.v1alpha.SearchRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.merchant.reports.v1alpha.SearchRequest other) { if (other == com.google.shopping.merchant.reports.v1alpha.SearchRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getQuery().isEmpty()) { query_ = other.query_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { query_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; 
/** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Id of the account making the call. Must be a standalone account * or an MCA subaccount. Format: accounts/{account} * </pre> * * <code>string parent = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object query_ = ""; /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The query. */ public java.lang.String getQuery() { java.lang.Object ref = query_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); query_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for query. 
*/ public com.google.protobuf.ByteString getQueryBytes() { java.lang.Object ref = query_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); query_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The query to set. * @return This builder for chaining. */ public Builder setQuery(java.lang.String value) { if (value == null) { throw new NullPointerException(); } query_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearQuery() { query_ = getDefaultInstance().getQuery(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. Query that defines a report to be retrieved. * * For details on how to construct your query, see the Query Language * guide. For the full list of available tables and fields, see the Available * fields. * </pre> * * <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for query to set. * @return This builder for chaining. 
*/ public Builder setQueryBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); query_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Number of `ReportRows` to retrieve in a single page. Defaults to * 1000. Values above 5000 are coerced to 5000. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Number of `ReportRows` to retrieve in a single page. Defaults to * 1000. Values above 5000 are coerced to 5000. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Number of `ReportRows` to retrieve in a single page. Defaults to * 1000. Values above 5000 are coerced to 5000. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Token of the page to retrieve. If not specified, the first page * of results is returned. In order to request the next page of results, the * value obtained from `next_page_token` in the previous response should be * used. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.reports.v1alpha.SearchRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.reports.v1alpha.SearchRequest) private static final com.google.shopping.merchant.reports.v1alpha.SearchRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.reports.v1alpha.SearchRequest(); } public static com.google.shopping.merchant.reports.v1alpha.SearchRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchRequest> PARSER = new com.google.protobuf.AbstractParser<SearchRequest>() { @java.lang.Override public SearchRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.reports.v1alpha.SearchRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,246
java-video-intelligence/proto-google-cloud-video-intelligence-v1/src/main/java/com/google/cloud/videointelligence/v1/ExplicitContentAnnotation.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1; /** * * * <pre> * Explicit content annotation (based on per-frame visual signals only). * If no explicit content has been detected in a frame, no annotations are * present for that frame. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.ExplicitContentAnnotation} */ public final class ExplicitContentAnnotation extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1.ExplicitContentAnnotation) ExplicitContentAnnotationOrBuilder { private static final long serialVersionUID = 0L; // Use ExplicitContentAnnotation.newBuilder() to construct. 
private ExplicitContentAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExplicitContentAnnotation() { frames_ = java.util.Collections.emptyList(); version_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExplicitContentAnnotation(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_ExplicitContentAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_ExplicitContentAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.class, com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.Builder.class); } public static final int FRAMES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.videointelligence.v1.ExplicitContentFrame> frames_; /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.videointelligence.v1.ExplicitContentFrame> getFramesList() { return frames_; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder> getFramesOrBuilderList() { return frames_; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ @java.lang.Override public int getFramesCount() { return frames_.size(); } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentFrame getFrames(int index) { return frames_.get(index); } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder getFramesOrBuilder( int index) { return frames_.get(index); } public static final int VERSION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The version. */ @java.lang.Override public java.lang.String getVersion() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The bytes for version. 
*/ @java.lang.Override public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < frames_.size(); i++) { output.writeMessage(1, frames_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < frames_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, frames_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1.ExplicitContentAnnotation)) { return super.equals(obj); } com.google.cloud.videointelligence.v1.ExplicitContentAnnotation other = (com.google.cloud.videointelligence.v1.ExplicitContentAnnotation) obj; if (!getFramesList().equals(other.getFramesList())) return false; if (!getVersion().equals(other.getVersion())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getFramesCount() > 0) { hash = (37 * hash) + FRAMES_FIELD_NUMBER; hash = (53 * hash) + getFramesList().hashCode(); } hash = (37 * hash) + VERSION_FIELD_NUMBER; hash = (53 * hash) + getVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Explicit content annotation (based on per-frame visual signals only). * If no explicit content has been detected in a frame, no annotations are * present for that frame. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1.ExplicitContentAnnotation} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1.ExplicitContentAnnotation) com.google.cloud.videointelligence.v1.ExplicitContentAnnotationOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_ExplicitContentAnnotation_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_ExplicitContentAnnotation_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.class, com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.Builder.class); } // Construct using com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (framesBuilder_ == 
null) { frames_ = java.util.Collections.emptyList(); } else { frames_ = null; framesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); version_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1_ExplicitContentAnnotation_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentAnnotation getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentAnnotation build() { com.google.cloud.videointelligence.v1.ExplicitContentAnnotation result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentAnnotation buildPartial() { com.google.cloud.videointelligence.v1.ExplicitContentAnnotation result = new com.google.cloud.videointelligence.v1.ExplicitContentAnnotation(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation result) { if (framesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { frames_ = java.util.Collections.unmodifiableList(frames_); bitField0_ = (bitField0_ & ~0x00000001); } result.frames_ = frames_; } else { result.frames_ = framesBuilder_.build(); } } private void buildPartial0( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.version_ = version_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder 
setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1.ExplicitContentAnnotation) { return mergeFrom((com.google.cloud.videointelligence.v1.ExplicitContentAnnotation) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1.ExplicitContentAnnotation other) { if (other == com.google.cloud.videointelligence.v1.ExplicitContentAnnotation.getDefaultInstance()) return this; if (framesBuilder_ == null) { if (!other.frames_.isEmpty()) { if (frames_.isEmpty()) { frames_ = other.frames_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFramesIsMutable(); frames_.addAll(other.frames_); } onChanged(); } } else { if (!other.frames_.isEmpty()) { if (framesBuilder_.isEmpty()) { framesBuilder_.dispose(); framesBuilder_ = null; frames_ = other.frames_; bitField0_ = (bitField0_ & ~0x00000001); framesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFramesFieldBuilder() : null; } else { framesBuilder_.addAllMessages(other.frames_); } } } if (!other.getVersion().isEmpty()) { version_ = other.version_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.videointelligence.v1.ExplicitContentFrame m = input.readMessage( com.google.cloud.videointelligence.v1.ExplicitContentFrame.parser(), extensionRegistry); if (framesBuilder_ == null) { ensureFramesIsMutable(); frames_.add(m); } else { framesBuilder_.addMessage(m); } break; } // case 10 case 18: { version_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.videointelligence.v1.ExplicitContentFrame> frames_ = java.util.Collections.emptyList(); private void ensureFramesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { frames_ = new java.util.ArrayList<com.google.cloud.videointelligence.v1.ExplicitContentFrame>( frames_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.ExplicitContentFrame, 
com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder, com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder> framesBuilder_; /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.ExplicitContentFrame> getFramesList() { if (framesBuilder_ == null) { return java.util.Collections.unmodifiableList(frames_); } else { return framesBuilder_.getMessageList(); } } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public int getFramesCount() { if (framesBuilder_ == null) { return frames_.size(); } else { return framesBuilder_.getCount(); } } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public com.google.cloud.videointelligence.v1.ExplicitContentFrame getFrames(int index) { if (framesBuilder_ == null) { return frames_.get(index); } else { return framesBuilder_.getMessage(index); } } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder setFrames( int index, com.google.cloud.videointelligence.v1.ExplicitContentFrame value) { if (framesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFramesIsMutable(); frames_.set(index, value); onChanged(); } else { framesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All video frames where explicit content was detected. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder setFrames( int index, com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder builderForValue) { if (framesBuilder_ == null) { ensureFramesIsMutable(); frames_.set(index, builderForValue.build()); onChanged(); } else { framesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder addFrames(com.google.cloud.videointelligence.v1.ExplicitContentFrame value) { if (framesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFramesIsMutable(); frames_.add(value); onChanged(); } else { framesBuilder_.addMessage(value); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder addFrames( int index, com.google.cloud.videointelligence.v1.ExplicitContentFrame value) { if (framesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFramesIsMutable(); frames_.add(index, value); onChanged(); } else { framesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder addFrames( com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder builderForValue) { if (framesBuilder_ == null) { ensureFramesIsMutable(); frames_.add(builderForValue.build()); onChanged(); } else { framesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All video frames where explicit content was detected. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder addFrames( int index, com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder builderForValue) { if (framesBuilder_ == null) { ensureFramesIsMutable(); frames_.add(index, builderForValue.build()); onChanged(); } else { framesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder addAllFrames( java.lang.Iterable<? extends com.google.cloud.videointelligence.v1.ExplicitContentFrame> values) { if (framesBuilder_ == null) { ensureFramesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, frames_); onChanged(); } else { framesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder clearFrames() { if (framesBuilder_ == null) { frames_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { framesBuilder_.clear(); } return this; } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public Builder removeFrames(int index) { if (framesBuilder_ == null) { ensureFramesIsMutable(); frames_.remove(index); onChanged(); } else { framesBuilder_.remove(index); } return this; } /** * * * <pre> * All video frames where explicit content was detected. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder getFramesBuilder( int index) { return getFramesFieldBuilder().getBuilder(index); } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder getFramesOrBuilder( int index) { if (framesBuilder_ == null) { return frames_.get(index); } else { return framesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public java.util.List< ? extends com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder> getFramesOrBuilderList() { if (framesBuilder_ != null) { return framesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(frames_); } } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder addFramesBuilder() { return getFramesFieldBuilder() .addBuilder( com.google.cloud.videointelligence.v1.ExplicitContentFrame.getDefaultInstance()); } /** * * * <pre> * All video frames where explicit content was detected. 
* </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder addFramesBuilder( int index) { return getFramesFieldBuilder() .addBuilder( index, com.google.cloud.videointelligence.v1.ExplicitContentFrame.getDefaultInstance()); } /** * * * <pre> * All video frames where explicit content was detected. * </pre> * * <code>repeated .google.cloud.videointelligence.v1.ExplicitContentFrame frames = 1;</code> */ public java.util.List<com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder> getFramesBuilderList() { return getFramesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.ExplicitContentFrame, com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder, com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder> getFramesFieldBuilder() { if (framesBuilder_ == null) { framesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.videointelligence.v1.ExplicitContentFrame, com.google.cloud.videointelligence.v1.ExplicitContentFrame.Builder, com.google.cloud.videointelligence.v1.ExplicitContentFrameOrBuilder>( frames_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); frames_ = null; } return framesBuilder_; } private java.lang.Object version_ = ""; /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The version. */ public java.lang.String getVersion() { java.lang.Object ref = version_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); version_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return The bytes for version. 
*/ public com.google.protobuf.ByteString getVersionBytes() { java.lang.Object ref = version_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); version_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @param value The version to set. * @return This builder for chaining. */ public Builder setVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @return This builder for chaining. */ public Builder clearVersion() { version_ = getDefaultInstance().getVersion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Feature version. * </pre> * * <code>string version = 2;</code> * * @param value The bytes for version to set. * @return This builder for chaining. 
*/ public Builder setVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); version_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1.ExplicitContentAnnotation) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ExplicitContentAnnotation) private static final com.google.cloud.videointelligence.v1.ExplicitContentAnnotation DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1.ExplicitContentAnnotation(); } public static com.google.cloud.videointelligence.v1.ExplicitContentAnnotation getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ExplicitContentAnnotation> PARSER = new com.google.protobuf.AbstractParser<ExplicitContentAnnotation>() { @java.lang.Override public ExplicitContentAnnotation parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } 
return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ExplicitContentAnnotation> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExplicitContentAnnotation> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1.ExplicitContentAnnotation getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
37,655
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ReachPlanServiceClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.ads.googleads.v20.services; import com.google.ads.googleads.v20.services.stub.ReachPlanServiceStub; import com.google.ads.googleads.v20.services.stub.ReachPlanServiceStubSettings; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.rpc.UnaryCallable; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Reach Plan Service gives users information about audience size that can be * reached through advertisement on YouTube. In particular, GenerateReachForecast provides estimated * number of people of specified demographics that can be reached by an ad in a given market by a * campaign of certain duration with a defined budget. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * String customerId = "customerId-1581184615"; * GenerateConversionRatesResponse response = * reachPlanServiceClient.generateConversionRates(customerId); * } * }</pre> * * <p>Note: close() needs to be called on the ReachPlanServiceClient object to clean up resources * such as threads. In the example above, try-with-resources is used, which automatically calls * close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> GenerateConversionRates</td> * <td><p> Returns a collection of conversion rate suggestions for supported plannable products. * <p> List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> generateConversionRates(GenerateConversionRatesRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> generateConversionRates(String customerId) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> generateConversionRatesCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListPlannableLocations</td> * <td><p> Returns the list of plannable locations (for example, countries). 
* <p> List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listPlannableLocations(ListPlannableLocationsRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listPlannableLocationsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListPlannableProducts</td> * <td><p> Returns the list of per-location plannable YouTube ad formats with allowed targeting. * <p> List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() [InternalError]() [QuotaError]() [RequestError]()</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listPlannableProducts(ListPlannableProductsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> listPlannableProducts(String plannableLocationId) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listPlannableProductsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> GenerateReachForecast</td> * <td><p> Generates a reach forecast for a given targeting / product mix. 
* <p> List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() [RequestError]()</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> generateReachForecast(GenerateReachForecastRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> generateReachForecast(String customerId, CampaignDuration campaignDuration, List&lt;PlannedProduct&gt; plannedProducts) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> generateReachForecastCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListPlannableUserLists</td> * <td><p> Returns the list of plannable user lists with their plannable status. User lists may not be plannable for a number of reasons, including: - They are less than 10 days old. - They have a membership lifespan that is less than 30 days - They have less than 10,000 or more than 700,000 users. 
* <p> List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() [RequestError]()</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listPlannableUserLists(ListPlannableUserListsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> listPlannableUserLists(String customerId) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listPlannableUserListsCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of ReachPlanServiceSettings to * create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ReachPlanServiceSettings reachPlanServiceSettings = * ReachPlanServiceSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * ReachPlanServiceClient reachPlanServiceClient = * ReachPlanServiceClient.create(reachPlanServiceSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * ReachPlanServiceSettings reachPlanServiceSettings = * ReachPlanServiceSettings.newBuilder().setEndpoint(myEndpoint).build(); * ReachPlanServiceClient reachPlanServiceClient = * ReachPlanServiceClient.create(reachPlanServiceSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @Generated("by gapic-generator-java") public class ReachPlanServiceClient implements BackgroundResource { private final ReachPlanServiceSettings settings; private final ReachPlanServiceStub stub; /** Constructs an instance of ReachPlanServiceClient with default settings. */ public static final ReachPlanServiceClient create() throws IOException { return create(ReachPlanServiceSettings.newBuilder().build()); } /** * Constructs an instance of ReachPlanServiceClient, using the given settings. The channels are * created based on the settings passed in, or defaults for any settings that are not set. 
*/ public static final ReachPlanServiceClient create(ReachPlanServiceSettings settings) throws IOException { return new ReachPlanServiceClient(settings); } /** * Constructs an instance of ReachPlanServiceClient, using the given stub for making calls. This * is for advanced usage - prefer using create(ReachPlanServiceSettings). */ public static final ReachPlanServiceClient create(ReachPlanServiceStub stub) { return new ReachPlanServiceClient(stub); } /** * Constructs an instance of ReachPlanServiceClient, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected ReachPlanServiceClient(ReachPlanServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((ReachPlanServiceStubSettings) settings.getStubSettings()).createStub(); } protected ReachPlanServiceClient(ReachPlanServiceStub stub) { this.settings = null; this.stub = stub; } public final ReachPlanServiceSettings getSettings() { return settings; } public ReachPlanServiceStub getStub() { return stub; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns a collection of conversion rate suggestions for supported plannable products. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * String customerId = "customerId-1581184615"; * GenerateConversionRatesResponse response = * reachPlanServiceClient.generateConversionRates(customerId); * } * }</pre> * * @param customerId Required. The ID of the customer. A conversion rate based on the historical * data of this customer may be suggested. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final GenerateConversionRatesResponse generateConversionRates(String customerId) { GenerateConversionRatesRequest request = GenerateConversionRatesRequest.newBuilder().setCustomerId(customerId).build(); return generateConversionRates(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns a collection of conversion rate suggestions for supported plannable products. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * GenerateConversionRatesRequest request = * GenerateConversionRatesRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCustomerReachGroup("customerReachGroup123255626") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * GenerateConversionRatesResponse response = * reachPlanServiceClient.generateConversionRates(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final GenerateConversionRatesResponse generateConversionRates( GenerateConversionRatesRequest request) { return generateConversionRatesCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns a collection of conversion rate suggestions for supported plannable products. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * GenerateConversionRatesRequest request = * GenerateConversionRatesRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCustomerReachGroup("customerReachGroup123255626") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ApiFuture<GenerateConversionRatesResponse> future = * reachPlanServiceClient.generateConversionRatesCallable().futureCall(request); * // Do something. * GenerateConversionRatesResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<GenerateConversionRatesRequest, GenerateConversionRatesResponse> generateConversionRatesCallable() { return stub.generateConversionRatesCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of plannable locations (for example, countries). * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableLocationsRequest request = * ListPlannableLocationsRequest.newBuilder() * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ListPlannableLocationsResponse response = * reachPlanServiceClient.listPlannableLocations(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListPlannableLocationsResponse listPlannableLocations( ListPlannableLocationsRequest request) { return listPlannableLocationsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of plannable locations (for example, countries). * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableLocationsRequest request = * ListPlannableLocationsRequest.newBuilder() * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ApiFuture<ListPlannableLocationsResponse> future = * reachPlanServiceClient.listPlannableLocationsCallable().futureCall(request); * // Do something. * ListPlannableLocationsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<ListPlannableLocationsRequest, ListPlannableLocationsResponse> listPlannableLocationsCallable() { return stub.listPlannableLocationsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of per-location plannable YouTube ad formats with allowed targeting. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * String plannableLocationId = "plannableLocationId-1266753969"; * ListPlannableProductsResponse response = * reachPlanServiceClient.listPlannableProducts(plannableLocationId); * } * }</pre> * * @param plannableLocationId Required. The ID of the selected location for planning. 
To list the * available plannable location IDs use * [ReachPlanService.ListPlannableLocations][google.ads.googleads.v20.services.ReachPlanService.ListPlannableLocations]. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListPlannableProductsResponse listPlannableProducts(String plannableLocationId) { ListPlannableProductsRequest request = ListPlannableProductsRequest.newBuilder() .setPlannableLocationId(plannableLocationId) .build(); return listPlannableProducts(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of per-location plannable YouTube ad formats with allowed targeting. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableProductsRequest request = * ListPlannableProductsRequest.newBuilder() * .setPlannableLocationId("plannableLocationId-1266753969") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ListPlannableProductsResponse response = * reachPlanServiceClient.listPlannableProducts(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. 
* @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListPlannableProductsResponse listPlannableProducts( ListPlannableProductsRequest request) { return listPlannableProductsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of per-location plannable YouTube ad formats with allowed targeting. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [HeaderError]() * [InternalError]() [QuotaError]() [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableProductsRequest request = * ListPlannableProductsRequest.newBuilder() * .setPlannableLocationId("plannableLocationId-1266753969") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ApiFuture<ListPlannableProductsResponse> future = * reachPlanServiceClient.listPlannableProductsCallable().futureCall(request); * // Do something. * ListPlannableProductsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<ListPlannableProductsRequest, ListPlannableProductsResponse> listPlannableProductsCallable() { return stub.listPlannableProductsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Generates a reach forecast for a given targeting / product mix. 
* * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * String customerId = "customerId-1581184615"; * CampaignDuration campaignDuration = CampaignDuration.newBuilder().build(); * List<PlannedProduct> plannedProducts = new ArrayList<>(); * GenerateReachForecastResponse response = * reachPlanServiceClient.generateReachForecast( * customerId, campaignDuration, plannedProducts); * } * }</pre> * * @param customerId Required. The ID of the customer. * @param campaignDuration Required. Campaign duration. * @param plannedProducts Required. The products to be forecast. The max number of allowed planned * products is 15. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final GenerateReachForecastResponse generateReachForecast( String customerId, CampaignDuration campaignDuration, List<PlannedProduct> plannedProducts) { GenerateReachForecastRequest request = GenerateReachForecastRequest.newBuilder() .setCustomerId(customerId) .setCampaignDuration(campaignDuration) .addAllPlannedProducts(plannedProducts) .build(); return generateReachForecast(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Generates a reach forecast for a given targeting / product mix. 
* * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * GenerateReachForecastRequest request = * GenerateReachForecastRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCurrencyCode("currencyCode1004773790") * .setCampaignDuration(CampaignDuration.newBuilder().build()) * .setCookieFrequencyCap(-1372585004) * .setCookieFrequencyCapSetting(FrequencyCap.newBuilder().build()) * .setMinEffectiveFrequency(-1801725097) * .setEffectiveFrequencyLimit(EffectiveFrequencyLimit.newBuilder().build()) * .setTargeting(Targeting.newBuilder().build()) * .addAllPlannedProducts(new ArrayList<PlannedProduct>()) * .setForecastMetricOptions(ForecastMetricOptions.newBuilder().build()) * .setCustomerReachGroup("customerReachGroup123255626") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * GenerateReachForecastResponse response = * reachPlanServiceClient.generateReachForecast(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final GenerateReachForecastResponse generateReachForecast( GenerateReachForecastRequest request) { return generateReachForecastCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. 
/** * Generates a reach forecast for a given targeting / product mix. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * GenerateReachForecastRequest request = * GenerateReachForecastRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCurrencyCode("currencyCode1004773790") * .setCampaignDuration(CampaignDuration.newBuilder().build()) * .setCookieFrequencyCap(-1372585004) * .setCookieFrequencyCapSetting(FrequencyCap.newBuilder().build()) * .setMinEffectiveFrequency(-1801725097) * .setEffectiveFrequencyLimit(EffectiveFrequencyLimit.newBuilder().build()) * .setTargeting(Targeting.newBuilder().build()) * .addAllPlannedProducts(new ArrayList<PlannedProduct>()) * .setForecastMetricOptions(ForecastMetricOptions.newBuilder().build()) * .setCustomerReachGroup("customerReachGroup123255626") * .setReachApplicationInfo(AdditionalApplicationInfo.newBuilder().build()) * .build(); * ApiFuture<GenerateReachForecastResponse> future = * reachPlanServiceClient.generateReachForecastCallable().futureCall(request); * // Do something. 
* GenerateReachForecastResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<GenerateReachForecastRequest, GenerateReachForecastResponse> generateReachForecastCallable() { return stub.generateReachForecastCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of plannable user lists with their plannable status. User lists may not be * plannable for a number of reasons, including: - They are less than 10 days old. - They have a * membership lifespan that is less than 30 days - They have less than 10,000 or more than 700,000 * users. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * String customerId = "customerId-1581184615"; * ListPlannableUserListsResponse response = * reachPlanServiceClient.listPlannableUserLists(customerId); * } * }</pre> * * @param customerId Required. The ID of the customer. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListPlannableUserListsResponse listPlannableUserLists(String customerId) { ListPlannableUserListsRequest request = ListPlannableUserListsRequest.newBuilder().setCustomerId(customerId).build(); return listPlannableUserLists(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of plannable user lists with their plannable status. 
User lists may not be * plannable for a number of reasons, including: - They are less than 10 days old. - They have a * membership lifespan that is less than 30 days - They have less than 10,000 or more than 700,000 * users. * * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableUserListsRequest request = * ListPlannableUserListsRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCustomerReachGroup("customerReachGroup123255626") * .build(); * ListPlannableUserListsResponse response = * reachPlanServiceClient.listPlannableUserLists(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListPlannableUserListsResponse listPlannableUserLists( ListPlannableUserListsRequest request) { return listPlannableUserListsCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Returns the list of plannable user lists with their plannable status. User lists may not be * plannable for a number of reasons, including: - They are less than 10 days old. - They have a * membership lifespan that is less than 30 days - They have less than 10,000 or more than 700,000 * users. 
* * <p>List of thrown errors: [AuthenticationError]() [AuthorizationError]() [FieldError]() * [HeaderError]() [InternalError]() [QuotaError]() [RangeError]() [ReachPlanError]() * [RequestError]() * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (ReachPlanServiceClient reachPlanServiceClient = ReachPlanServiceClient.create()) { * ListPlannableUserListsRequest request = * ListPlannableUserListsRequest.newBuilder() * .setCustomerId("customerId-1581184615") * .setCustomerReachGroup("customerReachGroup123255626") * .build(); * ApiFuture<ListPlannableUserListsResponse> future = * reachPlanServiceClient.listPlannableUserListsCallable().futureCall(request); * // Do something. * ListPlannableUserListsResponse response = future.get(); * } * }</pre> */ public final UnaryCallable<ListPlannableUserListsRequest, ListPlannableUserListsResponse> listPlannableUserListsCallable() { return stub.listPlannableUserListsCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } }
google/nomulus
36,566
core/src/test/java/google/registry/tools/UpdateRegistrarCommandTest.java
// Copyright 2017 The Nomulus Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package google.registry.tools; import static com.google.common.truth.Truth.assertThat; import static google.registry.testing.CertificateSamples.SAMPLE_CERT; import static google.registry.testing.CertificateSamples.SAMPLE_CERT3; import static google.registry.testing.CertificateSamples.SAMPLE_CERT3_HASH; import static google.registry.testing.DatabaseHelper.createTlds; import static google.registry.testing.DatabaseHelper.loadRegistrar; import static google.registry.testing.DatabaseHelper.newTld; import static google.registry.testing.DatabaseHelper.persistResource; import static google.registry.util.DateTimeUtils.START_OF_TIME; import static org.joda.money.CurrencyUnit.JPY; import static org.joda.money.CurrencyUnit.USD; import static org.joda.time.DateTimeZone.UTC; import static org.junit.jupiter.api.Assertions.assertThrows; import com.beust.jcommander.ParameterException; import com.google.common.base.VerifyException; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedMap; import google.registry.flows.certs.CertificateChecker; import google.registry.flows.certs.CertificateChecker.InsecureCertificateException; import google.registry.model.registrar.Registrar; import google.registry.model.registrar.Registrar.State; import 
google.registry.model.registrar.Registrar.Type; import google.registry.persistence.transaction.JpaTransactionManagerExtension; import google.registry.util.CidrAddressBlock; import java.math.BigDecimal; import java.util.Optional; import org.joda.money.Money; import org.joda.time.DateTime; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; /** Unit tests for {@link UpdateRegistrarCommand}. */ class UpdateRegistrarCommandTest extends CommandTestCase<UpdateRegistrarCommand> { @BeforeEach void beforeEach() { command.certificateChecker = new CertificateChecker( ImmutableSortedMap.of(START_OF_TIME, 825, DateTime.parse("2020-09-01T00:00:00Z"), 398), 30, 15, 2048, ImmutableSet.of("secp256r1", "secp384r1"), fakeClock); } @Test void testSuccess_password() throws Exception { assertThat(loadRegistrar("NewRegistrar").verifyPassword("some_password")).isFalse(); runCommand("--password=some_password", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").verifyPassword("some_password")).isTrue(); } @Test void testSuccess_registrarType() throws Exception { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setType(Registrar.Type.OTE) .setIanaIdentifier(null) .build()); assertThat(loadRegistrar("NewRegistrar").getType()).isEqualTo(Type.OTE); runCommand("--registrar_type=TEST", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getType()).isEqualTo(Type.TEST); } @Test void testFailure_noPasscodeOnChangeToReal() { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setType(Registrar.Type.OTE) .setIanaIdentifier(null) .setPhonePasscode(null) .build()); IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommand("--registrar_type=REAL", "--iana_id=1000", "--force", "NewRegistrar")); assertThat(thrown).hasMessageThat().contains("--passcode is required for REAL registrars."); } @Test void testSuccess_registrarState() throws Exception { 
assertThat(loadRegistrar("NewRegistrar").getState()).isEqualTo(State.ACTIVE); runCommand("--registrar_state=SUSPENDED", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getState()).isEqualTo(State.SUSPENDED); } @Test void testSuccess_allowedTlds() throws Exception { persistWhoisAbuseContact(); createTlds("xn--q9jyb4c", "foobar"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setAllowedTlds(ImmutableSet.of("xn--q9jyb4c")) .build()); runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--allowed_tlds=xn--q9jyb4c,foobar", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getAllowedTlds()) .containsExactly("xn--q9jyb4c", "foobar"); } @Test void testSuccess_addAllowedTlds() throws Exception { persistWhoisAbuseContact(); createTlds("xn--q9jyb4c", "foo", "bar"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setAllowedTlds(ImmutableSet.of("xn--q9jyb4c")) .build()); runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--add_allowed_tlds=foo,bar", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getAllowedTlds()) .containsExactly("xn--q9jyb4c", "foo", "bar"); } @Test void testSuccess_addAllowedTldsWithDupes() throws Exception { persistWhoisAbuseContact(); createTlds("xn--q9jyb4c", "foo", "bar"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setAllowedTlds(ImmutableSet.of("xn--q9jyb4c")) .build()); runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--add_allowed_tlds=xn--q9jyb4c,foo,bar", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getAllowedTlds()) .isEqualTo(ImmutableSet.of("xn--q9jyb4c", "foo", "bar")); } @Test void testSuccess_allowedTldsInNonProductionEnvironment() throws Exception { createTlds("xn--q9jyb4c", "foobar"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setAllowedTlds(ImmutableSet.of("xn--q9jyb4c")) .build()); runCommandInEnvironment( RegistryToolEnvironment.SANDBOX, 
"--allowed_tlds=xn--q9jyb4c,foobar", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getAllowedTlds()) .containsExactly("xn--q9jyb4c", "foobar"); } @Test void testSuccess_allowedTldsInPdtRegistrar() throws Exception { createTlds("xn--q9jyb4c", "foobar"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setType(Type.PDT) .setIanaIdentifier(9995L) .setAllowedTlds(ImmutableSet.of("xn--q9jyb4c")) .build()); runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--allowed_tlds=xn--q9jyb4c,foobar", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getAllowedTlds()) .containsExactly("xn--q9jyb4c", "foobar"); } @Test void testSuccess_ipAllowList() throws Exception { assertThat(loadRegistrar("NewRegistrar").getIpAddressAllowList()).isEmpty(); runCommand("--ip_allow_list=192.168.1.1,192.168.0.2/16", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getIpAddressAllowList()) .containsExactly( CidrAddressBlock.create("192.168.1.1"), CidrAddressBlock.create("192.168.0.2/16")) .inOrder(); } @Test void testSuccess_clearIpAllowList_useEmpty() throws Exception { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setIpAddressAllowList( ImmutableList.of( CidrAddressBlock.create("192.168.1.1"), CidrAddressBlock.create("192.168.0.2/16"))) .build()); assertThat(loadRegistrar("NewRegistrar").getIpAddressAllowList()).isNotEmpty(); runCommand("--ip_allow_list=", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getIpAddressAllowList()).isEmpty(); } @Test void testSuccess_certFile() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getClientCertificate()).isEmpty(); assertThat(registrar.getClientCertificateHash()).isEmpty(); runCommand("--cert_file=" + getCertFilename(SAMPLE_CERT3), "--force", "NewRegistrar"); registrar = loadRegistrar("NewRegistrar"); // NB: Hash was computed manually 
using 'openssl x509 -fingerprint -sha256 -in ...' and then // converting the result from a hex string to non-padded base64 encoded string. assertThat(registrar.getClientCertificate()).hasValue(SAMPLE_CERT3); assertThat(registrar.getClientCertificateHash()).hasValue(SAMPLE_CERT3_HASH); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); assertThat(registrar.getFailoverClientCertificateHash()).isEmpty(); } @Test void testSuccess_rotatePrimaryCert() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setClientCertificate(SAMPLE_CERT3, fakeClock.nowUtc()) .setFailoverClientCertificate(null, fakeClock.nowUtc()) .build()); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); assertThat(registrar.getFailoverClientCertificateHash()).isEmpty(); runCommand( "--cert_file=" + getCertFilename(SAMPLE_CERT3), "--rotate_primary_cert", "--force", "NewRegistrar"); registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).hasValue(SAMPLE_CERT3); assertThat(registrar.getFailoverClientCertificateHash()).hasValue(SAMPLE_CERT3_HASH); } @Test void test_rotatePrimaryCert_noPrimaryCert() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getClientCertificate()).isEmpty(); assertThat(registrar.getClientCertificateHash()).isEmpty(); VerifyException thrown = assertThrows( VerifyException.class, () -> runCommand( "--cert_file=" + getCertFilename(SAMPLE_CERT3), "--rotate_primary_cert", "--force", "NewRegistrar")); assertThat(thrown) .hasMessageThat() .isEqualTo( "Primary cert is absent. 
Rotation may remove a failover certificate still in use."); } @Test public void test_rotatePrimaryCert_withoutNewCertFile_throws() { IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommand("--rotate_primary_cert", "--force", "NewRegistrar")); assertThat(thrown) .hasMessageThat() .isEqualTo("--rotate_primary_cert must be used with --cert_file."); } @Test void testFail_certFileWithViolation() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getClientCertificate()).isEmpty(); assertThat(registrar.getClientCertificateHash()).isEmpty(); InsecureCertificateException thrown = assertThrows( InsecureCertificateException.class, () -> runCommand("--cert_file=" + getCertFilename(), "--force", "NewRegistrar")); assertThat(thrown.getMessage()) .isEqualTo( "Certificate validity period is too long; it must be less than or equal to 398" + " days."); assertThat(registrar.getClientCertificate()).isEmpty(); } @Test void testFail_certFileWithMultipleViolations() throws Exception { fakeClock.setTo(DateTime.parse("2055-10-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getClientCertificate()).isEmpty(); assertThat(registrar.getClientCertificateHash()).isEmpty(); InsecureCertificateException thrown = assertThrows( InsecureCertificateException.class, () -> runCommand("--cert_file=" + getCertFilename(), "--force", "NewRegistrar")); assertThat(thrown.getMessage()) .isEqualTo( "Certificate is expired.\nCertificate validity period is too long; it must be less" + " than or equal to 398 days."); assertThat(registrar.getClientCertificate()).isEmpty(); } @Test void testFail_failoverCertFileWithViolation() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); 
InsecureCertificateException thrown = assertThrows( InsecureCertificateException.class, () -> runCommand("--failover_cert_file=" + getCertFilename(), "--force", "NewRegistrar")); assertThat(thrown.getMessage()) .isEqualTo( "Certificate validity period is too long; it must be less than or equal to 398" + " days."); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); } @Test void testFail_failoverCertFileWithMultipleViolations() throws Exception { fakeClock.setTo(DateTime.parse("2055-10-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); InsecureCertificateException thrown = assertThrows( InsecureCertificateException.class, () -> runCommand("--failover_cert_file=" + getCertFilename(), "--force", "NewRegistrar")); assertThat(thrown.getMessage()) .isEqualTo( "Certificate is expired.\nCertificate validity period is too long; it must be less" + " than or equal to 398 days."); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); } @Test void testSuccess_failoverCertFile() throws Exception { fakeClock.setTo(DateTime.parse("2020-11-01T00:00:00Z")); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).isEmpty(); runCommand("--failover_cert_file=" + getCertFilename(SAMPLE_CERT3), "--force", "NewRegistrar"); registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getFailoverClientCertificate()).hasValue(SAMPLE_CERT3); } @Test void testSuccess_clearCert() throws Exception { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setClientCertificate(SAMPLE_CERT, DateTime.now(UTC)) .build()); assertThat(loadRegistrar("NewRegistrar").getClientCertificate()).isPresent(); runCommand("--cert_file=/dev/null", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getClientCertificate()).isEmpty(); } @Test void testSuccess_ianaId() throws Exception { 
assertThat(loadRegistrar("NewRegistrar").getIanaIdentifier()).isEqualTo(8); runCommand("--iana_id=12345", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getIanaIdentifier()).isEqualTo(12345); } @Test void testSuccess_poNumber() throws Exception { assertThat(loadRegistrar("NewRegistrar").getPoNumber()).isEmpty(); runCommand("--po_number=52345", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getPoNumber()).hasValue("52345"); } @Test void testSuccess_billingAccountMap() throws Exception { persistResource( loadRegistrar("NewRegistrar").asBuilder().setBillingAccountMap(ImmutableMap.of()).build()); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()).isEmpty(); runCommand("--billing_account_map=USD=abc123,JPY=789xyz", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()) .containsExactly(USD, "abc123", JPY, "789xyz"); } @Test void testSuccess_billingAccountMap_nullify() throws Exception { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setBillingAccountMap(ImmutableMap.of(USD, "abc123", JPY, "789xyz")) .build()); runCommand("--billing_account_map=", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()).isEmpty(); } @Test void testFailure_billingAccountMap_doesNotContainEntryForAllowedTld() { createTlds("foo"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setAllowedTlds(ImmutableSet.of()) .setBillingAccountMap(ImmutableMap.of()) .build()); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()).isEmpty(); IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommand( "--billing_account_map=JPY=789xyz", "--allowed_tlds=foo", "--force", "--registrar_type=REAL", "NewRegistrar")); assertThat(thrown) .hasMessageThat() .contains("their currency is missing from the billing account map: [foo]"); } @Test void testSuccess_billingAccountMap_onlyAppliesToRealRegistrar() throws 
Exception { persistResource( newTld("foo", "FOO") .asBuilder() .setCurrency(JPY) .setCreateBillingCostTransitions( ImmutableSortedMap.of(START_OF_TIME, Money.of(JPY, new BigDecimal(1300)))) .setRestoreBillingCost(Money.of(JPY, new BigDecimal(1700))) .setServerStatusChangeBillingCost(Money.of(JPY, new BigDecimal(1900))) .setRegistryLockOrUnlockBillingCost(Money.of(JPY, new BigDecimal(2700))) .setRenewBillingCostTransitions( ImmutableSortedMap.of(START_OF_TIME, Money.of(JPY, new BigDecimal(1100)))) .setEapFeeSchedule(ImmutableSortedMap.of(START_OF_TIME, Money.zero(JPY))) .setPremiumList(null) .build()); persistResource( loadRegistrar("NewRegistrar").asBuilder().setBillingAccountMap(ImmutableMap.of()).build()); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()).isEmpty(); runCommand("--billing_account_map=JPY=789xyz", "--allowed_tlds=foo", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()).containsExactly(JPY, "789xyz"); } @Test void testSuccess_billingAccountMap_partialUpdate() throws Exception { createTlds("foo"); persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setBillingAccountMap(ImmutableMap.of(USD, "abc123", JPY, "789xyz")) .build()); runCommand("--billing_account_map=JPY=123xyz", "--allowed_tlds=foo", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBillingAccountMap()) .containsExactly(JPY, "123xyz", USD, "abc123"); } @Test void testSuccess_streetAddress() throws Exception { runCommand( "--street=1234 Main St", "--street 4th Floor", "--street Suite 1", "--city Brooklyn", "--state NY", "--zip 11223", "--cc US", "--force", "NewRegistrar"); Registrar registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getLocalizedAddress() != null).isTrue(); assertThat(registrar.getLocalizedAddress().getStreet()).hasSize(3); assertThat(registrar.getLocalizedAddress().getStreet().get(0)).isEqualTo("1234 Main St"); 
assertThat(registrar.getLocalizedAddress().getStreet().get(1)).isEqualTo("4th Floor"); assertThat(registrar.getLocalizedAddress().getStreet().get(2)).isEqualTo("Suite 1"); assertThat(registrar.getLocalizedAddress().getCity()).isEqualTo("Brooklyn"); assertThat(registrar.getLocalizedAddress().getState()).isEqualTo("NY"); assertThat(registrar.getLocalizedAddress().getZip()).isEqualTo("11223"); assertThat(registrar.getLocalizedAddress().getCountryCode()).isEqualTo("US"); } @Test void testSuccess_blockPremiumNames() throws Exception { assertThat(loadRegistrar("NewRegistrar").getBlockPremiumNames()).isFalse(); runCommandForced("--block_premium=true", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBlockPremiumNames()).isTrue(); } @Test void testSuccess_resetBlockPremiumNames() throws Exception { persistResource(loadRegistrar("NewRegistrar").asBuilder().setBlockPremiumNames(true).build()); runCommandForced("--block_premium=false", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getBlockPremiumNames()).isFalse(); } @Test void testSuccess_allowRegistryLock() throws Exception { assertThat(loadRegistrar("NewRegistrar").isRegistryLockAllowed()).isFalse(); runCommandForced("--registry_lock_allowed=true", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").isRegistryLockAllowed()).isTrue(); } @Test void testSuccess_disallowRegistryLock() throws Exception { persistResource(loadRegistrar("NewRegistrar").asBuilder().setRegistryLockAllowed(true).build()); runCommandForced("--registry_lock_allowed=false", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").isRegistryLockAllowed()).isFalse(); } @Test void testSuccess_unspecifiedBooleansArentChanged() throws Exception { persistResource( loadRegistrar("NewRegistrar") .asBuilder() .setBlockPremiumNames(true) .setContactsRequireSyncing(true) .build()); // Make some unrelated change where we don't specify the flags for the booleans. 
runCommandForced("NewRegistrar"); // Make sure that the boolean fields didn't get reset back to false. Registrar reloadedRegistrar = loadRegistrar("NewRegistrar"); assertThat(reloadedRegistrar.getBlockPremiumNames()).isTrue(); assertThat(reloadedRegistrar.getContactsRequireSyncing()).isTrue(); } @Test void testSuccess_updateMultiple() throws Exception { assertThat(loadRegistrar("TheRegistrar").getState()).isEqualTo(State.ACTIVE); assertThat(loadRegistrar("NewRegistrar").getState()).isEqualTo(State.ACTIVE); runCommandForced("--registrar_state=SUSPENDED", "TheRegistrar", "NewRegistrar"); assertThat(loadRegistrar("TheRegistrar").getState()).isEqualTo(State.SUSPENDED); assertThat(loadRegistrar("NewRegistrar").getState()).isEqualTo(State.SUSPENDED); } @Test void testSuccess_resetOptionalParamsNullString() throws Exception { Registrar registrar = loadRegistrar("NewRegistrar"); registrar = persistResource( registrar .asBuilder() .setType(Type.PDT) // for non-null IANA ID .setIanaIdentifier(9995L) .setPhoneNumber("+1.2125555555") .setFaxNumber("+1.2125555556") .setUrl("http://www.example.tld") .setDriveFolderId("id") .build()); assertThat(registrar.getIanaIdentifier()).isNotNull(); assertThat(registrar.getPhoneNumber()).isNotNull(); assertThat(registrar.getFaxNumber()).isNotNull(); assertThat(registrar.getUrl()).isNotNull(); assertThat(registrar.getDriveFolderId()).isNotNull(); runCommand( "--registrar_type=TEST", // necessary for null IANA ID "--iana_id=null", "--phone=null", "--fax=null", "--url=null", "--drive_folder_id=null", "--force", "NewRegistrar"); registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getIanaIdentifier()).isNull(); assertThat(registrar.getPhoneNumber()).isNull(); assertThat(registrar.getFaxNumber()).isNull(); assertThat(registrar.getUrl()).isNull(); assertThat(registrar.getDriveFolderId()).isNull(); } @Test void testSuccess_resetOptionalParamsEmptyString() throws Exception { Registrar registrar = loadRegistrar("NewRegistrar"); registrar 
= persistResource( registrar .asBuilder() .setType(Type.PDT) // for non-null IANA ID .setIanaIdentifier(9995L) .setPhoneNumber("+1.2125555555") .setFaxNumber("+1.2125555556") .setUrl("http://www.example.tld") .setDriveFolderId("id") .build()); assertThat(registrar.getIanaIdentifier()).isNotNull(); assertThat(registrar.getPhoneNumber()).isNotNull(); assertThat(registrar.getFaxNumber()).isNotNull(); assertThat(registrar.getUrl()).isNotNull(); assertThat(registrar.getDriveFolderId()).isNotNull(); runCommand( "--registrar_type=TEST", // necessary for null IANA ID "--iana_id=", "--phone=", "--fax=", "--url=", "--drive_folder_id=", "--force", "NewRegistrar"); registrar = loadRegistrar("NewRegistrar"); assertThat(registrar.getIanaIdentifier()).isNull(); assertThat(registrar.getPhoneNumber()).isNull(); assertThat(registrar.getFaxNumber()).isNull(); assertThat(registrar.getUrl()).isNull(); assertThat(registrar.getDriveFolderId()).isNull(); } @Test void testSuccess_setIcannEmail() throws Exception { Registrar registrar = loadRegistrar("TheRegistrar"); assertThat(registrar.getEmailAddress()).isEqualTo("the.registrar@example.com"); runCommand("--icann_referral_email=foo@bar.test", "--force", "TheRegistrar"); Registrar updatedRegistrar = loadRegistrar("TheRegistrar"); assertThat(updatedRegistrar.getIcannReferralEmail()).isEqualTo("foo@bar.test"); assertThat(updatedRegistrar.getEmailAddress()).isEqualTo("the.registrar@example.com"); } @Test void testSuccess_setEmail() throws Exception { runCommand("--email=foo@bar.baz", "--force", "TheRegistrar"); Registrar registrar = loadRegistrar("TheRegistrar"); assertThat(registrar.getEmailAddress()).isEqualTo("foo@bar.baz"); } @Test void testSuccess_setWhoisServer_works() throws Exception { runCommand("--whois=whois.goth.black", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getWhoisServer()).isEqualTo("whois.goth.black"); } @Test void testSuccess_triggerGroupSyncing_works() throws Exception { persistResource( 
loadRegistrar("NewRegistrar").asBuilder().setContactsRequireSyncing(false).build()); assertThat(loadRegistrar("NewRegistrar").getContactsRequireSyncing()).isFalse(); runCommand("--sync_groups=true", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getContactsRequireSyncing()).isTrue(); } @Test void testFailure_invalidRegistrarType() { assertThrows( ParameterException.class, () -> runCommand("--registrar_type=INVALID_TYPE", "--force", "NewRegistrar")); } @Test void testFailure_invalidRegistrarState() { assertThrows( ParameterException.class, () -> runCommand("--registrar_state=INVALID_STATE", "--force", "NewRegistrar")); } @Test void testFailure_negativeIanaId() { assertThrows( IllegalArgumentException.class, () -> runCommand("--iana_id=-1", "--force", "NewRegistrar")); } @Test void testFailure_nonIntegerIanaId() { assertThrows( ParameterException.class, () -> runCommand("--iana_id=ABC123", "--force", "NewRegistrar")); } @Test void testFailure_passcodeTooShort() { assertThrows( IllegalArgumentException.class, () -> runCommand("--passcode=0123", "--force", "NewRegistrar")); } @Test void testFailure_passcodeTooLong() { assertThrows( IllegalArgumentException.class, () -> runCommand("--passcode=012345", "--force", "NewRegistrar")); } @Test void testFailure_invalidPasscode() { assertThrows( IllegalArgumentException.class, () -> runCommand("--passcode=code1", "--force", "NewRegistrar")); } @Test void testFailure_allowedTldDoesNotExist() { assertThrows( IllegalArgumentException.class, () -> runCommand("--allowed_tlds=foobar", "--force", "NewRegistrar")); } @Test void testFailure_addAllowedTldDoesNotExist() { assertThrows( IllegalArgumentException.class, () -> runCommand("--add_allowed_tlds=foobar", "--force", "NewRegistrar")); } @Test void testFailure_allowedTldsAndAddAllowedTlds() { assertThrows( IllegalArgumentException.class, () -> runCommand("--allowed_tlds=bar", "--add_allowed_tlds=foo", "--force", "NewRegistrar")); } @Test void 
testFailure_setAllowedTldsWithoutAbuseContact() { createTlds("bar"); IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--allowed_tlds=bar", "--force", "TheRegistrar")); assertThat(thrown).hasMessageThat().startsWith("Cannot modify allowed TLDs"); } @Test void testFailure_addAllowedTldsWithoutAbuseContact() { createTlds("bar"); IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommandInEnvironment( RegistryToolEnvironment.PRODUCTION, "--add_allowed_tlds=bar", "--force", "TheRegistrar")); assertThat(thrown).hasMessageThat().startsWith("Cannot modify allowed TLDs"); } @Test void testFailure_invalidIpAllowList() { assertThrows( IllegalArgumentException.class, () -> runCommand("--ip_allow_list=foobarbaz", "--force", "NewRegistrar")); } @Test void testFailure_invalidCertFileContents() { assertThrows( Exception.class, () -> runCommand("--cert_file=" + writeToTmpFile("ABCDEF"), "--force", "NewRegistrar")); } @Test void testFailure_missingClientId() { assertThrows(ParameterException.class, () -> runCommand("--force")); } @Test void testFailure_missingStreetLines() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--city Brooklyn", "--state NY", "--zip 11223", "--cc US", "--force", "NewRegistrar")); } @Test void testFailure_missingCity() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street=\"1234 Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--state NY", "--zip 11223", "--cc US", "--force", "NewRegistrar")); } @Test void testFailure_missingState() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street=\"1234 Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--city Brooklyn", "--zip 11223", "--cc US", "--force", "NewRegistrar")); } @Test void testFailure_missingZip() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street=\"1234 
Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--city Brooklyn", "--state NY", "--cc US", "--force", "NewRegistrar")); } @Test void testFailure_missingCc() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street=\"1234 Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--city Brooklyn", "--state NY", "--zip 11223", "--force", "NewRegistrar")); } @Test void testFailure_missingInvalidCc() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street=\"1234 Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--city Brooklyn", "--state NY", "--zip 11223", "--cc USA", "--force", "NewRegistrar")); } @Test void testFailure_tooManyStreetLines() { assertThrows( IllegalArgumentException.class, () -> runCommand( "--street \"Attn:Hey You Guys\"", "--street \"1234 Main St\"", "--street \"4th Floor\"", "--street \"Suite 1\"", "--city Brooklyn", "--state NY", "--zip 11223", "--cc USA", "--force", "NewRegistrar")); } @Test void testFailure_tooFewStreetLines() { assertThrows( ParameterException.class, () -> runCommand( "--street", "--city Brooklyn", "--state NY", "--zip 11223", "--cc USA", "--force", "NewRegistrar")); } @Test void testFailure_unknownFlag() { assertThrows( IllegalArgumentException.class, () -> runCommand("--force", "--unrecognized_flag=foo", "NewRegistrar")); } @Test void testFailure_doesNotExist() { IllegalArgumentException thrown = assertThrows(IllegalArgumentException.class, () -> runCommand("--force", "ClientZ")); assertThat(thrown).hasMessageThat().contains("Registrar ClientZ not found"); } @Test void testFailure_registrarNameSimilarToExisting() { // Note that "tHeRe GiStRaR" normalizes identically to "The Registrar", which is created by // JpaTransactionManagerExtension. 
assertThrows( IllegalArgumentException.class, () -> runCommand("--name tHeRe GiStRaR", "--force", "NewRegistrar")); } @Test void testSuccess_poNumberNotSpecified_doesntWipeOutExisting() throws Exception { Registrar registrar = persistResource( loadRegistrar("NewRegistrar").asBuilder().setPoNumber(Optional.of("1664")).build()); assertThat(registrar.verifyPassword("some_password")).isFalse(); runCommand("--password=some_password", "--force", "NewRegistrar"); Registrar reloadedRegistrar = loadRegistrar("NewRegistrar"); assertThat(reloadedRegistrar.verifyPassword("some_password")).isTrue(); assertThat(reloadedRegistrar.getPoNumber()).hasValue("1664"); } @Test void testSuccess_poNumber_canBeBlanked() throws Exception { persistResource( loadRegistrar("NewRegistrar").asBuilder().setPoNumber(Optional.of("1664")).build()); runCommand("--po_number=null", "--force", "NewRegistrar"); assertThat(loadRegistrar("NewRegistrar").getPoNumber()).isEmpty(); } @Test void testFailure_badEmail() { IllegalArgumentException thrown = assertThrows( IllegalArgumentException.class, () -> runCommand("--email=lolcat", "--force", "NewRegistrar")); assertThat(thrown) .hasMessageThat() .isEqualTo("Provided email lolcat is not a valid email address"); } private void persistWhoisAbuseContact() { persistResource( JpaTransactionManagerExtension.makeRegistrarContact1() .asBuilder() .setVisibleInDomainWhoisAsAbuse(true) .build()); } }
googleapis/google-cloud-java
37,241
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/UploadedPythonPackage.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * Artifact uploaded using the PythonPackage directive. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedPythonPackage} */ public final class UploadedPythonPackage extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.UploadedPythonPackage) UploadedPythonPackageOrBuilder { private static final long serialVersionUID = 0L; // Use UploadedPythonPackage.newBuilder() to construct. 
private UploadedPythonPackage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UploadedPythonPackage() { uri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UploadedPythonPackage(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedPythonPackage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedPythonPackage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedPythonPackage.class, com.google.cloudbuild.v1.UploadedPythonPackage.Builder.class); } private int bitField0_; public static final int URI_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILE_HASHES_FIELD_NUMBER = 2; private com.google.cloudbuild.v1.FileHashes fileHashes_; /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ @java.lang.Override public boolean hasFileHashes() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ @java.lang.Override public com.google.cloudbuild.v1.FileHashes getFileHashes() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ @java.lang.Override public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } public static final int PUSH_TIMING_FIELD_NUMBER = 3; private com.google.cloudbuild.v1.TimeSpan pushTiming_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. */ @java.lang.Override public boolean hasPushTiming() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpan getPushTiming() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getPushTiming()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPushTiming()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.UploadedPythonPackage)) { return super.equals(obj); } com.google.cloudbuild.v1.UploadedPythonPackage other = (com.google.cloudbuild.v1.UploadedPythonPackage) obj; if (!getUri().equals(other.getUri())) return false; if (hasFileHashes() != other.hasFileHashes()) return false; if (hasFileHashes()) { if (!getFileHashes().equals(other.getFileHashes())) return false; } if (hasPushTiming() != other.hasPushTiming()) return false; if (hasPushTiming()) { if (!getPushTiming().equals(other.getPushTiming())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); if (hasFileHashes()) { hash = (37 * hash) + FILE_HASHES_FIELD_NUMBER; hash = (53 * hash) + getFileHashes().hashCode(); } if (hasPushTiming()) { hash = (37 * hash) + PUSH_TIMING_FIELD_NUMBER; hash = (53 * hash) + getPushTiming().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedPythonPackage parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.UploadedPythonPackage prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Artifact uploaded using the PythonPackage directive. 
* </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedPythonPackage} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.UploadedPythonPackage) com.google.cloudbuild.v1.UploadedPythonPackageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedPythonPackage_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedPythonPackage_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedPythonPackage.class, com.google.cloudbuild.v1.UploadedPythonPackage.Builder.class); } // Construct using com.google.cloudbuild.v1.UploadedPythonPackage.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFileHashesFieldBuilder(); getPushTimingFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uri_ = ""; fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedPythonPackage_descriptor; } @java.lang.Override public 
com.google.cloudbuild.v1.UploadedPythonPackage getDefaultInstanceForType() { return com.google.cloudbuild.v1.UploadedPythonPackage.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.UploadedPythonPackage build() { com.google.cloudbuild.v1.UploadedPythonPackage result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.UploadedPythonPackage buildPartial() { com.google.cloudbuild.v1.UploadedPythonPackage result = new com.google.cloudbuild.v1.UploadedPythonPackage(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v1.UploadedPythonPackage result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.uri_ = uri_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.fileHashes_ = fileHashesBuilder_ == null ? fileHashes_ : fileHashesBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pushTiming_ = pushTimingBuilder_ == null ? 
pushTiming_ : pushTimingBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.UploadedPythonPackage) { return mergeFrom((com.google.cloudbuild.v1.UploadedPythonPackage) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.UploadedPythonPackage other) { if (other == com.google.cloudbuild.v1.UploadedPythonPackage.getDefaultInstance()) return this; if (!other.getUri().isEmpty()) { uri_ = other.uri_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasFileHashes()) { mergeFileHashes(other.getFileHashes()); } if (other.hasPushTiming()) { mergePushTiming(other.getPushTiming()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getFileHashesFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getPushTimingFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. */ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @param value The uri to set. * @return This builder for chaining. 
*/ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @param value The bytes for uri to set. * @return This builder for chaining. */ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloudbuild.v1.FileHashes fileHashes_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> fileHashesBuilder_; /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ public boolean hasFileHashes() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ public com.google.cloudbuild.v1.FileHashes getFileHashes() { if (fileHashesBuilder_ == null) { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } else { return fileHashesBuilder_.getMessage(); } } /** * * * <pre> * Hash types and values of the Python Artifact. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } fileHashes_ = value; } else { fileHashesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashesBuilder_ == null) { fileHashes_ = builderForValue.build(); } else { fileHashesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder mergeFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && fileHashes_ != null && fileHashes_ != com.google.cloudbuild.v1.FileHashes.getDefaultInstance()) { getFileHashesBuilder().mergeFrom(value); } else { fileHashes_ = value; } } else { fileHashesBuilder_.mergeFrom(value); } if (fileHashes_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder clearFileHashes() { bitField0_ = (bitField0_ & ~0x00000002); fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Hash types and values of the Python Artifact. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder getFileHashesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFileHashesFieldBuilder().getBuilder(); } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { if (fileHashesBuilder_ != null) { return fileHashesBuilder_.getMessageOrBuilder(); } else { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } } /** * * * <pre> * Hash types and values of the Python Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashesFieldBuilder() { if (fileHashesBuilder_ == null) { fileHashesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder>( getFileHashes(), getParentForChildren(), isClean()); fileHashes_ = null; } return fileHashesBuilder_; } private com.google.cloudbuild.v1.TimeSpan pushTiming_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> pushTimingBuilder_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. 
*/ public boolean hasPushTiming() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ public com.google.cloudbuild.v1.TimeSpan getPushTiming() { if (pushTimingBuilder_ == null) { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } else { return pushTimingBuilder_.getMessage(); } } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } pushTiming_ = value; } else { pushTimingBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan.Builder builderForValue) { if (pushTimingBuilder_ == null) { pushTiming_ = builderForValue.build(); } else { pushTimingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergePushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && pushTiming_ != null && pushTiming_ != com.google.cloudbuild.v1.TimeSpan.getDefaultInstance()) { getPushTimingBuilder().mergeFrom(value); } else { pushTiming_ = value; } } else { pushTimingBuilder_.mergeFrom(value); } if (pushTiming_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearPushTiming() { bitField0_ = (bitField0_ & ~0x00000004); pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpan.Builder getPushTimingBuilder() { bitField0_ |= 0x00000004; onChanged(); return getPushTimingFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { if (pushTimingBuilder_ != null) { return pushTimingBuilder_.getMessageOrBuilder(); } else { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> getPushTimingFieldBuilder() { if (pushTimingBuilder_ == null) { pushTimingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder>( getPushTiming(), getParentForChildren(), isClean()); pushTiming_ = null; } return pushTimingBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.UploadedPythonPackage) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UploadedPythonPackage) private static final com.google.cloudbuild.v1.UploadedPythonPackage DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.UploadedPythonPackage(); } public static com.google.cloudbuild.v1.UploadedPythonPackage getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UploadedPythonPackage> PARSER = new com.google.protobuf.AbstractParser<UploadedPythonPackage>() { @java.lang.Override public UploadedPythonPackage parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UploadedPythonPackage> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UploadedPythonPackage> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.UploadedPythonPackage getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/derby
35,824
java/org.apache.derby.engine/org/apache/derby/impl/sql/depend/BasicDependencyManager.java
/* Derby - Class org.apache.derby.impl.sql.depend.BasicDependencyManager Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.depend; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ListIterator; import java.util.Map; import org.apache.derby.catalog.DependableFinder; import org.apache.derby.catalog.UUID; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.iapi.services.context.ContextManager; import org.apache.derby.iapi.services.io.FormatableBitSet; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.sql.conn.LanguageConnectionContext; import org.apache.derby.iapi.sql.conn.StatementContext; import org.apache.derby.iapi.sql.depend.Dependency; import org.apache.derby.iapi.sql.depend.DependencyManager; import org.apache.derby.iapi.sql.depend.Dependent; import org.apache.derby.iapi.sql.depend.Provider; import org.apache.derby.iapi.sql.depend.ProviderInfo; import org.apache.derby.iapi.sql.depend.ProviderList; import org.apache.derby.iapi.sql.dictionary.DataDictionary; import org.apache.derby.iapi.sql.dictionary.DependencyDescriptor; import 
org.apache.derby.iapi.sql.dictionary.TableDescriptor;
import org.apache.derby.iapi.sql.dictionary.ViewDescriptor;
import org.apache.derby.iapi.store.access.TransactionController;

/**
 * The dependency manager tracks needs that dependents have of providers.
 * <p>
 * A dependency can be either persistent or non-persistent. Persistent
 * dependencies are stored in the data dictionary, and non-persistent
 * dependencies are stored within the dependency manager itself (in memory).
 * <p>
 * <em>Synchronization:</em> The need for synchronization is different depending
 * on whether the dependency is an in-memory dependency or a stored dependency.
 * When accessing and modifying in-memory dependencies, Java synchronization
 * must be used (specifically, we synchronize on {@code this}). When accessing
 * and modifying stored dependencies, which are stored in the data dictionary,
 * we expect that the locking protocols will provide the synchronization needed.
 * Note that stored dependencies should not be accessed while holding the
 * monitor of {@code this}, as this may result in deadlocks. So far the need
 * for synchronization across both in-memory and stored dependencies hasn't
 * occurred.
 */

public class BasicDependencyManager implements DependencyManager {
  
    /**
     * DataDictionary for this database.
     */
    private final DataDictionary dd;

    /**
     * Map of in-memory dependencies for Dependents.
     * In-memory means that one or both of the Dependent
     * or Provider are non-persistent (isPersistent() returns false).
     * 
     * Key is the UUID of the Dependent (from getObjectID()).
     * Value is a List containing Dependency objects, each
     * of which links the same Dependent to a Provider.
     * Dependency objects in the List are unique.
     * 
     */
    //@GuardedBy("this")
    private final Map<UUID,List<Dependency>> dependents =
            new HashMap<UUID,List<Dependency>>();
    
    /**
     * Map of in-memory dependencies for Providers.
     * In-memory means that one or both of the Dependent
     * or Provider are non-persistent (isPersistent() returns false).
     * 
     * Key is the UUID of the Provider (from getObjectID()).
     * Value is a List containing Dependency objects, each
     * of which links the same Provider to a Dependent.
     * Dependency objects in the List are unique.
     * 
     */
    //@GuardedBy("this")
    private final Map<UUID,List<Dependency>> providers =
            new HashMap<UUID,List<Dependency>>();

	//
	// DependencyManager interface
	//

	/**
		adds a dependency from the dependent on the provider.
		This will be considered to be the default type of
		dependency, when dependency types show up.
		<p>
		Implementations of addDependency should be fast --
		performing a lot of extra actions to add a dependency would
		be a detriment.

		@param d the dependent
		@param p the provider

		@exception StandardException thrown if something goes wrong
	 */
	public void addDependency(Dependent d, Provider p, ContextManager cm) 
		throws StandardException {
		addDependency(d, p, cm, null);
	}

    /**
     * Adds the dependency to the data dictionary or the in-memory dependency
     * map.
     * <p>
     * The action taken is determined by whether the dependent and/or the
     * provider are persistent.
     *
     * @param d the dependent
     * @param p the provider
     * @param cm context manager
     * @param tc transaction controller, used to determine if any transactional
     *      operations should be attempted carried out in a nested transaction.
     *      If {@code tc} is {@code null}, the user transaction is used.
     * @throws StandardException if adding the dependency fails
     */
	private void addDependency(Dependent d, Provider p,
			ContextManager cm, TransactionController tc) 
		throws StandardException {

        // Dependencies are either in-memory or stored, but not both.
		if (! d.isPersistent() || ! p.isPersistent()) {
            addInMemoryDependency(d, p, cm);
		} else {
            addStoredDependency(d, p, cm, tc);
        }
    }

    /**
     * Adds the dependency as an in-memory dependency.
     *
     * @param d the dependent
     * @param p the provider
     * @param cm context manager
     * @throws StandardException if adding the dependency fails
     * @see #addStoredDependency
     */
    private synchronized void addInMemoryDependency(Dependent d, Provider p,
                                                    ContextManager cm)
            throws StandardException {
        Dependency dy = new BasicDependency(d, p);

        // Duplicate dependencies are not added to the lists.
        // If we find that the dependency we are trying to add in
        // one list is a duplicate, then it should be a duplicate in the
        // other list.
        boolean addedToProvs = false;
        boolean addedToDeps = addDependencyToTable(dependents,
                d.getObjectID(), dy);
        if (addedToDeps) {
            addedToProvs = addDependencyToTable(providers,
                    p.getObjectID(), dy);
        } else if (SanityManager.DEBUG) {
            // In debug builds, probe the provider table as well so the
            // consistency assertion below can compare both outcomes.
            addedToProvs = addDependencyToTable(providers,
                    p.getObjectID(), dy);
        }

        // Dependency should have been added to both or neither.
        if (SanityManager.DEBUG) {
            if (addedToDeps != addedToProvs) {
                SanityManager.THROWASSERT(
                        "addedToDeps (" + addedToDeps +
                        ") and addedToProvs (" +
                        addedToProvs + ") are expected to agree");
            }
        }

        // Add the dependency to the StatementContext, so that
        // it can be cleared on a pre-execution error.
        StatementContext sc = (StatementContext) cm.getContext(
                org.apache.derby.shared.common.reference.ContextId.LANG_STATEMENT);
        sc.addDependency(dy);
    }

    /**
     * Adds the dependency as a stored dependency.
     * <p>
     * We expect that transactional locking (in the data dictionary) is enough
     * to protect us from concurrent changes when adding stored dependencies.
     * Adding synchronization here and accessing the data dictionary (which is
     * transactional) may cause deadlocks.
     *
     * @param d the dependent
     * @param p the provider
     * @param cm context manager
     * @param tc transaction controller (may be {@code null})
     * @throws StandardException if adding the dependency fails
     * @see #addInMemoryDependency
     */
    private void addStoredDependency(Dependent d, Provider p,
                                     ContextManager cm,
                                     TransactionController tc)
            throws StandardException {
        LanguageConnectionContext lcc = getLanguageConnectionContext(cm);
        // tc == null means do it in the user transaction
        TransactionController tcToUse =
                (tc == null) ? lcc.getTransactionExecute() : tc;

        // Call the DataDictionary to store the dependency.
        dd.addDescriptor(new DependencyDescriptor(d, p), null,
                         DataDictionary.SYSDEPENDS_CATALOG_NUM, true,
                         tcToUse);
    }

	/**
		drops a single dependency

		@param d the dependent
		@param p the provider

		@exception StandardException thrown if something goes wrong
	 */
	private void dropDependency(LanguageConnectionContext lcc, Dependent d, Provider p) throws StandardException
	{
		if (SanityManager.DEBUG) {
			// right now, this routine isn't called for in-memory dependencies
			if (! d.isPersistent() || ! p.isPersistent())
			{
				SanityManager.NOTREACHED();
			}
		}

		DependencyDescriptor dependencyDescriptor = new DependencyDescriptor(d, p);

		dd.dropStoredDependency( dependencyDescriptor, 
								 lcc.getTransactionExecute() );
	}


	/**
		mark all dependencies on the named provider as invalid.
		When invalidation types show up, this will use the default
		invalidation type. The dependencies will still exist once
		they are marked invalid; clearDependencies should be used
		to remove dependencies that a dependent has or provider gives.
		<p>
		Implementations of this can take a little time, but are not
		really expected to recompile things against any changes
		made to the provider that caused the invalidation. The
		dependency system makes no guarantees about the state of
		the provider -- implementations can call this before or
		after actually changing the provider to its new state.
		<p>
		Implementations should throw StandardException
		if the invalidation should be disallowed.

		@param p the provider
		@param action	The action causing the invalidate

		@exception StandardException thrown if unable to make it invalid
	 */
	public void invalidateFor(Provider p, int action,
				  LanguageConnectionContext lcc)
		 throws StandardException
	{
		/*
		** Non-persistent dependencies are stored in memory, and need to
		** use "synchronized" to ensure their lists don't change while
		** the invalidation is taking place.  Persistent dependencies are
		** stored in the data dictionary, and we should *not* do anything
		** transactional (like reading from a system table) from within
		** a synchronized method, as it could cause deadlock.
		**
		** Presumably, the transactional locking in the data dictionary
		** is enough to protect us, so that we don't have to put any
		** synchronization in the DependencyManager.
		*/
		if (p.isPersistent())
			coreInvalidateFor(p, action, lcc);
		else {
			synchronized (this) {
				coreInvalidateFor(p, action, lcc);
			}
		}
	}

	/**
	 * A version of invalidateFor that does not provide synchronization among
	 * invalidators.
	 *
	 * @param p the provider
	 * @param action the action causing the invalidation
	 * @param lcc language connection context
	 *
	 * @throws StandardException if something goes wrong
	 */
	private void coreInvalidateFor(Provider p, int action,
								   LanguageConnectionContext lcc)
			throws StandardException
	{
        List<Dependency> list = getDependents(p);
        if (list.isEmpty()) {
			return;
		}


		// affectedCols is passed in from table descriptor provider to indicate
		// which columns it cares; subsetCols is affectedCols' intersection
		// with column bit map found in the provider of SYSDEPENDS line to
		// find out which columns really matter. If SYSDEPENDS line's
		// dependent is view (or maybe others), provider is table, yet it
		// doesn't have column bit map because the view was created in a
		// previous version of server which doesn't support column dependency,
		// and we really want it to have (such as in drop column), in any case
		// if we passed in table descriptor to this function with a bit map,
		// we really need this, we generate the bitmaps on the fly and update
		// SYSDEPENDS
		//
		// Note: Since the "previous version of server" mentioned above must
		// be a version that predates Derby, and we don't support upgrade from
		// those versions, we no longer have code to generate the column
		// dependency list on the fly. Instead, an assert has been added to
		// verify that we always have a column bitmap in SYSDEPENDS if the
		// affectedCols bitmap is non-null.
		FormatableBitSet affectedCols = null, subsetCols = null;
		if (p instanceof TableDescriptor)
		{
			affectedCols = ((TableDescriptor) p).getReferencedColumnMap();
			if (affectedCols != null)
				subsetCols = new FormatableBitSet(affectedCols.getLength());
		}

		{
			StandardException noInvalidate = null;
			// We cannot use an iterator here as the invalidations can remove
			// entries from this list. 
			for (int ei = list.size() - 1; ei >= 0; ei--)
			{
				if (ei >= list.size())
					continue;
				Dependency dependency = list.get(ei);

				Dependent dep = dependency.getDependent();

				if (affectedCols != null)
				{
					TableDescriptor td = (TableDescriptor) dependency.getProvider();
					FormatableBitSet providingCols = td.getReferencedColumnMap();
					if (providingCols == null)
					{
						if (dep instanceof ViewDescriptor)
						{
                            // If the table descriptor that was passed in had a
                            // column bit map, so should the provider's table
                            // descriptor. Views that were created with a
                            // database version that predates Derby could lack
                            // a bitmap in the provider and needed to
                            // reconstruct it here by parsing and binding the
                            // original CREATE VIEW statement. However, since
                            // we don't support upgrade from pre-Derby versions,
                            // this code was removed as part of DERBY-6169.
                            if (SanityManager.DEBUG) {
                                SanityManager.THROWASSERT("Expected view to " +
                                        "have referenced column bitmap");
                            }
						} // if dep instanceof ViewDescriptor
						else
							((TableDescriptor) p).setReferencedColumnMap(null);
					} // if providingCols == null
					else
					{
						subsetCols.copyFrom( affectedCols );
						subsetCols.and(providingCols);
						if (subsetCols.anySetBit() == -1)
							continue;
						((TableDescriptor) p).setReferencedColumnMap(subsetCols);
					}
				}

				// generate a list of invalidations that fail.
				try {
					dep.prepareToInvalidate(p, action, lcc);
				} catch (StandardException sqle) {

					if (noInvalidate == null) {
						noInvalidate = sqle;
					} else {
						try {
							sqle.initCause(noInvalidate);
							noInvalidate = sqle;
						} catch (IllegalStateException ise) {
							// We weren't able to chain the exceptions. That's
							// OK, since we always have the first exception we
							// caught. Just skip the current exception.
						}
					}
				}

				if (noInvalidate == null) {

					if (affectedCols != null)
						((TableDescriptor) p).setReferencedColumnMap(affectedCols);

					// REVISIT: future impl will want to mark the individual
					// dependency as invalid as well as the dependent...
					dep.makeInvalid(action, lcc);
				}
			}

			if (noInvalidate != null)
				throw noInvalidate;
		}
	}

	/**
		Erases all of the dependencies the dependent has, be they
		valid or invalid, of any dependency type.  This action is
		usually performed as the first step in revalidating a
		dependent; it first erases all the old dependencies, then
		revalidates itself generating a list of new dependencies,
		and then marks itself valid if all its new dependencies are
		valid.
		<p>
		There might be a future want to clear all dependencies for
		a particular provider, e.g. when destroying the provider.
		However, at present, they are assumed to stick around and
		it is the responsibility of the dependent to erase them when
		revalidating against the new version of the provider.
		<p>
		clearDependencies will delete dependencies if they are
		stored; the delete is finalized at the next commit.

		@param d the dependent
	 *
	 * @exception StandardException	Thrown on failure
	 */
	public void clearDependencies(LanguageConnectionContext lcc, Dependent d) throws StandardException {
		clearDependencies(lcc, d, null);
	}

	/**
	 * @inheritDoc 
	 */
	public void clearDependencies(LanguageConnectionContext lcc,
									Dependent d, TransactionController tc) throws StandardException {
		UUID id = d.getObjectID();
		// Remove all the stored dependencies.
		if (d.isPersistent()) {
			boolean wait = (tc == null);
			
			dd.dropDependentsStoredDependencies(id, 
							((wait)?lcc.getTransactionExecute():tc),
							wait);
		}

		// Now remove the in-memory dependencies, if any.
		synchronized(this) {
			List<Dependency> deps = dependents.get(id);
			if (deps != null) {
				// go through the list notifying providers to remove
				// the dependency from their lists
				for (Dependency dy : deps) {
					clearProviderDependency(dy.getProviderKey(), dy);
				}
				dependents.remove(id);
			}
		}
	}

	/**
	 * Clear the specified in memory dependency.
	 * This is useful for clean-up when an exception occurs.
	 * (We clear all in-memory dependencies added in the current
	 * StatementContext.)
	 */
	public synchronized void clearInMemoryDependency(Dependency dy) {
        final UUID deptId = dy.getDependent().getObjectID();
        final UUID provId = dy.getProviderKey();
        List<Dependency> deps = dependents.get(deptId);

		// NOTE - this is a NEGATIVE Sanity mode check, in sane mode we continue
		// to ensure the dependency manager is consistent.
		if (!SanityManager.DEBUG) {
			// dependency has already been removed
			if (deps == null)
				return;
		}

		List<Dependency> provs = providers.get(provId);

		if (SanityManager.DEBUG)
		{
			// if both are null then everything is OK
			if ((deps != null) || (provs != null)) {

				// ensure that the Dependency dy is either
				// in both lists or in neither. Even if dy
				// is out of the list we can have non-null
				// deps and provs here because other dependencies
				// with the same providers or dependents can exist

				//
				int depCount = 0;
				if (deps != null) {
					for (int ci = 0; ci < deps.size(); ci++) {
						if (dy.equals(deps.get(ci)))
							depCount++;
					}
				}

				int provCount = 0;
				if (provs != null) {
					for (int ci = 0; ci < provs.size(); ci++) {
						if (dy.equals(provs.get(ci)))
							provCount++;
					}
				}

				SanityManager.ASSERT(depCount == provCount,
						"Dependency count mismatch count in deps: " + depCount +
						", count in provs " + provCount +
						", dy.getDependent().getObjectID() = " + deptId +
						", dy.getProvider().getObjectID() = " + provId);
			}

			// dependency has already been removed,
			// matches code that is protected by !DEBUG above
			if (deps == null)
				return;
		}

		// dependency has already been removed
		if (provs == null)
			return;

		deps.remove(dy);
		if (deps.isEmpty())
			dependents.remove(deptId);
		provs.remove(dy);
		if (provs.isEmpty())
			providers.remove(provId);
	}

	/**
	 * @see DependencyManager#getPersistentProviderInfos
	 *
	 * @exception StandardException		Thrown on error
	 */
	public ProviderInfo[] getPersistentProviderInfos(Dependent dependent)
			throws StandardException {
		List<Provider> provs = getProviders(dependent);
		if (provs.isEmpty())
		{
			return EMPTY_PROVIDER_INFO;
		}

        List<ProviderInfo> pih = new ArrayList<ProviderInfo>();
        for (Provider p : provs)
		{
			if (p.isPersistent())
			{
				pih.add(new BasicProviderInfo(
									p.getObjectID(),
									p.getDependableFinder(),
									p.getObjectName()
									));
			}
		}

		// No cast needed: List.toArray(T[]) is already typed.
		return pih.toArray(EMPTY_PROVIDER_INFO);
	}

	private static final ProviderInfo[] EMPTY_PROVIDER_INFO = new ProviderInfo[0];

	/**
	 * @see DependencyManager#getPersistentProviderInfos
	 *
	 * @exception StandardException		Thrown on error
	 */
	public ProviderInfo[] getPersistentProviderInfos(ProviderList pl)
			throws StandardException {
		Enumeration e = pl.elements();
		int numProviders = 0;
		ProviderInfo[] retval;

		/*
		** We make 2 passes - the first to count the number of persistent
 		** providers and the second to populate the array of ProviderInfos.
		*/
		while (e != null && e.hasMoreElements())
		{
			Provider prov = (Provider) e.nextElement();

			if (prov.isPersistent())
			{
				numProviders++;
			}
		}

		e = pl.elements();
		retval = new ProviderInfo[numProviders];
		int piCtr = 0;
		while (e != null && e.hasMoreElements())
		{
			Provider prov = (Provider) e.nextElement();

			if (prov.isPersistent())
			{
				retval[piCtr++] = new BasicProviderInfo(
									prov.getObjectID(),
									prov.getDependableFinder(),
									prov.getObjectName()
									);
			}
		}

		return retval;
	}

	/**
	 * @see DependencyManager#clearColumnInfoInProviders
	 *
	 * @param pl		provider list
	 *
	 * @exception StandardException		Thrown on error
	 */
	public void clearColumnInfoInProviders(ProviderList pl)
			throws StandardException
	{
		Enumeration e = pl.elements();
		while (e.hasMoreElements())
		{
			Provider pro = (Provider) e.nextElement();
			if (pro instanceof TableDescriptor)
				((TableDescriptor) pro).setReferencedColumnMap(null);
		}
	}

	/**
	 * Copy dependencies from one dependent to another.
	 *
	 * @param copy_From the dependent to copy from	
	 * @param copyTo the dependent to copy to
	 * @param persistentOnly only copy persistent dependencies
	 * @param cm		Current ContextManager
	 *
	 * @exception StandardException		Thrown on error.
	 */
	public void copyDependencies(
									Dependent	copy_From, 
									Dependent	copyTo,
									boolean		persistentOnly,
									ContextManager cm) throws StandardException
	{
		copyDependencies(copy_From, copyTo, persistentOnly, cm, null);
	}
	
	/**
	 * @inheritDoc 
	 */
	public void copyDependencies(
									Dependent	copy_From, 
									Dependent	copyTo,
									boolean		persistentOnly,
									ContextManager cm,
									TransactionController tc)
		throws StandardException
	{
		for (Provider provider : getProviders(copy_From)) {
			if (!persistentOnly || provider.isPersistent()) {
				this.addDependency(copyTo, provider, cm, tc);
			}
		}
	}

	/**
	 * Returns a string representation of the SQL action, hence no
	 * need to internationalize, which is causing the invocation
	 * of the Dependency Manager.
	 *
	 * @param action		The action
	 *
	 * @return String	The String representation
	 */
	public String getActionString(int action)
	{
		switch (action)
		{
			case ALTER_TABLE:
				return "ALTER TABLE";

			case RENAME: //for rename table and column
				return "RENAME";
			
			case RENAME_INDEX:
				return "RENAME INDEX";

			case COMPILE_FAILED:
				return "COMPILE FAILED";

			case DROP_TABLE:
				return "DROP TABLE";

			case DROP_INDEX:
				return "DROP INDEX";

			case DROP_VIEW:
				return "DROP VIEW";

			case CREATE_INDEX:
				return "CREATE INDEX";

			case ROLLBACK:
				return "ROLLBACK";

			case CHANGED_CURSOR:
				return "CHANGED CURSOR";

			case CREATE_CONSTRAINT:
				return "CREATE CONSTRAINT";

			case DROP_CONSTRAINT:
				return "DROP CONSTRAINT";

			case DROP_METHOD_ALIAS:
				return "DROP ROUTINE";

			case PREPARED_STATEMENT_RELEASE:
				return "PREPARED STATEMENT RELEASE";

			case DROP_SPS:
				return "DROP STORED PREPARED STATEMENT";

			case USER_RECOMPILE_REQUEST:
				return "USER REQUESTED INVALIDATION";

			case BULK_INSERT:
				return "BULK INSERT";

			case CREATE_VIEW:
				return "CREATE_VIEW";

			case DROP_JAR:
				return "DROP_JAR";

			case REPLACE_JAR:
				return "REPLACE_JAR";

			case SET_CONSTRAINTS_ENABLE:
				return "SET_CONSTRAINTS_ENABLE";

			case SET_CONSTRAINTS_DISABLE:
				return "SET_CONSTRAINTS_DISABLE";

			case INTERNAL_RECOMPILE_REQUEST:
				return "INTERNAL RECOMPILE REQUEST";

			case CREATE_TRIGGER:
				return "CREATE TRIGGER";

			case DROP_TRIGGER:
				return "DROP TRIGGER";

			case SET_TRIGGERS_ENABLE:
				return "SET TRIGGERS ENABLED";

			case SET_TRIGGERS_DISABLE:
				return "SET TRIGGERS DISABLED";

			case MODIFY_COLUMN_DEFAULT:
				return "MODIFY COLUMN DEFAULT";

			case COMPRESS_TABLE:
				return "COMPRESS TABLE";

			case DROP_COLUMN:
				return "DROP COLUMN";

			case DROP_COLUMN_RESTRICT:
				return "DROP COLUMN RESTRICT";

			case DROP_STATISTICS:
				return "DROP STATISTICS";

			case UPDATE_STATISTICS:
				return "UPDATE STATISTICS";

			case TRUNCATE_TABLE:
				return "TRUNCATE TABLE";

			case DROP_SYNONYM:
				return "DROP SYNONYM";

			case REVOKE_PRIVILEGE:
				return "REVOKE PRIVILEGE";

			case REVOKE_PRIVILEGE_RESTRICT:
				return "REVOKE PRIVILEGE RESTRICT";

			case REVOKE_ROLE:
				return "REVOKE ROLE";

			case RECHECK_PRIVILEGES:
				return "RECHECK PRIVILEGES";

			case DROP_SEQUENCE:
				return "DROP SEQUENCE";

			case DROP_UDT:
				return "DROP TYPE";

			case DROP_AGGREGATE:
				return "DROP DERBY AGGREGATE";

			default:
				if (SanityManager.DEBUG)
				{
					SanityManager.THROWASSERT("getActionString() passed an invalid value (" + action + ")");
				}
				// NOTE: This is not internationalized because we should never
				// reach here.
				return "UNKNOWN";
		}
	}

	/**
	 * Count the number of active dependencies, both stored and in memory,
	 * in the system.
	 *
	 * @return int		The number of active dependencies in the system.

		@exception StandardException thrown if something goes wrong
	 */
	public int countDependencies()
		throws StandardException
	{
        // Add the stored dependencies.
        List<?> storedDeps = dd.getAllDependencyDescriptorsList();
        int numDependencies = storedDeps.size();

        synchronized (this) {
            // Count the in memory dependencies.
            for (List<Dependency> deps : dependents.values()) {
                numDependencies += deps.size();
            }
            for (List<Dependency> provs : providers.values()) {
                numDependencies += provs.size();
            }
        }
		return numDependencies;
	}

	//
	// class interface
	//

	/**
	 * Creates a dependency manager backed by the given data dictionary.
	 *
	 * @param dd the data dictionary used to store persistent dependencies
	 */
	public BasicDependencyManager(DataDictionary dd) {
        this.dd = dd;
	}

	//
	// class implementation
	//

	/**
	 * Add a new dependency to the specified table if it does not
	 * already exist in that table.
	 *
	 * @return boolean		Whether or not the dependency get added.
	 */
    private boolean addDependencyToTable(Map<UUID, List<Dependency>> table,
                                         UUID key, Dependency dy) {

        List<Dependency> deps = table.get(key);
		if (deps == null) {
            deps = new ArrayList<Dependency>();
			deps.add(dy);
			table.put(key, deps);
		}
		else {
			/* Make sure that we're not adding a duplicate dependency */
			UUID	provKey = dy.getProvider().getObjectID();
			UUID	depKey = dy.getDependent().getObjectID();

            for (ListIterator<Dependency> depsIT = deps.listIterator();
                    depsIT.hasNext(); ) {
                Dependency curDY = depsIT.next();

                //
                // no need to try to add a dependency on system objects
                // like builtin aggregates which implement
                // org.apache.derby.agg.Aggregator. those objects have no UUID anyway
                //
                if ( curDY.getProvider().getObjectID() == null ) { continue; }
                
				if (curDY.getProvider().getObjectID().equals(provKey) &&
					curDY.getDependent().getObjectID().equals(depKey))
				{
					return false;
				}
			}

			deps.add(dy);
		}

		if (SanityManager.DEBUG) {

			if (SanityManager.DEBUG_ON("memoryLeakTrace")) {

				if (table.size() > 100)
					System.out.println("memoryLeakTrace:BasicDependencyManager:table " + table.size());
				if (deps.size() > 50)
					System.out.println("memoryLeakTrace:BasicDependencyManager:deps " + deps.size());
			}
		}

		return true;
	}

	/**
	 * removes a dependency for a given provider. assumes
	 * that the dependent removal is being dealt with elsewhere.
	 * Won't assume that the dependent only appears once in the list.
	 */
	//@GuardedBy("this")
	private void clearProviderDependency(UUID p, Dependency d) {
		List<?> deps = providers.get(p);

		if (deps == null)
			return;

		deps.remove(d);

		if (deps.isEmpty())
        {
			providers.remove(p);
        }
	}

	/**
	 * Turn a list of DependencyDescriptors into a list of Dependencies.
	 *
	 * @param storedList	The List of DependencyDescriptors representing
	 *						stored dependencies.
	 * @param providerForList The provider if this list is being created
	 *                        for a list of dependents. Null otherwise.
	 * 
	 * @return List		The converted List
	 *
	 * @exception StandardException thrown if something goes wrong
	 */
    private List<Dependency> getDependencyDescriptorList(
            List<DependencyDescriptor> storedList,
            Provider providerForList)
		throws StandardException
	{
        List<Dependency> retval = new ArrayList<Dependency>();
        if (!storedList.isEmpty())
		{
			/* For each DependencyDescriptor, we need to instantiate
			 * object descriptors of the appropriate type for both
			 * the dependent and provider, create a Dependency with
			 * that Dependent and Provider and substitute the Dependency
			 * back into the same place in the List
			 * so that the call gets an enumerations of Dependencys.
			 */
            for (DependencyDescriptor depDesc : storedList)
			{
				Dependent 			tempD;
				Provider 			tempP;
				DependableFinder	finder;

				finder = depDesc.getDependentFinder();
				tempD = (Dependent) finder.getDependable(dd, depDesc.getUUID());

				if (providerForList != null)
				{
					// Use the provider being passed in.
					tempP = providerForList;
					
					// Sanity check the object identifiers match.
					if (SanityManager.DEBUG) {
						if (!tempP.getObjectID().equals(depDesc.getProviderID()))
						{
							SanityManager.THROWASSERT("mismatch providers");
						}
					}
				}
				else
				{
					finder = depDesc.getProviderFinder();
					tempP = (Provider) finder.getDependable(dd, depDesc.getProviderID() );
					
				}

                retval.add( new BasicDependency( tempD, tempP ) );
			}
		}

		return retval;
	}

	/**
	 * Returns the LanguageConnectionContext to use.
	 *
	 * @param cm	Current ContextManager
	 *
	 * @return LanguageConnectionContext	The LanguageConnectionContext to use.
	 */
	private LanguageConnectionContext getLanguageConnectionContext(ContextManager cm)
	{
		// find the language context.
		return (LanguageConnectionContext) cm.getContext(LanguageConnectionContext.CONTEXT_ID);
	}

    /**
     * Returns a list of all providers that this dependent has (even invalid
     * ones). Includes all dependency types.
     *
     * @param d the dependent
     * @return A list of providers (possibly empty).
     * @throws StandardException thrown if something goes wrong
     */
    private List<Provider> getProviders (Dependent d) throws StandardException {
        List<Provider> provs = new ArrayList<Provider>();
        synchronized (this) {
            List<Dependency> deps = dependents.get(d.getObjectID());
            if (deps != null) {
                for (Dependency dy : deps) {
                    provs.add(dy.getProvider());
                }
            }
        }

        // If the dependent is persistent, we have to take stored dependencies
        // into consideration as well.
        if (d.isPersistent()) {
            List<Dependency> storedList = getDependencyDescriptorList(
                            dd.getDependentsDescriptorList(
                                d.getObjectID().toString()),
                            (Provider) null);
            for (Dependency dy : storedList) {
                provs.add(dy.getProvider());
            }
        }
        return provs;
    }

    /**
     * Returns an enumeration of all dependencies that this
     * provider is supporting for any dependent at all (even
     * invalid ones). Includes all dependency types.
     *
     * @param p the provider
     * @return A list of dependents (possibly empty).
     * @throws StandardException if something goes wrong
     */
    private List<Dependency> getDependents (Provider p)
            throws StandardException {
        List<Dependency> deps = new ArrayList<Dependency>();
        synchronized (this) {
            List<Dependency> memDeps = providers.get(p.getObjectID());
            if (memDeps != null) {
                deps.addAll(memDeps);
            }
        }

        // If the provider is persistent, then we have to add providers for
        // stored dependencies as well.
        if (p.isPersistent()) {
            List<Dependency> storedList = getDependencyDescriptorList(
                            dd.getProvidersDescriptorList(
                                p.getObjectID().toString()),
                            p);
            deps.addAll(storedList);
        }
        return deps;
    }
}
apache/phoenix
36,989
phoenix-core-client/src/main/java/org/apache/phoenix/query/QueryConstants.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.phoenix.query; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.APPEND_ONLY_SCHEMA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARG_POSITION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ARRAY_SIZE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.AUTO_PARTITION_SEQ; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.BASE_COLUMN_COUNT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.BIND_PARAMETERS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.BUFFER_LENGTH; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CACHE_SIZE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CDC_INCLUDE_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CHANGE_DETECTION_ENABLED; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CHAR_OCTET_LENGTH; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CLASS_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CLIENT_IP; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_COUNT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_DEF; import 
static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_FAMILY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_QUALIFIER; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_QUALIFIER_COUNTER; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.COLUMN_SIZE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CURRENT_VALUE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.CYCLE_FLAG; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DATA_TABLE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DATA_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DECIMAL_DIGITS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DEFAULT_COLUMN_FAMILY_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DEFAULT_VALUE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.DISABLE_WAL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ENCODING_SCHEME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.EXCEPTION_TRACE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.EXPLAIN_PLAN; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.EXTERNAL_SCHEMA_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.FUNCTION_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.GLOBAL_SCAN_DETAILS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.GUIDE_POSTS_ROW_COUNT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.GUIDE_POSTS_WIDTH; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.GUIDE_POST_KEY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IMMUTABLE_ROWS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IMMUTABLE_STORAGE_SCHEME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INCREMENT_BY; import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_DISABLE_TIMESTAMP; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_STATE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.INDEX_WHERE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_ARRAY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_AUTOINCREMENT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_CONSTANT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_NAMESPACE_MAPPED; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_NULLABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_ROW_TIMESTAMP; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_STRICT_TTL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.IS_VIEW_REFERENCED; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.JAR_PATH; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.KEY_SEQ; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LAST_DDL_TIMESTAMP; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LAST_STATS_UPDATE_TIME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LIMIT_REACHED_FLAG; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LINK_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LOGICAL_PARENT_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.LOGICAL_TABLE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MAX_VALUE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MIN_VALUE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.MULTI_TENANT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NEW_METADATA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NEW_PHYS_TABLE_NAME; import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NO_OF_RESULTS_ITERATED; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NULLABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NUM_ARGS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.NUM_PREC_RADIX; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.OLD_METADATA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ORDINAL_POSITION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARENT_PARTITION_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARENT_PARTITION_START_TIME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARTITION_END_KEY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARTITION_END_TIME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARTITION_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARTITION_START_KEY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PARTITION_START_TIME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PHOENIX_TTL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PHOENIX_TTL_HWM; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PHYSICAL_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PHYSICAL_TABLE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.PK_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.QUERY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.QUERY_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.QUERY_STATUS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.REF_GENERATION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.REMARKS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.RETURN_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.ROW_KEY_MATCHER; import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SALT_BUCKETS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SCAN_METRICS_JSON; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SCHEMA_VERSION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SCOPE_CATALOG; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SCOPE_SCHEMA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SCOPE_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SELF_REFERENCING_COL_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SEQUENCE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SEQUENCE_SCHEMA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SORT_ORDER; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SOURCE_DATA_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SQL_DATA_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SQL_DATETIME_SUB; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.START_TIME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.START_WITH; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.STORE_NULLS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.STREAMING_TOPIC_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.STREAM_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.STREAM_STATUS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.STREAM_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_SCHEMA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CATALOG_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CDC_STREAM_STATUS_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CDC_STREAM_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_CHILD_LINK_TABLE; import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_FUNCTION_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_LOG_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_MUTEX_TABLE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_STATS_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_TASK_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.SYSTEM_TRANSFORM_TABLE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SEQ_NUM; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_DATA; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_END_TS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_PRIORITY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_STATUS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_TABLE_TTL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_TS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TASK_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TENANT_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSACTIONAL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSACTION_PROVIDER; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_FUNCTION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_JOB_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_LAST_STATE_TS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_RETRY_COUNT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_START_TS; import static 
org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_STATUS; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_TABLE_TTL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TRANSFORM_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TTL; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TTL_FOR_MUTEX; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE_NAME; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TYPE_SEQUENCE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.UPDATE_CACHE_FREQUENCY; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.USER; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.USE_STATS_FOR_PARALLELIZATION; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_CONSTANT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_INDEX_ID; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_INDEX_ID_DATA_TYPE; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_STATEMENT; import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.VIEW_TYPE; import java.math.BigDecimal; import java.nio.charset.StandardCharsets; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.util.Bytes; import org.apache.phoenix.coprocessorclient.MetaDataProtocol; import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr; import org.apache.phoenix.monitoring.MetricType; import org.apache.phoenix.schema.PName; import org.apache.phoenix.schema.PNameFactory; import org.apache.phoenix.schema.PTable.ImmutableStorageScheme; import org.apache.phoenix.schema.PTable.QualifierEncodingScheme; import org.apache.phoenix.schema.SortOrder; import org.apache.phoenix.schema.TableProperty; 
/** * Constants used during querying * @since 0.1 */ public interface QueryConstants { String NAME_SEPARATOR = "."; String NAMESPACE_SEPARATOR = ":"; String CHILD_VIEW_INDEX_NAME_SEPARATOR = "#"; byte[] NAMESPACE_SEPARATOR_BYTES = Bytes.toBytes(NAMESPACE_SEPARATOR); byte NAMESPACE_SEPARATOR_BYTE = NAMESPACE_SEPARATOR_BYTES[0]; String NAME_SEPARATOR_REGEX = "\\" + NAME_SEPARATOR; byte[] NAME_SEPARATOR_BYTES = Bytes.toBytes(NAME_SEPARATOR); byte NAME_SEPARATOR_BYTE = NAME_SEPARATOR_BYTES[0]; String NULL_DISPLAY_TEXT = "<null>"; long UNSET_TIMESTAMP = -1; enum JoinType { INNER, LEFT_OUTER } String SYSTEM_SCHEMA_NAME = "SYSTEM"; byte[] SYSTEM_SCHEMA_NAME_BYTES = Bytes.toBytes(SYSTEM_SCHEMA_NAME); String OFFSET_ROW_KEY = "_OFFSET_"; byte[] OFFSET_ROW_KEY_BYTES = Bytes.toBytes(OFFSET_ROW_KEY); String GROUPED_AGGREGATOR_VALUE = "_GA_"; byte[] GROUPED_AGGREGATOR_VALUE_BYTES = Bytes.toBytes(GROUPED_AGGREGATOR_VALUE); long AGG_TIMESTAMP = HConstants.LATEST_TIMESTAMP; /** * Key used for a single row aggregation where there is no group by */ byte[] UNGROUPED_AGG_ROW_KEY = Bytes.toBytes("a"); /** BEGIN Set of reserved column qualifiers **/ String RESERVED_COLUMN_FAMILY = "_v"; byte[] RESERVED_COLUMN_FAMILY_BYTES = Bytes.toBytes(RESERVED_COLUMN_FAMILY); byte[] VALUE_COLUMN_FAMILY = RESERVED_COLUMN_FAMILY_BYTES; byte[] VALUE_COLUMN_QUALIFIER = QualifierEncodingScheme.FOUR_BYTE_QUALIFIERS.encode(1); byte[] ARRAY_VALUE_COLUMN_FAMILY = RESERVED_COLUMN_FAMILY_BYTES; byte[] ARRAY_VALUE_COLUMN_QUALIFIER = QualifierEncodingScheme.FOUR_BYTE_QUALIFIERS.encode(2); PName SINGLE_COLUMN_NAME = PNameFactory.newNormalizedName("s"); PName SINGLE_COLUMN_FAMILY_NAME = PNameFactory.newNormalizedName("s"); byte[] SINGLE_COLUMN = SINGLE_COLUMN_NAME.getBytes(); byte[] SINGLE_COLUMN_FAMILY = SINGLE_COLUMN_FAMILY_NAME.getBytes(); /** END Set of reserved column qualifiers **/ byte[] TRUE = new byte[] { 1 }; /** * The priority property for an hbase table. 
This is already in HTD, but older versions of HBase * do not have this, so we re-defined it here. Once Phoenix is HBase-1.3+, we can remote. */ String PRIORITY = "PRIORITY"; /** * Separator used between variable length keys for a composite key. Variable length data types may * not use this byte value. */ byte SEPARATOR_BYTE = (byte) 0; byte[] SEPARATOR_BYTE_ARRAY = new byte[] { SEPARATOR_BYTE }; byte DESC_SEPARATOR_BYTE = SortOrder.invert(SEPARATOR_BYTE); byte[] DESC_SEPARATOR_BYTE_ARRAY = new byte[] { DESC_SEPARATOR_BYTE }; byte[] VARBINARY_ENCODED_SEPARATOR_BYTES = new byte[] { 0x00, 0x01 }; byte[] DESC_VARBINARY_ENCODED_SEPARATOR_BYTES = SortOrder.invert(VARBINARY_ENCODED_SEPARATOR_BYTES, 0, 2); byte[] ROW_KEY_VAL_ACCESSOR_NEW_FIELDS_SEPARATOR = Bytes.toBytes("_ROW_KEY_VALUE_ACCESSOR_ENCODED_SEPARATOR_"); String DEFAULT_COPROCESS_JAR_NAME = "phoenix-[version]-server.jar"; int MILLIS_IN_DAY = 1000 * 60 * 60 * 24; String EMPTY_COLUMN_NAME = "_0"; // For transactional tables, the value of our empty key value can no longer be empty // since empty values are treated as column delete markers. 
byte[] EMPTY_COLUMN_BYTES = Bytes.toBytes(EMPTY_COLUMN_NAME); ImmutableBytesPtr EMPTY_COLUMN_BYTES_PTR = new ImmutableBytesPtr(EMPTY_COLUMN_BYTES); Integer ENCODED_EMPTY_COLUMN_NAME = 0; byte[] ENCODED_EMPTY_COLUMN_BYTES = QualifierEncodingScheme.FOUR_BYTE_QUALIFIERS.encode(ENCODED_EMPTY_COLUMN_NAME); String EMPTY_COLUMN_VALUE = "x"; byte[] EMPTY_COLUMN_VALUE_BYTES = Bytes.toBytes(EMPTY_COLUMN_VALUE); ImmutableBytesPtr EMPTY_COLUMN_VALUE_BYTES_PTR = new ImmutableBytesPtr(EMPTY_COLUMN_VALUE_BYTES); byte[] ENCODED_EMPTY_COLUMN_VALUE_BYTES = Bytes.toBytes(EMPTY_COLUMN_VALUE); String DEFAULT_COLUMN_FAMILY = "0"; byte[] DEFAULT_COLUMN_FAMILY_BYTES = Bytes.toBytes(DEFAULT_COLUMN_FAMILY); ImmutableBytesPtr DEFAULT_COLUMN_FAMILY_BYTES_PTR = new ImmutableBytesPtr(DEFAULT_COLUMN_FAMILY_BYTES); byte VERIFIED_BYTE = 1; byte UNVERIFIED_BYTE = 2; byte[] VERIFIED_BYTES = new byte[] { VERIFIED_BYTE }; byte[] UNVERIFIED_BYTES = new byte[] { UNVERIFIED_BYTE }; ImmutableBytesPtr VERIFIED_BYTES_PTR = new ImmutableBytesPtr(VERIFIED_BYTES); ImmutableBytesPtr UNVERIFIED_BYTES_PTR = new ImmutableBytesPtr(UNVERIFIED_BYTES); // column qualifier of the single key value used to store all columns for the // COLUMNS_STORED_IN_SINGLE_CELL storage scheme String SINGLE_KEYVALUE_COLUMN_QUALIFIER = "1"; byte[] SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES = Bytes.toBytes(SINGLE_KEYVALUE_COLUMN_QUALIFIER); ImmutableBytesPtr SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES_PTR = new ImmutableBytesPtr(SINGLE_KEYVALUE_COLUMN_QUALIFIER_BYTES); String LOCAL_INDEX_COLUMN_FAMILY_PREFIX = "L#"; byte[] LOCAL_INDEX_COLUMN_FAMILY_PREFIX_BYTES = Bytes.toBytes(LOCAL_INDEX_COLUMN_FAMILY_PREFIX); String DEFAULT_LOCAL_INDEX_COLUMN_FAMILY = LOCAL_INDEX_COLUMN_FAMILY_PREFIX + DEFAULT_COLUMN_FAMILY; byte[] DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES = Bytes.toBytes(DEFAULT_LOCAL_INDEX_COLUMN_FAMILY); ImmutableBytesPtr DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES_PTR = new ImmutableBytesPtr(DEFAULT_LOCAL_INDEX_COLUMN_FAMILY_BYTES); String 
ALL_FAMILY_PROPERTIES_KEY = ""; String SYSTEM_TABLE_PK_NAME = "pk"; double MILLIS_TO_NANOS_CONVERTOR = Math.pow(10, 6); BigDecimal BD_MILLIS_NANOS_CONVERSION = BigDecimal.valueOf(MILLIS_TO_NANOS_CONVERTOR); BigDecimal BD_MILLIS_IN_DAY = BigDecimal.valueOf(QueryConstants.MILLIS_IN_DAY); int MAX_ALLOWED_NANOS = 999999999; int DIVERGED_VIEW_BASE_COLUMN_COUNT = -100; int BASE_TABLE_BASE_COLUMN_COUNT = -1; // String constants for the server side class names, so that we don't need the server jar // on the client side final String METADATA_SPLIT_POLICY_CLASSNAME = "org.apache.phoenix.schema.MetaDataSplitPolicy"; final String SYSTEM_STATS_SPLIT_POLICY_CLASSNAME = "org.apache.phoenix.schema.SystemStatsSplitPolicy"; final String SYSTEM_FUNCTION_SPLIT_POLICY_CLASSNAME = "org.apache.phoenix.schema.SystemFunctionSplitPolicy"; final String SYSTEM_TASK_SPLIT_POLICY_CLASSNAME = "org.apache.phoenix.schema.SystemTaskSplitPolicy"; final String INDEX_REGION_SPLIT_POLICY_CLASSNAME = "org.apache.phoenix.hbase.index.IndexRegionSplitPolicy"; final String GLOBAL_INDEX_CHECKER_CLASSNAME = "org.apache.phoenix.index.GlobalIndexChecker"; final String INDEX_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.hbase.index.IndexRegionObserver"; final String PHOENIX_TRANSACTIONAL_INDEXER_CLASSNAME = "org.apache.phoenix.index.PhoenixTransactionalIndexer"; final String LOCAL_INDEX_SPLITTER_CLASSNAME = "org.apache.hadoop.hbase.regionserver.LocalIndexSplitter"; final String INDEXER_CLASSNAME = "org.apache.phoenix.hbase.index.Indexer"; final String SCAN_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.ScanRegionObserver"; final String UNGROUPED_AGGREGATE_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver"; final String GROUPED_AGGREGATE_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver"; final String SERVER_CACHING_ENDPOINT_IMPL_CLASSNAME = "org.apache.phoenix.coprocessor.ServerCachingEndpointImpl"; final String 
MULTI_ROW_MUTATION_ENDPOINT_CLASSNAME = "org.apache.hadoop.hbase.coprocessor.MultiRowMutationEndpoint"; final String INDEX_HALF_STORE_FILE_READER_GENERATOR_CLASSNAME = "org.apache.hadoop.hbase.regionserver.IndexHalfStoreFileReaderGenerator"; final String META_DATA_ENDPOINT_IMPL_CLASSNAME = "org.apache.phoenix.coprocessor.MetaDataEndpointImpl"; final String META_DATA_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.MetaDataRegionObserver"; final String SEQUENCE_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.SequenceRegionObserver"; final String TASK_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.TaskRegionObserver"; final String TASK_META_DATA_ENDPOINT_CLASSNAME = "org.apache.phoenix.coprocessor.TaskMetaDataEndpoint"; final String CHILD_LINK_META_DATA_ENDPOINT_CLASSNAME = "org.apache.phoenix.coprocessor.ChildLinkMetaDataEndpoint"; final String PHOENIX_TTL_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.PhoenixTTLRegionObserver"; final String SYSTEM_CATALOG_REGION_OBSERVER_CLASSNAME = "org.apache.phoenix.coprocessor.SystemCatalogRegionObserver"; // custom TagType byte VIEW_MODIFIED_PROPERTY_TAG_TYPE = (byte) 70; String CDC_JSON_COL_NAME = "CDC JSON"; String CDC_EVENT_TYPE = "event_type"; String CDC_PRE_IMAGE = "pre_image"; String CDC_POST_IMAGE = "post_image"; String CDC_CHANGE_IMAGE = "change_image"; String CDC_UPSERT_EVENT_TYPE = "upsert"; String CDC_DELETE_EVENT_TYPE = "delete"; String SPLITS_FILE = "SPLITS_FILE"; String CDC_TTL_DELETE_EVENT_TYPE = "ttl_delete"; String CDC_IMAGE_CQ = "_CDC_IMG_"; byte[] CDC_IMAGE_CQ_BYTES = Bytes.toBytes(CDC_IMAGE_CQ); /** * We mark counter values 0 to 10 as reserved. Value 0 is used by * {@link #ENCODED_EMPTY_COLUMN_NAME}. Values 1-10 are reserved for special column qualifiers * returned by Phoenix co-processors. 
*/ int ENCODED_CQ_COUNTER_INITIAL_VALUE = 11; String CREATE_TABLE_METADATA = // Do not use IF NOT EXISTS as we sometimes catch the TableAlreadyExists // exception and add columns to the SYSTEM.TABLE dynamically. "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CATALOG_TABLE + "\"(\n" + // PK columns TENANT_ID + " VARCHAR NULL," + TABLE_SCHEM + " VARCHAR NULL," + TABLE_NAME + " VARCHAR NOT NULL," + COLUMN_NAME + " VARCHAR NULL," + // null for table row COLUMN_FAMILY + " VARCHAR NULL," + // using for CF to uniqueness for columns // Table metadata (will be null for column rows) TABLE_SEQ_NUM + " BIGINT," + TABLE_TYPE + " CHAR(1)," + PK_NAME + " VARCHAR," + COLUMN_COUNT + " INTEGER," + SALT_BUCKETS + " INTEGER," + DATA_TABLE_NAME + " VARCHAR," + INDEX_STATE + " CHAR(1),\n" + IMMUTABLE_ROWS + " BOOLEAN,\n" + VIEW_STATEMENT + " VARCHAR,\n" + DEFAULT_COLUMN_FAMILY_NAME + " VARCHAR,\n" + DISABLE_WAL + " BOOLEAN,\n" + MULTI_TENANT + " BOOLEAN,\n" + VIEW_TYPE + " UNSIGNED_TINYINT,\n" + VIEW_INDEX_ID + " BIGINT,\n" + VIEW_INDEX_ID_DATA_TYPE + " INTEGER,\n" + PHOENIX_TTL + " BIGINT,\n" + PHOENIX_TTL_HWM + " BIGINT,\n" + LAST_DDL_TIMESTAMP + " BIGINT, \n" + CHANGE_DETECTION_ENABLED + " BOOLEAN, \n" + SCHEMA_VERSION + " VARCHAR, \n" + EXTERNAL_SCHEMA_ID + " VARCHAR, \n" + STREAMING_TOPIC_NAME + " VARCHAR, \n" + INDEX_WHERE + " VARCHAR, \n" + CDC_INCLUDE_TABLE + " VARCHAR, \n" + TTL + " VARCHAR, \n" + ROW_KEY_MATCHER + " VARBINARY_ENCODED, \n" + IS_STRICT_TTL + " BOOLEAN, \n" + // Column metadata (will be null for table row) DATA_TYPE + " INTEGER," + COLUMN_SIZE + " INTEGER," + DECIMAL_DIGITS + " INTEGER," + NULLABLE + " INTEGER," + ORDINAL_POSITION + " INTEGER," + SORT_ORDER + " INTEGER," + ARRAY_SIZE + " INTEGER,\n" + VIEW_CONSTANT + " VARBINARY,\n" + IS_VIEW_REFERENCED + " BOOLEAN,\n" + KEY_SEQ + " SMALLINT,\n" + // Link metadata (only set on rows linking table to index or view) LINK_TYPE + " UNSIGNED_TINYINT,\n" + // Unused TYPE_NAME + " VARCHAR," + REMARKS + " 
VARCHAR," + SELF_REFERENCING_COL_NAME + " VARCHAR," + REF_GENERATION + " VARCHAR," + BUFFER_LENGTH + " INTEGER," + NUM_PREC_RADIX + " INTEGER," + COLUMN_DEF + " VARCHAR," + SQL_DATA_TYPE + " INTEGER," + SQL_DATETIME_SUB + " INTEGER," + CHAR_OCTET_LENGTH + " INTEGER," + IS_NULLABLE + " VARCHAR," + SCOPE_CATALOG + " VARCHAR," + SCOPE_SCHEMA + " VARCHAR," + SCOPE_TABLE + " VARCHAR," + SOURCE_DATA_TYPE + " SMALLINT," + IS_AUTOINCREMENT + " VARCHAR," + INDEX_TYPE + " UNSIGNED_TINYINT," + INDEX_DISABLE_TIMESTAMP + " BIGINT," + STORE_NULLS + " BOOLEAN," + BASE_COLUMN_COUNT + " INTEGER," + // Column metadata (will be null for table row) IS_ROW_TIMESTAMP + " BOOLEAN, " + TRANSACTIONAL + " BOOLEAN," + UPDATE_CACHE_FREQUENCY + " BIGINT," + IS_NAMESPACE_MAPPED + " BOOLEAN," + AUTO_PARTITION_SEQ + " VARCHAR," + APPEND_ONLY_SCHEMA + " BOOLEAN," + GUIDE_POSTS_WIDTH + " BIGINT," + COLUMN_QUALIFIER + " VARBINARY," + IMMUTABLE_STORAGE_SCHEME + " TINYINT, " + ENCODING_SCHEME + " TINYINT, " + COLUMN_QUALIFIER_COUNTER + " INTEGER, " + USE_STATS_FOR_PARALLELIZATION + " BOOLEAN, " + TRANSACTION_PROVIDER + " TINYINT, " + PHYSICAL_TABLE_NAME + " VARCHAR," + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + "," + TABLE_SCHEM + "," + TABLE_NAME + "," + COLUMN_NAME + "," + COLUMN_FAMILY + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + // Install split policy to prevent a tenant's metadata from being split across regions. 
TableDescriptorBuilder.SPLIT_POLICY + "='" + METADATA_SPLIT_POLICY_CLASSNAME + "',\n" + TRANSACTIONAL + "=" + Boolean.FALSE; String CREATE_STATS_TABLE_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_STATS_TABLE + "\"(\n" + // PK columns PHYSICAL_NAME + " VARCHAR NOT NULL," + COLUMN_FAMILY + " VARCHAR," + GUIDE_POST_KEY + " VARBINARY," + GUIDE_POSTS_WIDTH + " BIGINT," + LAST_STATS_UPDATE_TIME + " DATE, " + GUIDE_POSTS_ROW_COUNT + " BIGINT, " + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + PHYSICAL_NAME + "," + COLUMN_FAMILY + "," + GUIDE_POST_KEY + "))\n" + // Install split policy to prevent a physical table's stats from being split // across regions. TableDescriptorBuilder.SPLIT_POLICY + "='" + SYSTEM_STATS_SPLIT_POLICY_CLASSNAME + "',\n" + TRANSACTIONAL + "=" + Boolean.FALSE; String CREATE_SEQUENCE_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + TYPE_SEQUENCE + "\"(\n" + TENANT_ID + " VARCHAR NULL," + SEQUENCE_SCHEMA + " VARCHAR NULL, \n" + SEQUENCE_NAME + " VARCHAR NOT NULL, \n" + START_WITH + " BIGINT, \n" + CURRENT_VALUE + " BIGINT, \n" + INCREMENT_BY + " BIGINT, \n" + CACHE_SIZE + " BIGINT, \n" + // the following three columns were added in 3.1/4.1 MIN_VALUE + " BIGINT, \n" + MAX_VALUE + " BIGINT, \n" + CYCLE_FLAG + " BOOLEAN, \n" + LIMIT_REACHED_FLAG + " BOOLEAN \n" + " CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + "," + SEQUENCE_SCHEMA + "," + SEQUENCE_NAME + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE; String UPGRADE_TABLE_SNAPSHOT_PREFIX = "_UPGRADING_TABLE_"; String CREATE_FUNCTION_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_FUNCTION_TABLE + "\"(\n" + // Pk columns TENANT_ID + " VARCHAR NULL," + FUNCTION_NAME + " VARCHAR NOT NULL, \n" + NUM_ARGS + " INTEGER, \n" + // Function metadata (will be null for argument row) CLASS_NAME + " VARCHAR, \n" + JAR_PATH + " VARCHAR, \n" 
+ RETURN_TYPE + " VARCHAR, \n" + // Argument metadata (will be null for function row) TYPE + " VARCHAR, \n" + ARG_POSITION + " VARBINARY, \n" + IS_ARRAY + " BOOLEAN, \n" + IS_CONSTANT + " BOOLEAN, \n" + DEFAULT_VALUE + " VARCHAR, \n" + MIN_VALUE + " VARCHAR, \n" + MAX_VALUE + " VARCHAR, \n" + " CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + ", " + FUNCTION_NAME + ", " + TYPE + ", " + ARG_POSITION + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + // Install split policy to prevent a tenant's metadata from being split across regions. TableDescriptorBuilder.SPLIT_POLICY + "='" + SYSTEM_FUNCTION_SPLIT_POLICY_CLASSNAME + "',\n" + TRANSACTIONAL + "=" + Boolean.FALSE; String CREATE_LOG_METADATA = "CREATE IMMUTABLE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_LOG_TABLE + "\"(\n" + // Pk columns START_TIME + " DECIMAL, \n" + TABLE_NAME + " VARCHAR, \n" + QUERY_ID + " VARCHAR NOT NULL,\n" + TENANT_ID + " VARCHAR ," + USER + " VARCHAR , \n" + CLIENT_IP + " VARCHAR, \n" + // Function metadata (will be null for argument row) QUERY + " VARCHAR, \n" + EXPLAIN_PLAN + " VARCHAR, \n" + // Argument metadata (will be null for function row) NO_OF_RESULTS_ITERATED + " BIGINT, \n" + QUERY_STATUS + " VARCHAR, \n" + EXCEPTION_TRACE + " VARCHAR, \n" + GLOBAL_SCAN_DETAILS + " VARCHAR, \n" + BIND_PARAMETERS + " VARCHAR, \n" + SCAN_METRICS_JSON + " VARCHAR, \n" + MetricType.getMetricColumnsDetails() + "\n" + " CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (START_TIME, TABLE_NAME, QUERY_ID))\n" + SALT_BUCKETS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + ColumnFamilyDescriptorBuilder.TTL + "=" + MetaDataProtocol.DEFAULT_LOG_TTL + ",\n" + TableProperty.IMMUTABLE_STORAGE_SCHEME.toString() + " = " + ImmutableStorageScheme.SINGLE_CELL_ARRAY_WITH_OFFSETS.name() + ",\n" + TableProperty.COLUMN_ENCODED_BYTES.toString() + " = 1"; byte[] OFFSET_FAMILY = "f_offset".getBytes(StandardCharsets.UTF_8); byte[] 
OFFSET_COLUMN = "c_offset".getBytes(StandardCharsets.UTF_8); String LAST_SCAN = "LAST_SCAN"; String HASH_JOIN_CACHE_RETRIES = "hashjoin.client.retries.number"; int DEFAULT_HASH_JOIN_CACHE_RETRIES = 5; // Links from parent to child views are stored in a separate table for // scalability String CREATE_CHILD_LINK_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CHILD_LINK_TABLE + "\"(\n" + // PK columns TENANT_ID + " VARCHAR NULL," + TABLE_SCHEM + " VARCHAR NULL," + TABLE_NAME + " VARCHAR NOT NULL," + COLUMN_NAME + " VARCHAR NULL," + COLUMN_FAMILY + " VARCHAR NULL," + LINK_TYPE + " UNSIGNED_TINYINT,\n" + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + "," + TABLE_SCHEM + "," + TABLE_NAME + "," + COLUMN_NAME + "," + COLUMN_FAMILY + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE; String CREATE_MUTEX_METADATA = "CREATE IMMUTABLE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_MUTEX_TABLE_NAME + "\"(\n" + // Pk columns TENANT_ID + " VARCHAR NULL," + TABLE_SCHEM + " VARCHAR NULL," + TABLE_NAME + " VARCHAR NOT NULL," + COLUMN_NAME + " VARCHAR NULL," + // null for table row COLUMN_FAMILY + " VARCHAR NULL " + // using for CF to uniqueness for columns "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + "," + TABLE_SCHEM + "," + TABLE_NAME + "," + COLUMN_NAME + "," + COLUMN_FAMILY + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + ColumnFamilyDescriptorBuilder.TTL + "=" + TTL_FOR_MUTEX; String CREATE_TASK_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_TASK_TABLE + "\"(\n" + // PK columns TASK_TYPE + " UNSIGNED_TINYINT NOT NULL," + TASK_TS + " TIMESTAMP NOT NULL," + TENANT_ID + " VARCHAR NULL," + TABLE_SCHEM + " VARCHAR NULL," + TABLE_NAME + " VARCHAR NOT NULL,\n" + // Non-PK columns TASK_STATUS + " VARCHAR 
NULL," + TASK_END_TS + " TIMESTAMP NULL," + TASK_PRIORITY + " UNSIGNED_TINYINT NULL," + TASK_DATA + " VARCHAR NULL,\n" + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TASK_TYPE + "," + TASK_TS + " ROW_TIMESTAMP," + TENANT_ID + "," + TABLE_SCHEM + "," + TABLE_NAME + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + ColumnFamilyDescriptorBuilder.TTL + "=" + TASK_TABLE_TTL + ",\n" + // 10 days TableDescriptorBuilder.SPLIT_POLICY + "='" + SYSTEM_TASK_SPLIT_POLICY_CLASSNAME + "',\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + STORE_NULLS + "=" + Boolean.TRUE; String CREATE_TRANSFORM_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_TRANSFORM_TABLE + "\"(\n" + // PK columns TENANT_ID + " VARCHAR NULL,\n" + TABLE_SCHEM + " VARCHAR NULL," + LOGICAL_TABLE_NAME + " VARCHAR NOT NULL,\n" + // Non-PK columns NEW_PHYS_TABLE_NAME + " VARCHAR,\n" + TRANSFORM_TYPE + " INTEGER," + LOGICAL_PARENT_NAME + " VARCHAR NULL,\n" + // If this is an index, Logical_Parent_Name is the data table name. // Index name is not unique. 
TRANSFORM_STATUS + " VARCHAR NULL," + TRANSFORM_JOB_ID + " VARCHAR NULL," + TRANSFORM_RETRY_COUNT + " INTEGER NULL," + TRANSFORM_START_TS + " TIMESTAMP NULL," + TRANSFORM_LAST_STATE_TS + " TIMESTAMP NULL," + OLD_METADATA + " VARBINARY NULL,\n" + NEW_METADATA + " VARCHAR NULL,\n" + TRANSFORM_FUNCTION + " VARCHAR NULL\n" + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TENANT_ID + "," + TABLE_SCHEM + "," + LOGICAL_TABLE_NAME + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + ColumnFamilyDescriptorBuilder.TTL + "=" + TRANSFORM_TABLE_TTL + ",\n" + // 90 days TableDescriptorBuilder.SPLIT_POLICY + "='" + SYSTEM_TASK_SPLIT_POLICY_CLASSNAME + "',\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + STORE_NULLS + "=" + Boolean.TRUE; String CREATE_CDC_STREAM_STATUS_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CDC_STREAM_STATUS_TABLE + "\"(\n" + // PK columns TABLE_NAME + " VARCHAR NOT NULL," + STREAM_NAME + " VARCHAR NOT NULL," + // Non-PK columns STREAM_STATUS + " VARCHAR,\n" + STREAM_TYPE + " VARCHAR,\n" + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TABLE_NAME + "," + STREAM_NAME + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + UPDATE_CACHE_FREQUENCY + "=" + "7200000"; String CREATE_CDC_STREAM_METADATA = "CREATE TABLE " + SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_CDC_STREAM_TABLE + "\"(\n" + // PK columns TABLE_NAME + " VARCHAR NOT NULL," + STREAM_NAME + " VARCHAR NOT NULL," + PARTITION_ID + " VARCHAR NOT NULL," + PARENT_PARTITION_ID + " VARCHAR," + // Non-PK columns PARTITION_START_TIME + " BIGINT," + PARTITION_END_TIME + " BIGINT," + PARTITION_START_KEY + " VARBINARY_ENCODED," + PARTITION_END_KEY + " VARBINARY_ENCODED," + PARENT_PARTITION_START_TIME + " BIGINT,\n" + "CONSTRAINT " + SYSTEM_TABLE_PK_NAME + " PRIMARY KEY (" + TABLE_NAME + "," + STREAM_NAME + "," + PARTITION_ID 
+ "," + PARENT_PARTITION_ID + "))\n" + HConstants.VERSIONS + "=%s,\n" + ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS + "=%s,\n" + TRANSACTIONAL + "=" + Boolean.FALSE + ",\n" + UPDATE_CACHE_FREQUENCY + "=" + "7200000"; }
apache/hop
37,304
ui/src/main/java/org/apache/hop/ui/hopgui/perspective/execution/WorkflowExecutionViewer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.hop.ui.hopgui.perspective.execution; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hop.core.Const; import org.apache.hop.core.Props; import org.apache.hop.core.Result; import org.apache.hop.core.RowMetaAndData; import org.apache.hop.core.gui.AreaOwner; import org.apache.hop.core.gui.DPoint; import org.apache.hop.core.gui.IGc; import org.apache.hop.core.gui.Point; import org.apache.hop.core.gui.plugin.GuiPlugin; import org.apache.hop.core.gui.plugin.GuiRegistry; import org.apache.hop.core.gui.plugin.key.GuiKeyboardShortcut; import org.apache.hop.core.gui.plugin.key.GuiOsxKeyboardShortcut; import org.apache.hop.core.gui.plugin.tab.GuiTabItem; import org.apache.hop.core.gui.plugin.toolbar.GuiToolbarElement; import org.apache.hop.core.gui.plugin.toolbar.GuiToolbarElementType; import org.apache.hop.core.logging.LogChannel; import org.apache.hop.core.metadata.SerializableMetadataProvider; import org.apache.hop.core.row.IRowMeta; import 
org.apache.hop.core.row.IValueMeta; import org.apache.hop.core.row.RowBuffer; import org.apache.hop.core.row.RowMetaBuilder; import org.apache.hop.core.util.Utils; import org.apache.hop.core.variables.IVariables; import org.apache.hop.core.variables.Variables; import org.apache.hop.core.xml.XmlHandler; import org.apache.hop.execution.Execution; import org.apache.hop.execution.ExecutionData; import org.apache.hop.execution.ExecutionDataBuilder; import org.apache.hop.execution.ExecutionDataSetMeta; import org.apache.hop.execution.ExecutionInfoLocation; import org.apache.hop.execution.ExecutionState; import org.apache.hop.execution.ExecutionType; import org.apache.hop.execution.IExecutionInfoLocation; import org.apache.hop.i18n.BaseMessages; import org.apache.hop.pipeline.Pipeline; import org.apache.hop.pipeline.PipelinePainter; import org.apache.hop.ui.core.PropsUi; import org.apache.hop.ui.core.dialog.ErrorDialog; import org.apache.hop.ui.core.dialog.SelectRowDialog; import org.apache.hop.ui.core.gui.GuiResource; import org.apache.hop.ui.core.gui.GuiToolbarWidgets; import org.apache.hop.ui.core.widget.ColumnInfo; import org.apache.hop.ui.core.widget.TableView; import org.apache.hop.ui.hopgui.CanvasFacade; import org.apache.hop.ui.hopgui.CanvasListener; import org.apache.hop.ui.hopgui.HopGui; import org.apache.hop.ui.hopgui.file.workflow.HopGuiWorkflowGraph; import org.apache.hop.ui.hopgui.file.workflow.HopWorkflowFileType; import org.apache.hop.ui.hopgui.perspective.TabItemHandler; import org.apache.hop.ui.hopgui.perspective.dataorch.HopDataOrchestrationPerspective; import org.apache.hop.ui.hopgui.shared.BaseExecutionViewer; import org.apache.hop.ui.hopgui.shared.SwtGc; import org.apache.hop.ui.util.EnvironmentUtils; import org.apache.hop.workflow.ActionResult; import org.apache.hop.workflow.WorkflowMeta; import org.apache.hop.workflow.WorkflowPainter; import org.apache.hop.workflow.action.ActionMeta; import org.eclipse.swt.SWT; import 
org.eclipse.swt.custom.CTabFolder;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.w3c.dom.Node;

/**
 * Viewer for a single workflow execution in the execution information perspective.
 *
 * <p>It renders the workflow on a canvas at the top (with per-action results painted on it) and a
 * set of detail tabs at the bottom: execution info, logging text, action data, plus any tabs
 * contributed by plugins via {@link GuiTabItem}. Toolbar actions allow refreshing, zooming,
 * drilling down into child executions, navigating up to the parent execution, and opening the
 * executed workflow or its metadata in the editor.
 */
@GuiPlugin
public class WorkflowExecutionViewer extends BaseExecutionViewer
    implements IExecutionViewer, PaintListener, MouseListener {
  private static final Class<?> PKG = WorkflowExecutionViewer.class;

  public static final String GUI_PLUGIN_TOOLBAR_PARENT_ID = "WorkflowExecutionViewer-Toolbar";

  // Toolbar item IDs; the numeric segment determines the ordering of the toolbar items.
  public static final String TOOLBAR_ITEM_REFRESH = "WorkflowExecutionViewer-Toolbar-10100-Refresh";
  public static final String TOOLBAR_ITEM_ZOOM_LEVEL =
      "WorkflowExecutionViewer-ToolBar-10500-Zoom-Level";
  public static final String TOOLBAR_ITEM_ZOOM_FIT_TO_SCREEN =
      "WorkflowExecutionViewer-ToolBar-10600-Zoom-Fit-To-Screen";
  public static final String TOOLBAR_ITEM_TO_EDITOR =
      "WorkflowExecutionViewer-Toolbar-11100-GoToEditor";
  public static final String TOOLBAR_ITEM_DRILL_DOWN =
      "WorkflowExecutionViewer-Toolbar-11200-DrillDown";
  public static final String TOOLBAR_ITEM_GO_UP = "WorkflowExecutionViewer-Toolbar-11300-GoUp";
  public static final String TOOLBAR_ITEM_VIEW_EXECUTOR =
      "WorkflowExecutionViewer-Toolbar-12000-ViewExecutor";
  public static final String TOOLBAR_ITEM_VIEW_METADATA =
      "WorkflowExecutionViewer-Toolbar-12100-ViewMetadata";

  // Registry key under which plugins contribute extra tabs for this viewer.
  public static final String WORKFLOW_EXECUTION_VIEWER_TABS = "WorkflowExecutionViewer.Tabs.ID";
  public static final String CONST_ERROR = "Error";

  // The (read-only) metadata of the workflow being viewed.
  protected final WorkflowMeta workflowMeta;
  // The action the user last clicked on the canvas, if any.
  protected ActionMeta selectedAction;
  // The execution data belonging to the selected action, if any.
  protected ExecutionData selectedExecutionData;

  private CTabItem infoTab;
  private TableView infoView;
  private CTabItem logTab;
  private CTabItem dataTab;
  private SashForm dataSash;
  private org.eclipse.swt.widgets.List dataList;
  private TableView dataView;

  // Cache of execution data per action name, (re)built by refreshStatus().
  private Map<String, List<ExecutionData>> actionExecutions;

  /**
   * Creates the viewer and builds its widgets.
   *
   * @param parent the parent composite (the perspective's tab folder)
   * @param hopGui the Hop GUI instance
   * @param workflowMeta the metadata of the executed workflow
   * @param locationName the name of the execution information location to read from
   * @param perspective the owning execution perspective
   * @param execution the execution to display
   * @param executionState the last known state of that execution
   */
  public WorkflowExecutionViewer(
      Composite parent,
      HopGui hopGui,
      WorkflowMeta workflowMeta,
      String locationName,
      ExecutionPerspective perspective,
      Execution execution,
      ExecutionState executionState) {
    super(parent, hopGui, perspective, locationName, execution, executionState);
    this.workflowMeta = workflowMeta;
    actionExecutions = new HashMap<>();

    // Calculate the pipeline size only once since the metadata is read-only
    // this.maximum = workflowMeta.getMaximum();

    addWidgets();
  }

  /** Add the widgets in the execution perspective parent tab folder */
  public void addWidgets() {
    setLayout(new FormLayout());

    // A toolbar at the top
    //
    toolBar = new ToolBar(this, SWT.WRAP | SWT.LEFT | SWT.HORIZONTAL);
    toolBarWidgets = new GuiToolbarWidgets();
    toolBarWidgets.registerGuiPluginObject(this);
    toolBarWidgets.createToolbarWidgets(toolBar, GUI_PLUGIN_TOOLBAR_PARENT_ID);
    FormData layoutData = new FormData();
    layoutData.left = new FormAttachment(0, 0);
    layoutData.top = new FormAttachment(0, 0);
    layoutData.right = new FormAttachment(100, 0);
    toolBar.setLayoutData(layoutData);
    toolBar.pack();
    PropsUi.setLook(toolBar, Props.WIDGET_STYLE_TOOLBAR);

    // Below the toolbar we have a horizontal splitter: Canvas above and Execution tabs below
    //
    sash = new SashForm(this, SWT.VERTICAL);
    FormData fdSash = new FormData();
    fdSash.left = new FormAttachment(0, 0);
    fdSash.top = new FormAttachment(toolBar, 0);
    fdSash.right = new FormAttachment(100, 0);
    fdSash.bottom = new FormAttachment(100, 0);
    sash.setLayoutData(fdSash);

    // In this sash we have a canvas at the top and a tab folder with information at the bottom
    //
    // The canvas at the top
    //
    canvas = new Canvas(sash, SWT.NO_BACKGROUND | SWT.BORDER);
    Listener listener = CanvasListener.getInstance();
    canvas.addListener(SWT.MouseDown, listener);
    canvas.addListener(SWT.MouseMove, listener);
    canvas.addListener(SWT.MouseUp, listener);
    canvas.addListener(SWT.Paint, listener);
    FormData fdCanvas = new FormData();
    fdCanvas.left = new FormAttachment(0, 0);
    fdCanvas.top = new FormAttachment(0, 0);
    fdCanvas.right = new FormAttachment(100, 0);
    fdCanvas.bottom = new FormAttachment(100, 0);
    canvas.setLayoutData(fdCanvas);
    canvas.addPaintListener(this);
    canvas.addMouseListener(this);
    // Mouse move/wheel listeners are not supported in the web (RWT) environment.
    if (!EnvironmentUtils.getInstance().isWeb()) {
      canvas.addMouseMoveListener(this);
      canvas.addMouseWheelListener(this::mouseScrolled);
    }

    // The execution information tabs at the bottom
    //
    tabFolder = new CTabFolder(sash, SWT.MULTI);
    PropsUi.setLook(tabFolder, Props.WIDGET_STYLE_TAB);

    addInfoTab();
    addLogTab();
    addDataTab();
    addPluginTabs();

    refresh();

    // Add keyboard listeners from the main GUI and this class (toolbar etc) to the canvas. That's
    // where the focus should be
    //
    hopGui.replaceKeyboardShortcutListeners(this);

    tabFolder.setSelection(0);
    sash.setWeights(new int[] {60, 40});
  }

  /** Builds the "Info" tab: a simple two-column (item/value) table with execution details. */
  private void addInfoTab() {
    infoTab = new CTabItem(tabFolder, SWT.NONE);
    infoTab.setFont(GuiResource.getInstance().getFontDefault());
    infoTab.setImage(GuiResource.getInstance().getImageInfo());
    infoTab.setText(BaseMessages.getString(PKG, "WorkflowExecutionViewer.InfoTab.Title"));

    ColumnInfo[] infoCols =
        new ColumnInfo[] {
          new ColumnInfo("Item", ColumnInfo.COLUMN_TYPE_TEXT, false, true),
          new ColumnInfo("Value", ColumnInfo.COLUMN_TYPE_TEXT, false, true),
        };

    // Let's simply add a table view with all the details on it.
    //
    infoView =
        new TableView(
            hopGui.getVariables(),
            tabFolder,
            SWT.H_SCROLL | SWT.V_SCROLL,
            infoCols,
            1,
            true,
            null,
            props);
    PropsUi.setLook(infoView);

    infoTab.setControl(infoView);
  }

  /**
   * Re-reads the execution state from the configured execution information location, repopulates
   * the info tab and rebuilds the per-action execution data cache ({@link #actionExecutions}).
   */
  private void refreshStatus() {
    try {
      infoView.clearAll();

      ExecutionInfoLocation location = perspective.getLocationMap().get(locationName);
      if (location == null) {
        return;
      }
      IExecutionInfoLocation iLocation = location.getExecutionInfoLocation();

      // Force re-load of the execution information
      //
      iLocation.unBuffer(execution.getId());

      // Don't load execution logging since that can be a lot of data at times.
      //
      executionState = iLocation.getExecutionState(execution.getId(), false);
      if (executionState == null) {
        return;
      }

      // Calculate information staleness
      //
      String statusDescription = executionState.getStatusDescription();
      if (Pipeline.STRING_RUNNING.equalsIgnoreCase(statusDescription)
          || Pipeline.STRING_INITIALIZING.equalsIgnoreCase(statusDescription)) {
        long loggingInterval = Const.toLong(location.getDataLoggingInterval(), 20000);
        if (System.currentTimeMillis() - executionState.getUpdateTime().getTime()
            > loggingInterval) {
          // The information is stale, not getting updates!
          //
          TableItem item = infoView.add("Update state", STRING_STATE_STALE);
          item.setBackground(GuiResource.getInstance().getColorLightBlue());
          item.setForeground(GuiResource.getInstance().getColorWhite());
        }
      }

      infoView.add("Name", execution.getName());
      infoView.add("Type", execution.getExecutionType().name());
      infoView.add("Filename", execution.getFilename());
      infoView.add("ID", execution.getId());
      infoView.add("Parent ID", execution.getParentId());
      infoView.add("Registration", formatDate(execution.getRegistrationDate()));
      infoView.add("Start", formatDate(execution.getExecutionStartDate()));
      infoView.add("Type", executionState.getExecutionType().name());
      infoView.add("Status", statusDescription);
      infoView.add("Status Last updated", formatDate(executionState.getUpdateTime()));
      infoView.add("Container ID", executionState.getContainerId());

      infoView.optimizeTableView();

      // Cache the information of all the executed actions...
      //
      actionExecutions.clear();
      List<String> childIds = iLocation.findChildIds(ExecutionType.Workflow, execution.getId());
      if (childIds != null) {
        for (String id : childIds) {
          ExecutionData actionData = iLocation.getExecutionData(execution.getId(), id);
          ExecutionDataSetMeta dataSetMeta = actionData.getDataSetMeta();
          if (dataSetMeta != null) {
            String actionName = dataSetMeta.getName();
            // Add this one under that name
            //
            List<ExecutionData> executionDataList =
                actionExecutions.computeIfAbsent(actionName, k -> new ArrayList<>());
            executionDataList.add(actionData);
          }
        }
      }
    } catch (Exception e) {
      // NOTE(review): message says "pipeline" but this is the workflow viewer — looks copy-pasted.
      new ErrorDialog(getShell(), CONST_ERROR, "Error refreshing pipeline status", e);
    }
  }

  /**
   * Builds the "Data" tab: a sash with the list of available data sets on the left and a table
   * showing the rows of the selected set on the right.
   */
  private void addDataTab() {
    dataTab = new CTabItem(tabFolder, SWT.NONE);
    dataTab.setFont(GuiResource.getInstance().getFontDefault());
    dataTab.setImage(GuiResource.getInstance().getImageData());
    dataTab.setText(BaseMessages.getString(PKG, "WorkflowExecutionViewer.DataTab.Title"));

    dataSash = new SashForm(tabFolder, SWT.HORIZONTAL);

    // The list of available data on the left-hand side.
    //
    dataList =
        new org.eclipse.swt.widgets.List(
            dataSash, SWT.SINGLE | SWT.LEFT | SWT.V_SCROLL | SWT.H_SCROLL);
    PropsUi.setLook(dataList);
    dataList.addListener(SWT.Selection, e -> showDataRows());

    // An empty table view on the right. This will be populated during a refresh.
    //
    ColumnInfo[] dataColumns = new ColumnInfo[] {};
    dataView =
        new TableView(
            hopGui.getVariables(),
            dataSash,
            SWT.H_SCROLL | SWT.V_SCROLL,
            dataColumns,
            0,
            true,
            null,
            props);
    PropsUi.setLook(dataView);
    dataView.optimizeTableView();

    dataSash.setWeights(new int[] {15, 85});
    dataTab.setControl(dataSash);
  }

  /** An entry is selected in the data list. Show the corresponding rows. */
  private void showDataRows() {
    if (selectedExecutionData == null) {
      return;
    }
    // Remember the sash weights: re-creating the table view below resets the layout.
    int[] weights = dataSash.getWeights();
    try {
      String[] selection = dataList.getSelection();
      if (selection.length != 1) {
        return;
      }
      String setDescription = selection[0];

      // Look up the key in the metadata cache (built previously)...
      //
      for (ExecutionDataSetMeta setMeta : selectedExecutionData.getSetMetaData().values()) {
        if (setDescription.equals(setMeta.getDescription())) {
          // What's the data for this metadata?
          //
          RowBuffer rowBuffer = selectedExecutionData.getDataSets().get(setMeta.getSetKey());

          List<ColumnInfo> columns = new ArrayList<>();
          IRowMeta rowMeta = rowBuffer.getRowMeta();
          // Add a column for every value in the row metadata
          for (IValueMeta valueMeta : rowMeta.getValueMetaList()) {
            ColumnInfo columnInfo =
                new ColumnInfo(
                    valueMeta.getName(), ColumnInfo.COLUMN_TYPE_TEXT, valueMeta.isNumeric());
            columnInfo.setValueMeta(valueMeta);
            columnInfo.setToolTip(valueMeta.toStringMeta());
            columns.add(columnInfo);
          }

          // Dispose of the old table view
          //
          dataView.dispose();

          // Create a new one
          //
          dataView =
              new TableView(
                  hopGui.getVariables(),
                  dataSash,
                  SWT.H_SCROLL | SWT.V_SCROLL,
                  columns.toArray(new ColumnInfo[0]),
                  rowBuffer.size(),
                  true,
                  null,
                  props);

          // Copy the buffered rows into the table; column 0 is the row number.
          for (int r = 0; r < rowBuffer.size(); r++) {
            Object[] row = rowBuffer.getBuffer().get(r);
            TableItem item = dataView.table.getItem(r);
            item.setText(0, Integer.toString(r + 1));
            for (int c = 0; c < rowMeta.size(); c++) {
              String value = rowMeta.getString(row, c);
              if (value == null) {
                value = "";
              }
              item.setText(c + 1, value);
            }
          }
          dataView.optWidth(true);
          break;
        }
      }
    } catch (Exception e) {
      // NOTE(review): message says "transform" but these are workflow actions — looks copy-pasted.
      new ErrorDialog(getShell(), CONST_ERROR, "Error showing transform data rows", e);
    } finally {
      layout(true, true);
      dataSash.setWeights(weights);
    }
    redraw();
  }

  /** Builds the "Log" tab; the logging text is (re)loaded lazily when the tab gains focus. */
  private void addLogTab() {
    logTab = new CTabItem(tabFolder, SWT.NONE);
    logTab.setFont(GuiResource.getInstance().getFontDefault());
    logTab.setImage(GuiResource.getInstance().getImageShowLog());
    logTab.setText(BaseMessages.getString(PKG, "WorkflowExecutionViewer.LogTab.Title"));

    loggingText = new Text(tabFolder, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.READ_ONLY);
    PropsUi.setLook(loggingText);
    logTab.setControl(loggingText);

    // When the logging tab comes into focus, re-load the logging text
    //
    tabFolder.addListener(
        SWT.Selection,
        e -> {
          if (tabFolder.getSelection() == logTab) {
            refreshLoggingText();
          }
        });
  }

  /**
   * Adds tabs contributed by plugins through the GUI registry under
   * {@link #WORKFLOW_EXECUTION_VIEWER_TABS}. A plugin can veto its tab by exposing a static
   * {@code boolean showTab(WorkflowExecutionViewer)} method.
   */
  private void addPluginTabs() {
    GuiRegistry guiRegistry = GuiRegistry.getInstance();
    List<GuiTabItem> tabsList = guiRegistry.getGuiTabsMap().get(WORKFLOW_EXECUTION_VIEWER_TABS);

    if (tabsList != null) {
      // Sort by ID to get a stable tab order.
      tabsList.sort(Comparator.comparing(GuiTabItem::getId));
      for (GuiTabItem tabItem : tabsList) {
        try {
          Class<?> pluginTabClass = tabItem.getMethod().getDeclaringClass();
          boolean showTab = true;
          try {
            // Invoke static method showTab(WorkflowExecutionViewer)
            //
            Method showTabMethod =
                pluginTabClass.getMethod("showTab", WorkflowExecutionViewer.class);
            showTab = (boolean) showTabMethod.invoke(null, this);
          } catch (NoSuchMethodException noSuchMethodException) {
            // Just show the tab
          }
          if (showTab) {
            Constructor<?> constructor =
                pluginTabClass.getConstructor(WorkflowExecutionViewer.class);
            Object object = constructor.newInstance(this);
            tabItem.getMethod().invoke(object, tabFolder);
          }
        } catch (Exception e) {
          new ErrorDialog(
              hopGui.getActiveShell(),
              CONST_ERROR,
              "Hop was unable to invoke @GuiTab method "
                  + tabItem.getMethod().getName()
                  + " with the parent composite as argument",
              e);
        }
      }
      tabFolder.layout();
    }
  }

  @Override
  public Image getTitleImage() {
    return GuiResource.getInstance().getImageWorkflow();
  }

  @Override
  public String getTitleToolTip() {
    return workflowMeta.getDescription();
  }

  /** Paints the workflow on the canvas, double-buffered on Windows GUI to avoid flickering. */
  @Override
  public void paintControl(PaintEvent e) {
    Point area = getArea();
    if (area.x == 0 || area.y == 0) {
      return; // nothing to do!
    }

    // Do double buffering to prevent flickering on Windows
    //
    boolean needsDoubleBuffering =
        Const.isWindows() && "GUI".equalsIgnoreCase(Const.getHopPlatformRuntime());

    Image image = null;
    GC swtGc = e.gc;

    if (needsDoubleBuffering) {
      image = new Image(hopDisplay(), area.x, area.y);
      swtGc = new GC(image);
    }

    drawWorkflowImage(swtGc, area.x, area.y, magnification);

    if (needsDoubleBuffering) {
      // Draw the image onto the canvas and get rid of the resources
      //
      e.gc.drawImage(image, 0, 0);
      swtGc.dispose();
      image.dispose();
    }

    setZoomLabel();
  }

  /** Synchronizes the zoom combo box in the toolbar with the current magnification. */
  public void setZoomLabel() {
    Combo combo = (Combo) toolBarWidgets.getWidgetsMap().get(TOOLBAR_ITEM_ZOOM_LEVEL);
    if (combo == null || combo.isDisposed()) {
      return;
    }
    String newString = Math.round(magnification * 100) + "%";
    String oldString = combo.getText();
    if (!newString.equals(oldString)) {
      combo.setText(Math.round(magnification * 100) + "%");
    }
  }

  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_ZOOM_FIT_TO_SCREEN,
      toolTip = "i18n::ExecutionViewer.GuiAction.ZoomFitToScreen.Tooltip",
      type = GuiToolbarElementType.BUTTON,
      image = "ui/images/zoom-fit.svg")
  @Override
  public void zoomFitToScreen() {
    super.zoomFitToScreen();
  }

  @Override
  protected Point getArea() {
    org.eclipse.swt.graphics.Rectangle rect = canvas.getClientArea();
    return new Point(rect.width, rect.height);
  }

  /**
   * Draws the workflow image on the given graphics context, overlaying per-action results (from
   * the cached execution data) and highlighting actions that are still running.
   *
   * @param swtGc the SWT graphics context to draw on
   * @param width the width of the drawing area
   * @param height the height of the drawing area
   * @param magnificationFactor the magnification to apply (combined with the global zoom factor)
   */
  public void drawWorkflowImage(GC swtGc, int width, int height, float magnificationFactor) {
    IGc gc = new SwtGc(swtGc, width, height, iconSize);
    try {
      PropsUi propsUi = PropsUi.getInstance();

      int gridSize = propsUi.isShowCanvasGridEnabled() ? propsUi.getCanvasGridSize() : 1;

      WorkflowPainter workflowPainter =
          new WorkflowPainter(
              gc,
              hopGui.getVariables(),
              workflowMeta,
              new Point(width, height),
              new DPoint(0, 0),
              null,
              null,
              areaOwners,
              propsUi.getIconSize(),
              propsUi.getLineWidth(),
              gridSize,
              propsUi.getNoteFont().getName(),
              propsUi.getNoteFont().getHeight(),
              propsUi.getZoomFactor(),
              false,
              null);

      // correct the magnification with the overall zoom factor
      //
      float correctedMagnification = (float) (magnificationFactor * propsUi.getZoomFactor());
      workflowPainter.setMagnification(correctedMagnification);
      workflowPainter.setOffset(offset);

      // Draw the navigation viewport at the bottom right
      //
      workflowPainter.setMaximum(maximum);
      workflowPainter.setShowingNavigationView(true);
      workflowPainter.setScreenMagnification(magnification);

      // See if we can get status' information for the actions...
      //
      List<ActionResult> actionResults = new ArrayList<>();
      List<ActionMeta> activeActions = new ArrayList<>();
      if (actionExecutions != null) {
        for (String actionName : actionExecutions.keySet()) {
          List<ExecutionData> executionDataList = actionExecutions.get(actionName);
          if (!Utils.isEmpty(executionDataList)) {
            // Just consider the first
            //
            ExecutionData executionData = executionDataList.get(0);
            // The result data set holds key/value rows: result, nr of errors, stopped flag.
            RowBuffer rowBuffer = executionData.getDataSets().get(ExecutionDataBuilder.KEY_RESULT);
            if (rowBuffer != null) {
              IRowMeta rowMeta = rowBuffer.getRowMeta();
              Result result = new Result();
              for (Object[] row : rowBuffer.getBuffer()) {
                try {
                  if (rowMeta.getString(row, 0).equals(ExecutionDataBuilder.RESULT_KEY_RESULT)) {
                    result.setResult("true".equalsIgnoreCase(rowMeta.getString(row, 1)));
                  }
                  if (rowMeta.getString(row, 0).equals(ExecutionDataBuilder.RESULT_KEY_ERRORS)) {
                    result.setNrErrors(Long.parseLong(rowMeta.getString(row, 1)));
                  }
                  if (rowMeta.getString(row, 0).equals(ExecutionDataBuilder.RESULT_KEY_STOPPED)) {
                    result.setStopped("true".equalsIgnoreCase(rowMeta.getString(row, 1)));
                  }
                } catch (Exception e) {
                  LogChannel.UI.logError("Error getting action result information", e);
                }
              }
              ActionResult actionResult = new ActionResult();
              actionResult.setResult(result);
              actionResult.setActionName(actionName);
              actionResults.add(actionResult);
            }
            // An unfinished execution means the action is still active: highlight it.
            if (!executionData.isFinished()) {
              ActionMeta actionMeta = workflowMeta.findAction(actionName);
              if (actionMeta != null) {
                activeActions.add(actionMeta);
              }
            }
          }
        }
      }
      workflowPainter.setActionResults(actionResults);
      workflowPainter.setActiveActions(activeActions);

      try {
        workflowPainter.drawWorkflow();
        viewPort = workflowPainter.getViewPort();
        graphPort = workflowPainter.getGraphPort();
      } catch (Exception e) {
        new ErrorDialog(hopGui.getActiveShell(), CONST_ERROR, "Error drawing workflow image", e);
      }
    } finally {
      gc.dispose();
    }
    CanvasFacade.setData(canvas, magnification, offset, workflowMeta);
  }

  @Override
  public Control getControl() {
    return this;
  }

  /** Refresh the information in the execution panes */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_REFRESH,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.Refresh.Tooltip",
      image = "ui/images/refresh.svg")
  @GuiKeyboardShortcut(key = SWT.F5)
  @GuiOsxKeyboardShortcut(key = SWT.F5)
  public void refresh() {
    refreshStatus();
    refreshActionData();
    setFocus();
  }

  /** Toolbar zoom combo handler: re-reads the magnification and repaints the canvas. */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_ZOOM_LEVEL,
      label = "i18n:org.apache.hop.ui.hopgui:HopGui.Toolbar.Zoom",
      toolTip = "i18n::HopGuiPipelineGraph.GuiAction.ZoomInOut.Tooltip",
      type = GuiToolbarElementType.COMBO,
      alignRight = true,
      comboValuesMethod = "getZoomLevels")
  public void zoomLevel() {
    readMagnification();
    redraw();
  }

  /**
   * Navigates to this workflow in the data orchestration perspective: first looks for an already
   * open tab with the same execution ID, otherwise opens the workflow file by filename.
   */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_TO_EDITOR,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.NavigateToEditor.Tooltip",
      image = "ui/images/data_orch.svg")
  public void navigateToEditor() {
    try {
      // First try to see if this workflow is running in Hop GUI...
      //
      HopDataOrchestrationPerspective perspective = HopGui.getDataOrchestrationPerspective();
      TabItemHandler item = perspective.findWorkflow(execution.getId());
      if (item != null) {
        perspective.switchToTab(item);
        perspective.activate();
        return;
      }
      // Now see if we have a filename to match with
      //
      String filename = execution.getFilename();
      if (filename != null) {
        hopGui.fileDelegate.fileOpen(filename);
        return;
      }
    } catch (Exception e) {
      new ErrorDialog(getShell(), CONST_ERROR, "Error navigating to workflow in Hop GUI", e);
    }
  }

  /** Returns the values offered by the zoom level combo in the toolbar. */
  public List<String> getZoomLevels() {
    return Arrays.asList(PipelinePainter.magnificationDescriptions);
  }

  /** Allows for magnifying to any percentage entered by the user... */
  private void readMagnification() {
    Combo zoomLabel = (Combo) toolBarWidgets.getWidgetsMap().get(TOOLBAR_ITEM_ZOOM_LEVEL);
    if (zoomLabel == null) {
      return;
    }
    String possibleText = zoomLabel.getText().replace("%", "");
    float possibleFloatMagnification;
    try {
      possibleFloatMagnification = Float.parseFloat(possibleText) / 100;
      magnification = possibleFloatMagnification;
      if (zoomLabel.getText().indexOf('%') < 0) {
        zoomLabel.setText(zoomLabel.getText().concat("%"));
      }
    } catch (Exception e) {
      // Ignore: the user typed something that isn't a number.
    }
    refresh();
  }

  /**
   * Gets name
   *
   * @return value of name
   */
  public String getName() {
    return workflowMeta.getName();
  }

  /**
   * Gets logChannelId
   *
   * @return value of logChannelId
   */
  @Override
  public String getLogChannelId() {
    return execution.getId();
  }

  /**
   * Returns the ID of the active item: the selected action's execution if one is selected,
   * otherwise the execution's own ID.
   */
  @Override
  public String getActiveId() {
    if (selectedAction != null) {
      if (selectedExecutionData.getOwnerId() == null) {
        return selectedExecutionData.getParentId();
      } else {
        return selectedExecutionData.getOwnerId();
      }
    }
    return getLogChannelId();
  }

  /**
   * Handles a mouse click on the canvas: either starts a drag of the view or selects the clicked
   * action and shows its data.
   */
  @Override
  public void mouseDown(MouseEvent event) {
    workflowMeta.unselectAll();

    Point real = screen2real(event.x, event.y);
    lastClick = new Point(real.x, real.y);

    boolean control = (event.stateMask & SWT.MOD1) != 0;

    if (setupDragView(event.button, control, new Point(event.x, event.y))) {
      return;
    }

    AreaOwner areaOwner = getVisibleAreaOwner(real.x, real.y);
    if (areaOwner == null) {
      return;
    }
    switch (areaOwner.getAreaType()) {
      case ACTION_ICON:
        // Show the data for this action
        //
        selectedAction = (ActionMeta) areaOwner.getOwner();
        refreshActionData();
        break;
      case ACTION_NAME:
        // Show the data for this action
        //
        selectedAction = (ActionMeta) areaOwner.getParent();
        refreshActionData();
        break;
    }
    redraw();
  }

  /** Shows the data of the currently selected action, if there is one. */
  public void refreshActionData() {
    if (selectedAction != null) {
      showActionData(selectedAction);
    }
  }

  /**
   * Populates the data tab with the data sets recorded for the given action, keeping the previous
   * list selection when still available.
   */
  private void showActionData(ActionMeta actionMeta) {
    actionMeta.setSelected(true);

    // Remember which entry in the list was selected.
    //
    String previousListSelection = null;
    if (dataList.getSelectionCount() == 1) {
      previousListSelection = dataList.getSelection()[0];
    }
    dataList.removeAll();

    try {
      // Find the data for the selected action...
      //
      List<ExecutionData> executionDataList = actionExecutions.get(actionMeta.getName());
      if (Utils.isEmpty(executionDataList)) {
        return;
      }

      // Get any execution data for the selected action
      //
      selectedExecutionData = executionDataList.get(0);

      Map<String, ExecutionDataSetMeta> setMetaData = selectedExecutionData.getSetMetaData();
      List<String> items = new ArrayList<>();
      for (String key : setMetaData.keySet()) {
        ExecutionDataSetMeta setMeta = setMetaData.get(key);
        if (actionMeta.getName().equals(setMeta.getName())) {
          // We're in the right place. We can have different types of data though.
          // We list the types in the List on the left in the data tab.
          //
          items.add(setMeta.getDescription());
        }
      }
      Collections.sort(items);
      dataList.setItems(items.toArray(new String[0]));
      tabFolder.setSelection(dataTab);

      if (previousListSelection != null && items.contains(previousListSelection)) {
        dataList.setSelection(items.indexOf(previousListSelection));
      } else {
        dataList.setSelection(0);
      }
      showDataRows();
    } catch (Exception e) {
      // NOTE(review): message says "transform" but these are workflow actions — looks copy-pasted.
      new ErrorDialog(getShell(), CONST_ERROR, "Error showing transform data", e);
    }
  }

  /**
   * Drills down into the child execution of the action at the given canvas location, if the click
   * hit an action icon.
   */
  public void drillDownOnLocation(Point location) {
    if (location == null) {
      return;
    }
    AreaOwner areaOwner = getVisibleAreaOwner((int) location.x, (int) location.y);
    if (areaOwner == null) {
      return;
    }

    if (areaOwner.getAreaType() == AreaOwner.AreaType.ACTION_ICON) {
      // Show the data for this action
      //
      selectedAction = (ActionMeta) areaOwner.getOwner();
      refreshActionData();

      // Now drill down
      //
      drillDown();
    }
  }

  /**
   * Opens a viewer on a child execution of the selected action. When there is more than one child
   * the user is asked to pick one from a dialog.
   */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_DRILL_DOWN,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.DrillDown.Tooltip",
      image = "ui/images/down.svg")
  public void drillDown() {
    if (selectedExecutionData == null) {
      return;
    }

    ExecutionInfoLocation location = perspective.getLocationMap().get(locationName);
    if (location == null) {
      return;
    }
    IExecutionInfoLocation iLocation = location.getExecutionInfoLocation();

    // We need to look up a pipeline or workflow execution where the parent is the ID of the action
    //
    try {
      String id = selectedExecutionData.getOwnerId();
      List<Execution> childExecutions = iLocation.findExecutions(id);
      if (childExecutions.isEmpty()) {
        return;
      }
      Execution child;
      if (childExecutions.size() == 1) {
        child = childExecutions.get(0);
      } else {
        // Select the execution...
        //
        IRowMeta rowMeta =
            new RowMetaBuilder().addString("Name").addString("Type").addDate("Start date").build();
        List<RowMetaAndData> rows = new ArrayList<>();
        for (Execution childExecution : childExecutions) {
          rows.add(
              new RowMetaAndData(
                  rowMeta,
                  childExecution.getName(),
                  childExecution.getExecutionType().name(),
                  childExecution.getExecutionStartDate()));
        }
        SelectRowDialog dialog =
            new SelectRowDialog(
                getShell(),
                hopGui.getVariables(),
                SWT.SINGLE | SWT.V_SCROLL | SWT.H_SCROLL,
                rows);
        RowMetaAndData selectedRow = dialog.open();
        if (selectedRow == null) {
          // Operation is canceled
          return;
        }
        int index = rows.indexOf(selectedRow);
        if (index < 0) {
          return;
        }
        child = childExecutions.get(index);
      }

      // Open this execution with state. Don't load execution logging.
      //
      ExecutionState state = iLocation.getExecutionState(child.getId(), false);
      perspective.createExecutionViewer(locationName, child, state);
    } catch (Exception e) {
      new ErrorDialog(getShell(), CONST_ERROR, "Error drilling down into selected action", e);
    }
  }

  /** Opens a viewer on the parent (grandparent execution) of this execution, if there is one. */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_GO_UP,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.GoUp.Tooltip",
      image = "ui/images/up.svg")
  public void goUp() {
    try {
      String parentId = execution.getParentId();
      if (parentId == null) {
        return;
      }
      ExecutionInfoLocation location = perspective.getLocationMap().get(locationName);
      if (location == null) {
        return;
      }
      IExecutionInfoLocation iLocation = location.getExecutionInfoLocation();

      // This parent ID is the ID of the transform or action.
      // We need to find the parent of this child
      //
      String grandParentId = iLocation.findParentId(parentId);
      if (grandParentId == null) {
        return;
      }
      Execution grandParent = iLocation.getExecution(grandParentId);

      // Open this execution with state. Don't load the logging text.
      //
      ExecutionState state = iLocation.getExecutionState(grandParent.getId(), false);
      perspective.createExecutionViewer(locationName, grandParent, state);
    } catch (Exception e) {
      new ErrorDialog(getShell(), CONST_ERROR, "Error navigating up to parent execution", e);
    }
  }

  /**
   * Re-inflates the executed workflow from the XML and metadata stored with the execution and
   * opens it in the data orchestration perspective, with the execution's variables and parameters
   * applied.
   */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_VIEW_EXECUTOR,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.ViewExecutor.Tooltip",
      image = "ui/images/view.svg",
      separator = true)
  public void viewExecutor() {
    try {
      String workflowXml = execution.getExecutorXml();
      Node workflowNode = XmlHandler.loadXmlString(workflowXml, WorkflowMeta.XML_TAG);

      // Also inflate the metadata
      //
      String metadataJson = execution.getMetadataJson();
      SerializableMetadataProvider metadataProvider =
          new SerializableMetadataProvider(metadataJson);

      // The variables set
      //
      IVariables variables = Variables.getADefaultVariableSpace();
      variables.setVariables(execution.getVariableValues());
      variables.setVariables(execution.getParameterValues());

      WorkflowMeta workflowMeta = new WorkflowMeta(workflowNode, metadataProvider, variables);

      HopDataOrchestrationPerspective p = HopGui.getDataOrchestrationPerspective();
      HopGuiWorkflowGraph graph =
          (HopGuiWorkflowGraph) p.addWorkflow(hopGui, workflowMeta, new HopWorkflowFileType<>());
      graph.setVariables(variables);
      p.activate();
    } catch (Exception e) {
      new ErrorDialog(getShell(), CONST_ERROR, "Error viewing the executor", e);
    }
  }

  /** Shows the metadata that was stored alongside this execution. */
  @GuiToolbarElement(
      root = GUI_PLUGIN_TOOLBAR_PARENT_ID,
      id = TOOLBAR_ITEM_VIEW_METADATA,
      toolTip = "i18n::WorkflowExecutionViewer.ToolbarElement.ViewMetadata.Tooltip",
      image = "ui/images/metadata.svg")
  public void viewMetadata() {
    super.viewMetadata(execution);
  }
}
apache/ignite
37,015
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/IgniteBaselineAffinityTopologyActivationTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.cluster.BaselineNode; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.cluster.ClusterState; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.WALMode; import org.apache.ignite.internal.IgniteEx; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.cluster.DetachedClusterNode; import 
org.apache.ignite.internal.managers.communication.GridIoMessage; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.GridDhtPartitionsSingleMessage; import org.apache.ignite.internal.processors.cluster.BaselineTopology; import org.apache.ignite.internal.processors.cluster.BaselineTopologyHistory; import org.apache.ignite.internal.processors.cluster.BaselineTopologyHistoryItem; import org.apache.ignite.internal.processors.cluster.DiscoveryDataClusterState; import org.apache.ignite.internal.util.lang.GridAbsPredicate; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.plugin.extensions.communication.Message; import org.apache.ignite.spi.IgniteSpiException; import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.junit.Assert; import org.junit.Test; import static org.apache.ignite.cache.CacheMode.PARTITIONED; /** * */ public class IgniteBaselineAffinityTopologyActivationTest extends GridCommonAbstractTest { /** */ private String consId; /** Entries count to add to cache. 
*/ private static final int ENTRIES_COUNT = 100; /** */ private static final String CACHE_NAME = "dfltCache"; /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); if (consId != null) cfg.setConsistentId(consId); cfg.setDataStorageConfiguration( new DataStorageConfiguration().setDefaultDataRegionConfiguration( new DataRegionConfiguration() .setPersistenceEnabled(true).setMaxSize(10L * 1024 * 1024) ).setWalMode(WALMode.LOG_ONLY) ); cfg.setCommunicationSpi(new SingleMessageInterceptorCommunicationSpi()); cfg.setCacheConfiguration(new CacheConfiguration<Integer, Integer>() .setName(CACHE_NAME) .setCacheMode(PARTITIONED) .setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL) .setBackups(1) .setAffinity(new RendezvousAffinityFunction(32, null)) ); return cfg; } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { super.beforeTest(); cleanPersistenceDir(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { stopAllGrids(false); cleanPersistenceDir(); } /** * Verifies that when old but compatible node * (it is the node that once wasn't presented in branchingHistory but hasn't participated in any branching point) * joins the cluster after restart, cluster gets activated. 
*/ @Test public void testAutoActivationWithCompatibleOldNode() throws Exception { startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C").cluster().state(ClusterState.ACTIVE); stopAllGrids(false); startGridWithConsistentId("A"); startGridWithConsistentId("B").cluster().state(ClusterState.ACTIVE); { IgniteEx nodeA = grid("A"); assertNotNull(nodeA.cluster().currentBaselineTopology()); assertEquals(3, nodeA.cluster().currentBaselineTopology().size()); assertTrue(nodeA.cluster().state().active()); } stopAllGrids(false); startGridWithConsistentId("A"); startGridWithConsistentId("B"); { IgniteEx nodeA = grid("A"); assertNotNull(nodeA.cluster().currentBaselineTopology()); assertEquals(3, nodeA.cluster().currentBaselineTopology().size()); assertFalse(nodeA.cluster().state().active()); } final Ignite nodeC = startGridWithConsistentId("C"); boolean active = GridTestUtils.waitForCondition( new GridAbsPredicate() { @Override public boolean apply() { return nodeC.cluster().state().active(); } }, 10_000 ); assertTrue(active); } /** * IgniteCluster::setBaselineTopology(long topVer) should throw an exception * when online node from current BaselineTopology is not presented in topology version. 
*/ @Test public void testBltChangeTopVerRemoveOnlineNodeFails() throws Exception { Ignite ignite = startGridWithConsistentId("A"); ignite.cluster().baselineAutoAdjustEnabled(false); ignite.cluster().state(ClusterState.ACTIVE); long singleNodeTopVer = ignite.cluster().topologyVersion(); startGridWithConsistentId("OnlineConsID"); ignite.cluster().setBaselineTopology(baselineNodes(ignite.cluster().forServers().nodes())); boolean expectedExThrown = false; try { ignite.cluster().setBaselineTopology(singleNodeTopVer); } catch (IgniteException e) { String errMsg = e.getMessage(); assertTrue(errMsg.startsWith("Removing online nodes")); assertTrue(errMsg.contains("[OnlineConsID]")); expectedExThrown = true; } assertTrue(expectedExThrown); } /** * Verifies that online nodes cannot be removed from BaselineTopology (this may change in future). */ @Test public void testOnlineNodesCannotBeRemovedFromBaselineTopology() throws Exception { Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("OnlineConsID"); nodeC.cluster().baselineAutoAdjustEnabled(false); nodeC.cluster().state(ClusterState.ACTIVE); boolean expectedExIsThrown = false; try { nodeC.cluster().setBaselineTopology(Arrays.asList((BaselineNode)nodeA.cluster().localNode(), nodeB.cluster().localNode())); } catch (IgniteException e) { String errMsg = e.getMessage(); assertTrue(errMsg.startsWith("Removing online nodes")); assertTrue(errMsg.contains("[OnlineConsID]")); expectedExIsThrown = true; } assertTrue(expectedExIsThrown); } /** * */ @Test public void testNodeFailsToJoinWithIncompatiblePreviousBaselineTopology() throws Exception { startGridWithConsistentId("A"); startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeC.cluster().baselineAutoAdjustEnabled(false); nodeC.cluster().state(ClusterState.ACTIVE); stopAllGrids(false); Ignite nodeA = startGridWithConsistentId("A"); 
startGridWithConsistentId("B").cluster().state(ClusterState.ACTIVE); nodeA.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); stopAllGrids(false); startGridWithConsistentId("C").cluster().state(ClusterState.ACTIVE); stopGrid("C", false); startGridWithConsistentId("A"); startGridWithConsistentId("B"); boolean expectedExThrown = false; try { startGridWithConsistentId("C"); } catch (IgniteCheckedException e) { expectedExThrown = true; if (e.getCause() != null && e.getCause().getCause() != null) { Throwable rootCause = e.getCause().getCause(); if (!(rootCause instanceof IgniteSpiException) || !rootCause.getMessage().contains("not compatible")) Assert.fail("Unexpected ignite exception was thrown: " + e); } else throw e; } assertTrue("Expected exception wasn't thrown.", expectedExThrown); } /** * Verifies scenario when parts of grid were activated independently they are not allowed to join * into the same grid again (due to risks of incompatible data modifications). 
*/ @Test public void testIncompatibleBltNodeIsProhibitedToJoinCluster() throws Exception { startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C").cluster().state(ClusterState.ACTIVE); stopAllGrids(false); startGridWithConsistentId("A"); startGridWithConsistentId("B").cluster().state(ClusterState.ACTIVE); stopAllGrids(false); startGridWithConsistentId("C").cluster().state(ClusterState.ACTIVE); stopAllGrids(false); startGridWithConsistentId("A"); startGridWithConsistentId("B"); boolean expectedExThrown = false; try { startGridWithConsistentId("C"); } catch (IgniteCheckedException e) { expectedExThrown = true; if (e.getCause() != null && e.getCause().getCause() != null) { Throwable rootCause = e.getCause().getCause(); if (!(rootCause instanceof IgniteSpiException) || !rootCause.getMessage().contains("not compatible")) Assert.fail("Unexpected ignite exception was thrown: " + e); } else throw e; } assertTrue("Expected exception wasn't thrown.", expectedExThrown); } /** * Test verifies that node with out-of-data but still compatible Baseline Topology is allowed to join the cluster. 
*/ @Test public void testNodeWithOldBltIsAllowedToJoinCluster() throws Exception { final long expectedHash1 = (long)"A".hashCode() + "B".hashCode() + "C".hashCode(); BaselineTopologyVerifier verifier1 = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNotNull(blt); assertEquals(3, blt.consistentIds().size()); long activationHash = U.field(blt, "branchingPntHash"); assertEquals(expectedHash1, activationHash); } }; final long expectedHash2 = (long)"A".hashCode() + "B".hashCode(); BaselineTopologyVerifier verifier2 = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNotNull(blt); assertEquals(3, blt.consistentIds().size()); long activationHash = U.field(blt, "branchingPntHash"); assertEquals(expectedHash2, activationHash); } }; Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeC.cluster().state(ClusterState.ACTIVE); verifyBaselineTopologyOnNodes(verifier1, new Ignite[] {nodeA, nodeB, nodeC}); stopAllGrids(false); nodeA = startGridWithConsistentId("A"); nodeB = startGridWithConsistentId("B"); nodeB.cluster().state(ClusterState.ACTIVE); verifyBaselineTopologyOnNodes(verifier2, new Ignite[] {nodeA, nodeB}); stopAllGrids(false); nodeA = startGridWithConsistentId("A"); nodeB = startGridWithConsistentId("B"); nodeC = startGridWithConsistentId("C"); awaitPartitionMapExchange(); verifyBaselineTopologyOnNodes(verifier1, new Ignite[] {nodeA, nodeB, nodeC}); } /** * * Test verifies that restart node from baseline when PME and BLT change processes * are taking place in the cluster simultaneously doesn't lead to shut down of alive cluster nodes. * * @throws Exception If failed. 
*/ @Test public void testNodeJoinsDuringPartitionMapExchange() throws Exception { startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C"); IgniteEx grid = grid("B"); grid.cluster().baselineAutoAdjustEnabled(false); grid.cluster().state(ClusterState.ACTIVE); IgniteCache<Object, Object> cache = grid.getOrCreateCache(CACHE_NAME); for (int i = 0; i < 100; i++) cache.put(i, i * 2); awaitPartitionMapExchange(); final long topVer = grid.cluster().topologyVersion() + 1; final CountDownLatch latch = new CountDownLatch(1); SingleMessageInterceptorCommunicationSpi commSpi = (SingleMessageInterceptorCommunicationSpi)grid .configuration().getCommunicationSpi(); commSpi.blockMsgsWithLatch(latch); try { GridTestUtils.runAsync( () -> startGridWithConsistentId("D") ).get(20_000); } catch (Exception ignored) { // timeout exception is expected here } try { GridTestUtils.runAsync( () -> grid.cluster().setBaselineTopology(topVer) ).get(10_000); } catch (Exception ignored) { // timeout exception is expected here } IgniteInternalFuture restartFut = GridTestUtils.runAsync( () -> { try { stopGrid("C", true); startGridWithConsistentId("C"); } catch (Exception ignored) { //ignored } } ); latch.countDown(); restartFut.get(); awaitPartitionMapExchange(); long expActivationHash = (long)"A".hashCode() + "B".hashCode() + "C".hashCode(); checkBaselineTopologyOnNode(grid("A"), 1, 1, 1, expActivationHash); checkBaselineTopologyOnNode(grid("B"), 1, 1, 1, expActivationHash); checkBaselineTopologyOnNode(grid("C"), 1, 1, 1, expActivationHash); checkBaselineTopologyOnNode(grid("D"), 1, 1, 1, expActivationHash); } /** * @param ig Ignite. * @param expBltId Expected BaselineTopology ID. * @param expBltHistSize Expected Baseline history size. * @param expBranchingHistSize Expected branching history size. * @param expActivationHash Expected activation hash. 
*/ private void checkBaselineTopologyOnNode( Ignite ig, int expBltId, int expBltHistSize, int expBranchingHistSize, long expActivationHash) { BaselineTopology blt = getBaselineTopology(ig); BaselineTopologyHistory bltHist = getBaselineTopologyHistory(ig); assertNotNull(bltHist); assertEquals(expBltId, blt.id()); assertEquals(expBltHistSize, bltHist.history().size()); BaselineTopologyHistoryItem histItem = bltHist.history().get(0); assertEquals(expBranchingHistSize, histItem.branchingHistory().size()); assertEquals(expActivationHash, (long)histItem.branchingHistory().get(0)); } /** * Test verifies that node with set up BaselineTopology is not allowed to join the cluster * in the process of on-going first activation. * * @throws Exception If failed. */ @Test public void testNodeWithBltIsNotAllowedToJoinClusterDuringFirstActivation() throws Exception { Ignite nodeC = startGridWithConsistentId("C"); nodeC.cluster().state(ClusterState.ACTIVE); stopGrid("C", false); Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); final CountDownLatch latch = new CountDownLatch(1); SingleMessageInterceptorCommunicationSpi commSpi = (SingleMessageInterceptorCommunicationSpi)nodeB .configuration().getCommunicationSpi(); commSpi.blockMsgsWithLatch(latch); GridTestUtils.runAsync( () -> { try { nodeA.cluster().state(ClusterState.ACTIVE); } catch (Exception e) { log.warning("Exception during activation", e); } }); try { startGridWithConsistentId("C"); } catch (Exception e) { Throwable cause = e.getCause(); while (!(cause instanceof IgniteSpiException)) cause = cause.getCause(); assertNotNull(cause); String msg = cause.getMessage(); assertNotNull(msg); assertTrue(msg.startsWith("Node with set up BaselineTopology is not allowed " + "to join cluster in the process of first activation:")); } latch.countDown(); } /** * Verifies that when new node outside of baseline topology joins active cluster with BLT already set * it receives BLT from the cluster and 
stores it locally. */ @Test public void testNewNodeJoinsToActiveCluster() throws Exception { Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeC.cluster().state(ClusterState.ACTIVE); BaselineTopologyVerifier verifier1 = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNotNull(blt); assertEquals(3, blt.consistentIds().size()); } }; verifyBaselineTopologyOnNodes(verifier1, new Ignite[] {nodeA, nodeB, nodeC}); Ignite nodeD = startGridWithConsistentId("D"); verifyBaselineTopologyOnNodes(verifier1, new Ignite[] {nodeD}); stopAllGrids(false); nodeD = startGridWithConsistentId("D"); assertFalse(nodeD.cluster().state().active()); verifyBaselineTopologyOnNodes(verifier1, new Ignite[] {nodeD}); } /** * */ @Test public void testRemoveNodeFromBaselineTopology() throws Exception { final long expectedActivationHash = (long)"A".hashCode() + "C".hashCode(); BaselineTopologyVerifier verifier = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNotNull(blt); assertEquals(2, blt.consistentIds().size()); long activationHash = U.field(blt, "branchingPntHash"); assertEquals(expectedActivationHash, activationHash); } }; Ignite nodeA = startGridWithConsistentId("A"); startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeA.cluster().baselineAutoAdjustEnabled(false); nodeC.cluster().state(ClusterState.ACTIVE); stopGrid("B", false); nodeA.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); boolean activated = GridTestUtils.waitForCondition(new GridAbsPredicate() { @Override public boolean apply() { return grid("A").cluster().state().active(); } }, 10_000); assertEquals(true, activated); verifyBaselineTopologyOnNodes(verifier, new Ignite[] {nodeA, nodeC}); stopAllGrids(false); nodeA = startGridWithConsistentId("A"); nodeC = startGridWithConsistentId("C"); 
activated = GridTestUtils.waitForCondition(new GridAbsPredicate() { @Override public boolean apply() { return grid("A").cluster().state().active(); } }, 10_000); assertTrue(activated); verifyBaselineTopologyOnNodes(verifier, new Ignite[] {nodeA, nodeC}); } /** * */ @Test public void testAddNodeToBaselineTopology() throws Exception { final long expectedActivationHash = (long)"A".hashCode() + "B".hashCode() + "C".hashCode() + "D".hashCode(); BaselineTopologyVerifier verifier = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNotNull(blt); assertEquals(4, blt.consistentIds().size()); long activationHash = U.field(blt, "branchingPntHash"); assertEquals(expectedActivationHash, activationHash); } }; Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeA.cluster().baselineAutoAdjustEnabled(false); nodeC.cluster().state(ClusterState.ACTIVE); IgniteEx nodeD = (IgniteEx)startGridWithConsistentId("D"); nodeD.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); verifyBaselineTopologyOnNodes(verifier, new Ignite[]{nodeA, nodeB, nodeC, nodeD}); } /** * Verifies that baseline topology is removed successfully through baseline changing API. 
*/ @Test public void testRemoveBaselineTopology() throws Exception { BaselineTopologyVerifier verifier = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNull(blt); } }; Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); Ignite nodeC = startGridWithConsistentId("C"); nodeA.cluster().baselineAutoAdjustEnabled(false); nodeA.cluster().state(ClusterState.ACTIVE); nodeA.cluster().setBaselineTopology(null); verifyBaselineTopologyOnNodes(verifier, new Ignite[] {nodeA, nodeB, nodeC}); } /** */ private interface BaselineTopologyVerifier { /** */ public void verify(BaselineTopology blt); } /** */ private interface BaselineTopologyHistoryVerifier { /** */ public void verify(BaselineTopologyHistory bltHist); } /** */ private void verifyBaselineTopologyOnNodes(BaselineTopologyVerifier bltVerifier, Ignite[] igs) { for (Ignite ig : igs) { BaselineTopology blt = getBaselineTopology(ig); bltVerifier.verify(blt); } } /** */ private void verifyBaselineTopologyHistoryOnNodes(BaselineTopologyHistoryVerifier bltHistVerifier, Ignite[] igs) { for (Ignite ig : igs) { BaselineTopologyHistory blt = getBaselineTopologyHistory(ig); bltHistVerifier.verify(blt); } } /** */ private Ignite startGridWithConsistentId(String consId) throws Exception { this.consId = consId; return startGrid(consId); } /** * Verifies that when new node joins already active cluster and new activation request is issued, * no changes to BaselineTopology branching history happen. 
*/ @Test public void testActivationHashIsNotUpdatedOnMultipleActivationRequests() throws Exception { final long expectedActivationHash = (long)"A".hashCode(); BaselineTopologyVerifier verifier = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { long activationHash = U.field(blt, "branchingPntHash"); assertEquals(expectedActivationHash, activationHash); } }; Ignite nodeA = startGridWithConsistentId("A"); nodeA.cluster().state(ClusterState.ACTIVE); Ignite nodeB = startGridWithConsistentId("B"); nodeA.cluster().state(ClusterState.ACTIVE); verifyBaselineTopologyOnNodes(verifier, new Ignite[] {nodeA, nodeB}); } /** * Creates BaselineNode with specific attribute indicating that this node is not client. */ private BaselineNode createBaselineNodeWithConsId(String consId) { Map<String, Object> attrs = new HashMap<>(); attrs.put("org.apache.ignite.cache.client", false); return new DetachedClusterNode(consId, attrs); } /** */ @Test public void testAutoActivationSimple() throws Exception { startGrids(3); IgniteEx srv = grid(0); srv.cluster().state(ClusterState.ACTIVE); createAndFillCache(srv); // TODO: final implementation should work with cancel == true. 
stopAllGrids(); //note: no call for activation after grid restart startGrids(3); final Ignite ig = grid(0); boolean clusterActive = GridTestUtils.waitForCondition( new GridAbsPredicate() { @Override public boolean apply() { return ig.cluster().state().active(); } }, 10_000); assertTrue(clusterActive); checkDataInCache((IgniteEx)ig); } /** * */ @Test public void testNoAutoActivationOnJoinNewNodeToInactiveCluster() throws Exception { startGrids(2); IgniteEx srv = grid(0); srv.cluster().state(ClusterState.ACTIVE); awaitPartitionMapExchange(); assertTrue(srv.cluster().state().active()); srv.cluster().state(ClusterState.INACTIVE); assertFalse(srv.cluster().state().active()); startGrid(2); Thread.sleep(3_000); assertFalse(srv.cluster().state().active()); } /** * Verifies that neither BaselineTopology nor BaselineTopologyHistory are changed when cluster is deactivated. */ @Test public void testBaselineTopologyRemainsTheSameOnClusterDeactivation() throws Exception { startGrids(2); IgniteEx srv = grid(0); srv.cluster().state(ClusterState.ACTIVE); awaitPartitionMapExchange(); assertTrue(srv.cluster().state().active()); srv.cluster().state(ClusterState.INACTIVE); BaselineTopology blt = getBaselineTopology(srv); BaselineTopologyHistory bltHist = getBaselineTopologyHistory(srv); assertEquals(0, blt.id()); assertEquals(2, blt.consistentIds().size()); assertEquals(1, blt.branchingHistory().size()); assertEquals(0, bltHist.history().size()); } /** * */ @Test public void testBaselineHistorySyncWithNewNode() throws Exception { final long expectedBranchingHash = "A".hashCode() + "B".hashCode() + "C".hashCode(); BaselineTopologyHistoryVerifier verifier = new BaselineTopologyHistoryVerifier() { @Override public void verify(BaselineTopologyHistory bltHist) { assertNotNull(bltHist); assertEquals(1, bltHist.history().size()); BaselineTopologyHistoryItem histItem = bltHist.history().get(0); assertEquals(1, histItem.branchingHistory().size()); long actualBranchingHash = 
histItem.branchingHistory().get(0); assertEquals(expectedBranchingHash, actualBranchingHash); } }; Ignite nodeA = startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C"); nodeA.cluster().baselineAutoAdjustEnabled(false); nodeA.cluster().state(ClusterState.ACTIVE); stopGrid("C", false); nodeA.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); startGridWithConsistentId("D"); stopAllGrids(false); Ignite nodeD = startGridWithConsistentId("D"); verifyBaselineTopologyHistoryOnNodes(verifier, new Ignite[] {nodeD}); } /** * */ @Test public void testBaselineHistorySyncWithOldNodeWithCompatibleHistory() throws Exception { final long expectedBranchingHash0 = "A".hashCode() + "B".hashCode() + "C".hashCode(); final long expectedBranchingHash1 = "A".hashCode() + "B".hashCode(); BaselineTopologyHistoryVerifier verifier = new BaselineTopologyHistoryVerifier() { @Override public void verify(BaselineTopologyHistory bltHist) { assertNotNull(bltHist); assertEquals(2, bltHist.history().size()); BaselineTopologyHistoryItem histItem = bltHist.history().get(0); assertEquals(1, histItem.branchingHistory().size()); long actualBranchingHash0 = histItem.branchingHistory().get(0); assertEquals(expectedBranchingHash0, actualBranchingHash0); histItem = bltHist.history().get(1); assertEquals(1, histItem.branchingHistory().size()); long actualBranchingHash1 = histItem.branchingHistory().get(0); assertEquals(expectedBranchingHash1, actualBranchingHash1); } }; Ignite nodeA = startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C"); nodeA.cluster().baselineAutoAdjustEnabled(false); nodeA.cluster().state(ClusterState.ACTIVE); stopGrid("C", false); nodeA.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); stopGrid("B", false); nodeA.cluster().setBaselineTopology(baselineNodes(nodeA.cluster().forServers().nodes())); startGridWithConsistentId("B"); 
stopAllGrids(false); startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); verifyBaselineTopologyHistoryOnNodes(verifier, new Ignite[] {nodeB}); } /** * @throws Exception if failed. */ @Test public void testBaselineNotDeletedOnDeactivation() throws Exception { Ignite nodeA = startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C"); nodeA.cluster().state(ClusterState.ACTIVE); assertNotNull(nodeA.cluster().currentBaselineTopology()); nodeA.cluster().state(ClusterState.INACTIVE); stopAllGrids(); nodeA = startGridWithConsistentId("A"); startGridWithConsistentId("B"); startGridWithConsistentId("C"); final Ignite ig = nodeA; boolean clusterActive = GridTestUtils.waitForCondition( new GridAbsPredicate() { @Override public boolean apply() { return ig.cluster().state().active(); } }, 10_000); assertNotNull(nodeA.cluster().currentBaselineTopology()); assertTrue(clusterActive); } /** * */ @Test public void testNodeWithBltIsProhibitedToJoinNewCluster() throws Exception { BaselineTopologyVerifier nullVerifier = new BaselineTopologyVerifier() { @Override public void verify(BaselineTopology blt) { assertNull(blt); } }; Ignite nodeC = startGridWithConsistentId("C"); nodeC.cluster().state(ClusterState.ACTIVE); stopGrid("C", false); Ignite nodeA = startGridWithConsistentId("A"); Ignite nodeB = startGridWithConsistentId("B"); verifyBaselineTopologyOnNodes(nullVerifier, new Ignite[] {nodeA, nodeB}); boolean expectedExThrown = false; try { startGridWithConsistentId("C"); } catch (IgniteCheckedException e) { expectedExThrown = true; if (e.getCause() != null && e.getCause().getCause() != null) { Throwable rootCause = e.getCause().getCause(); if (!(rootCause instanceof IgniteSpiException) || !rootCause.getMessage().contains("Node with set up BaselineTopology")) Assert.fail("Unexpected ignite exception was thrown: " + e); } else throw e; } assertTrue("Expected exception wasn't thrown.", expectedExThrown); } /** * Retrieves 
baseline topology from ignite node instance. * * @param ig Ig. */ private BaselineTopology getBaselineTopology(Ignite ig) { return ((DiscoveryDataClusterState)U.field( (Object)U.field( (Object)U.field(ig, "ctx"), "stateProc"), "globalState")) .baselineTopology(); } /** */ private BaselineTopologyHistory getBaselineTopologyHistory(Ignite ig) { return U.field( (Object)U.field( (Object)U.field(ig, "ctx"), "stateProc"), "bltHist"); } /** */ private void checkDataInCache(IgniteEx srv) { IgniteCache<Object, Object> cache = srv.cache(DEFAULT_CACHE_NAME); for (int i = 0; i < ENTRIES_COUNT; i++) { TestValue testVal = (TestValue)cache.get(i); assertNotNull(testVal); assertEquals(i, testVal.id); } } /** */ private void createAndFillCache(Ignite srv) { IgniteCache cache = srv.getOrCreateCache(cacheConfiguration()); for (int i = 0; i < ENTRIES_COUNT; i++) cache.put(i, new TestValue(i, "str" + i)); } /** */ private CacheConfiguration cacheConfiguration() { return new CacheConfiguration() .setName(DEFAULT_CACHE_NAME) .setCacheMode(PARTITIONED) .setAtomicityMode(CacheAtomicityMode.ATOMIC) .setBackups(2) .setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); } /** * TcpCommunicationSpi aimed to delay {@link GridDhtPartitionsSingleMessage} to emulate PME hanging. */ private static class SingleMessageInterceptorCommunicationSpi extends TcpCommunicationSpi { /** */ private volatile CountDownLatch singleMsgSndLatch; /** {@inheritDoc} */ @Override public void sendMessage( ClusterNode node, Message msg, IgniteInClosure<IgniteException> ackC ) throws IgniteSpiException { if (((GridIoMessage)msg).message() instanceof GridDhtPartitionsSingleMessage) { try { if (singleMsgSndLatch != null) singleMsgSndLatch.await(); } catch (Exception ignored) { // Ignore. 
} } super.sendMessage(node, msg, ackC); } /** */ void blockMsgsWithLatch(CountDownLatch latch) { singleMsgSndLatch = latch; } } /** */ private static final class TestValue { /** */ private final int id; /** */ private final String strId; /** */ private TestValue(int id, String strId) { this.id = id; this.strId = strId; } } /** */ private Collection<BaselineNode> baselineNodes(Collection<ClusterNode> clNodes) { Collection<BaselineNode> res = new ArrayList<>(clNodes.size()); for (ClusterNode clN : clNodes) res.add(clN); return res; } }
googleapis/google-cloud-java
37,272
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/CreateIndexRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/visionai/v1/warehouse.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.visionai.v1; /** * * * <pre> * Message for creating an Index. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.CreateIndexRequest} */ public final class CreateIndexRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.CreateIndexRequest) CreateIndexRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateIndexRequest.newBuilder() to construct. 
private CreateIndexRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateIndexRequest() { parent_ = ""; indexId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateIndexRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_CreateIndexRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_CreateIndexRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.CreateIndexRequest.class, com.google.cloud.visionai.v1.CreateIndexRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. 
Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INDEX_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object indexId_ = ""; /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The indexId. */ @java.lang.Override public java.lang.String getIndexId() { java.lang.Object ref = indexId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); indexId_ = s; return s; } } /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for indexId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getIndexIdBytes() { java.lang.Object ref = indexId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); indexId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INDEX_FIELD_NUMBER = 3; private com.google.cloud.visionai.v1.Index index_; /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the index field is set. */ @java.lang.Override public boolean hasIndex() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The index. */ @java.lang.Override public com.google.cloud.visionai.v1.Index getIndex() { return index_ == null ? com.google.cloud.visionai.v1.Index.getDefaultInstance() : index_; } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.visionai.v1.IndexOrBuilder getIndexOrBuilder() { return index_ == null ? 
com.google.cloud.visionai.v1.Index.getDefaultInstance() : index_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, indexId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getIndex()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(indexId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, indexId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getIndex()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.visionai.v1.CreateIndexRequest)) { return super.equals(obj); } com.google.cloud.visionai.v1.CreateIndexRequest other = (com.google.cloud.visionai.v1.CreateIndexRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getIndexId().equals(other.getIndexId())) return false; if (hasIndex() != other.hasIndex()) return false; if (hasIndex()) { if 
(!getIndex().equals(other.getIndex())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + INDEX_ID_FIELD_NUMBER; hash = (53 * hash) + getIndexId().hashCode(); if (hasIndex()) { hash = (37 * hash) + INDEX_FIELD_NUMBER; hash = (53 * hash) + getIndex().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.visionai.v1.CreateIndexRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.visionai.v1.CreateIndexRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for creating an Index. * </pre> * * Protobuf type {@code google.cloud.visionai.v1.CreateIndexRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.CreateIndexRequest) com.google.cloud.visionai.v1.CreateIndexRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_CreateIndexRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_CreateIndexRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.visionai.v1.CreateIndexRequest.class, com.google.cloud.visionai.v1.CreateIndexRequest.Builder.class); } // Construct using com.google.cloud.visionai.v1.CreateIndexRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getIndexFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; indexId_ = ""; index_ = null; if (indexBuilder_ != null) { indexBuilder_.dispose(); indexBuilder_ = null; } return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.visionai.v1.WarehouseProto .internal_static_google_cloud_visionai_v1_CreateIndexRequest_descriptor; } @java.lang.Override public com.google.cloud.visionai.v1.CreateIndexRequest getDefaultInstanceForType() { return com.google.cloud.visionai.v1.CreateIndexRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.visionai.v1.CreateIndexRequest build() { com.google.cloud.visionai.v1.CreateIndexRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.visionai.v1.CreateIndexRequest buildPartial() { com.google.cloud.visionai.v1.CreateIndexRequest result = new com.google.cloud.visionai.v1.CreateIndexRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.visionai.v1.CreateIndexRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.indexId_ = indexId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.index_ = indexBuilder_ == null ? 
index_ : indexBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.visionai.v1.CreateIndexRequest) { return mergeFrom((com.google.cloud.visionai.v1.CreateIndexRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.visionai.v1.CreateIndexRequest other) { if (other == com.google.cloud.visionai.v1.CreateIndexRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getIndexId().isEmpty()) { indexId_ = other.indexId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasIndex()) { mergeIndex(other.getIndex()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { indexId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getIndexFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Value for the parent. The resource name of the Corpus under which * this index is created. Format: * `projects/{project_number}/locations/{location_id}/corpora/{corpus_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object indexId_ = ""; /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The indexId. */ public java.lang.String getIndexId() { java.lang.Object ref = indexId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); indexId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for indexId. */ public com.google.protobuf.ByteString getIndexIdBytes() { java.lang.Object ref = indexId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); indexId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. 
If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The indexId to set. * @return This builder for chaining. */ public Builder setIndexId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } indexId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearIndexId() { indexId_ = getDefaultInstance().getIndexId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The ID for the index. This will become the final resource name * for the index. If the user does not specify this value, it will be * generated by system. * * This value should be up to 63 characters, and valid characters * are /[a-z][0-9]-/. The first character must be a letter, the last could be * a letter or a number. * </pre> * * <code>string index_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for indexId to set. * @return This builder for chaining. 
*/ public Builder setIndexIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); indexId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.visionai.v1.Index index_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.visionai.v1.Index, com.google.cloud.visionai.v1.Index.Builder, com.google.cloud.visionai.v1.IndexOrBuilder> indexBuilder_; /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the index field is set. */ public boolean hasIndex() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The index. */ public com.google.cloud.visionai.v1.Index getIndex() { if (indexBuilder_ == null) { return index_ == null ? com.google.cloud.visionai.v1.Index.getDefaultInstance() : index_; } else { return indexBuilder_.getMessage(); } } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIndex(com.google.cloud.visionai.v1.Index value) { if (indexBuilder_ == null) { if (value == null) { throw new NullPointerException(); } index_ = value; } else { indexBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The index being created. 
* </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setIndex(com.google.cloud.visionai.v1.Index.Builder builderForValue) { if (indexBuilder_ == null) { index_ = builderForValue.build(); } else { indexBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeIndex(com.google.cloud.visionai.v1.Index value) { if (indexBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && index_ != null && index_ != com.google.cloud.visionai.v1.Index.getDefaultInstance()) { getIndexBuilder().mergeFrom(value); } else { index_ = value; } } else { indexBuilder_.mergeFrom(value); } if (index_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearIndex() { bitField0_ = (bitField0_ & ~0x00000004); index_ = null; if (indexBuilder_ != null) { indexBuilder_.dispose(); indexBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.Index.Builder getIndexBuilder() { bitField0_ |= 0x00000004; onChanged(); return getIndexFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.visionai.v1.IndexOrBuilder getIndexOrBuilder() { if (indexBuilder_ != null) { return indexBuilder_.getMessageOrBuilder(); } else { return index_ == null ? 
com.google.cloud.visionai.v1.Index.getDefaultInstance() : index_; } } /** * * * <pre> * Required. The index being created. * </pre> * * <code>.google.cloud.visionai.v1.Index index = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.visionai.v1.Index, com.google.cloud.visionai.v1.Index.Builder, com.google.cloud.visionai.v1.IndexOrBuilder> getIndexFieldBuilder() { if (indexBuilder_ == null) { indexBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.visionai.v1.Index, com.google.cloud.visionai.v1.Index.Builder, com.google.cloud.visionai.v1.IndexOrBuilder>( getIndex(), getParentForChildren(), isClean()); index_ = null; } return indexBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.CreateIndexRequest) } // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.CreateIndexRequest) private static final com.google.cloud.visionai.v1.CreateIndexRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.CreateIndexRequest(); } public static com.google.cloud.visionai.v1.CreateIndexRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateIndexRequest> PARSER = new com.google.protobuf.AbstractParser<CreateIndexRequest>() { @java.lang.Override public CreateIndexRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateIndexRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateIndexRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.visionai.v1.CreateIndexRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,245
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/UploadedMavenArtifact.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * A Maven artifact uploaded using the MavenArtifact directive. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedMavenArtifact} */ public final class UploadedMavenArtifact extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.UploadedMavenArtifact) UploadedMavenArtifactOrBuilder { private static final long serialVersionUID = 0L; // Use UploadedMavenArtifact.newBuilder() to construct. 
private UploadedMavenArtifact(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UploadedMavenArtifact() { uri_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UploadedMavenArtifact(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedMavenArtifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedMavenArtifact_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedMavenArtifact.class, com.google.cloudbuild.v1.UploadedMavenArtifact.Builder.class); } private int bitField0_; public static final int URI_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ @java.lang.Override public java.lang.String getUri() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILE_HASHES_FIELD_NUMBER = 2; private com.google.cloudbuild.v1.FileHashes fileHashes_; /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ @java.lang.Override public boolean hasFileHashes() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ @java.lang.Override public com.google.cloudbuild.v1.FileHashes getFileHashes() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ @java.lang.Override public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } public static final int PUSH_TIMING_FIELD_NUMBER = 3; private com.google.cloudbuild.v1.TimeSpan pushTiming_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. */ @java.lang.Override public boolean hasPushTiming() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpan getPushTiming() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getPushTiming()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uri_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFileHashes()); } if (((bitField0_ & 0x00000002) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPushTiming()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.UploadedMavenArtifact)) { return super.equals(obj); } com.google.cloudbuild.v1.UploadedMavenArtifact other = (com.google.cloudbuild.v1.UploadedMavenArtifact) obj; if (!getUri().equals(other.getUri())) return false; if (hasFileHashes() != other.hasFileHashes()) return false; if (hasFileHashes()) { if (!getFileHashes().equals(other.getFileHashes())) return false; } if (hasPushTiming() != other.hasPushTiming()) return false; if (hasPushTiming()) { if (!getPushTiming().equals(other.getPushTiming())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + URI_FIELD_NUMBER; hash = (53 * hash) + getUri().hashCode(); if (hasFileHashes()) { hash = (37 * hash) + FILE_HASHES_FIELD_NUMBER; hash = (53 * hash) + getFileHashes().hashCode(); } if (hasPushTiming()) { hash = (37 * hash) + PUSH_TIMING_FIELD_NUMBER; hash = (53 * hash) + getPushTiming().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.UploadedMavenArtifact parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.UploadedMavenArtifact prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A Maven artifact uploaded using the MavenArtifact directive. 
* </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.UploadedMavenArtifact} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.UploadedMavenArtifact) com.google.cloudbuild.v1.UploadedMavenArtifactOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedMavenArtifact_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedMavenArtifact_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.UploadedMavenArtifact.class, com.google.cloudbuild.v1.UploadedMavenArtifact.Builder.class); } // Construct using com.google.cloudbuild.v1.UploadedMavenArtifact.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFileHashesFieldBuilder(); getPushTimingFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; uri_ = ""; fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_UploadedMavenArtifact_descriptor; } @java.lang.Override public 
com.google.cloudbuild.v1.UploadedMavenArtifact getDefaultInstanceForType() { return com.google.cloudbuild.v1.UploadedMavenArtifact.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.UploadedMavenArtifact build() { com.google.cloudbuild.v1.UploadedMavenArtifact result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.UploadedMavenArtifact buildPartial() { com.google.cloudbuild.v1.UploadedMavenArtifact result = new com.google.cloudbuild.v1.UploadedMavenArtifact(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v1.UploadedMavenArtifact result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.uri_ = uri_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.fileHashes_ = fileHashesBuilder_ == null ? fileHashes_ : fileHashesBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pushTiming_ = pushTimingBuilder_ == null ? 
pushTiming_ : pushTimingBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.UploadedMavenArtifact) { return mergeFrom((com.google.cloudbuild.v1.UploadedMavenArtifact) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.UploadedMavenArtifact other) { if (other == com.google.cloudbuild.v1.UploadedMavenArtifact.getDefaultInstance()) return this; if (!other.getUri().isEmpty()) { uri_ = other.uri_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasFileHashes()) { mergeFileHashes(other.getFileHashes()); } if (other.hasPushTiming()) { mergePushTiming(other.getPushTiming()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { uri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getFileHashesFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getPushTimingFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object uri_ = ""; /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The uri. */ public java.lang.String getUri() { java.lang.Object ref = uri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); uri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return The bytes for uri. */ public com.google.protobuf.ByteString getUriBytes() { java.lang.Object ref = uri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); uri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @param value The uri to set. * @return This builder for chaining. 
*/ public Builder setUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @return This builder for chaining. */ public Builder clearUri() { uri_ = getDefaultInstance().getUri(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * URI of the uploaded artifact. * </pre> * * <code>string uri = 1;</code> * * @param value The bytes for uri to set. * @return This builder for chaining. */ public Builder setUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); uri_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloudbuild.v1.FileHashes fileHashes_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> fileHashesBuilder_; /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return Whether the fileHashes field is set. */ public boolean hasFileHashes() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> * * @return The fileHashes. */ public com.google.cloudbuild.v1.FileHashes getFileHashes() { if (fileHashesBuilder_ == null) { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } else { return fileHashesBuilder_.getMessage(); } } /** * * * <pre> * Hash types and values of the Maven Artifact. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } fileHashes_ = value; } else { fileHashesBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder setFileHashes(com.google.cloudbuild.v1.FileHashes.Builder builderForValue) { if (fileHashesBuilder_ == null) { fileHashes_ = builderForValue.build(); } else { fileHashesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder mergeFileHashes(com.google.cloudbuild.v1.FileHashes value) { if (fileHashesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && fileHashes_ != null && fileHashes_ != com.google.cloudbuild.v1.FileHashes.getDefaultInstance()) { getFileHashesBuilder().mergeFrom(value); } else { fileHashes_ = value; } } else { fileHashesBuilder_.mergeFrom(value); } if (fileHashes_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public Builder clearFileHashes() { bitField0_ = (bitField0_ & ~0x00000002); fileHashes_ = null; if (fileHashesBuilder_ != null) { fileHashesBuilder_.dispose(); fileHashesBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Hash types and values of the Maven Artifact. 
* </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashes.Builder getFileHashesBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFileHashesFieldBuilder().getBuilder(); } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ public com.google.cloudbuild.v1.FileHashesOrBuilder getFileHashesOrBuilder() { if (fileHashesBuilder_ != null) { return fileHashesBuilder_.getMessageOrBuilder(); } else { return fileHashes_ == null ? com.google.cloudbuild.v1.FileHashes.getDefaultInstance() : fileHashes_; } } /** * * * <pre> * Hash types and values of the Maven Artifact. * </pre> * * <code>.google.devtools.cloudbuild.v1.FileHashes file_hashes = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder> getFileHashesFieldBuilder() { if (fileHashesBuilder_ == null) { fileHashesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.FileHashes, com.google.cloudbuild.v1.FileHashes.Builder, com.google.cloudbuild.v1.FileHashesOrBuilder>( getFileHashes(), getParentForChildren(), isClean()); fileHashes_ = null; } return fileHashesBuilder_; } private com.google.cloudbuild.v1.TimeSpan pushTiming_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> pushTimingBuilder_; /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the pushTiming field is set. 
*/ public boolean hasPushTiming() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pushTiming. */ public com.google.cloudbuild.v1.TimeSpan getPushTiming() { if (pushTimingBuilder_ == null) { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } else { return pushTimingBuilder_.getMessage(); } } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (value == null) { throw new NullPointerException(); } pushTiming_ = value; } else { pushTimingBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setPushTiming(com.google.cloudbuild.v1.TimeSpan.Builder builderForValue) { if (pushTimingBuilder_ == null) { pushTiming_ = builderForValue.build(); } else { pushTimingBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergePushTiming(com.google.cloudbuild.v1.TimeSpan value) { if (pushTimingBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && pushTiming_ != null && pushTiming_ != com.google.cloudbuild.v1.TimeSpan.getDefaultInstance()) { getPushTimingBuilder().mergeFrom(value); } else { pushTiming_ = value; } } else { pushTimingBuilder_.mergeFrom(value); } if (pushTiming_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearPushTiming() { bitField0_ = (bitField0_ & ~0x00000004); pushTiming_ = null; if (pushTimingBuilder_ != null) { pushTimingBuilder_.dispose(); pushTimingBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpan.Builder getPushTimingBuilder() { bitField0_ |= 0x00000004; onChanged(); return getPushTimingFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloudbuild.v1.TimeSpanOrBuilder getPushTimingOrBuilder() { if (pushTimingBuilder_ != null) { return pushTimingBuilder_.getMessageOrBuilder(); } else { return pushTiming_ == null ? com.google.cloudbuild.v1.TimeSpan.getDefaultInstance() : pushTiming_; } } /** * * * <pre> * Output only. 
Stores timing information for pushing the specified artifact. * </pre> * * <code> * .google.devtools.cloudbuild.v1.TimeSpan push_timing = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder> getPushTimingFieldBuilder() { if (pushTimingBuilder_ == null) { pushTimingBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.TimeSpan, com.google.cloudbuild.v1.TimeSpan.Builder, com.google.cloudbuild.v1.TimeSpanOrBuilder>( getPushTiming(), getParentForChildren(), isClean()); pushTiming_ = null; } return pushTimingBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.UploadedMavenArtifact) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UploadedMavenArtifact) private static final com.google.cloudbuild.v1.UploadedMavenArtifact DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.UploadedMavenArtifact(); } public static com.google.cloudbuild.v1.UploadedMavenArtifact getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UploadedMavenArtifact> PARSER = new com.google.protobuf.AbstractParser<UploadedMavenArtifact>() { @java.lang.Override public UploadedMavenArtifact parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } 
catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UploadedMavenArtifact> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UploadedMavenArtifact> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.UploadedMavenArtifact getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,403
java-texttospeech/proto-google-cloud-texttospeech-v1/src/main/java/com/google/cloud/texttospeech/v1/MultiSpeakerVoiceConfig.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// NOTE(review): this file is produced by protoc from cloud_tts.proto; to change it,
// edit the .proto definition and regenerate — do not hand-edit this class.
// source: google/cloud/texttospeech/v1/cloud_tts.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.texttospeech.v1;

/**
 * Configuration for a multi-speaker text-to-speech setup. Enables the use of up
 * to two distinct voices in a single synthesis request.
 *
 * <p>Immutable message with a single repeated field, {@code speaker_voice_configs}
 * (field number 2). Instances are built via {@link #newBuilder()}.
 *
 * Protobuf type {@code google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig}
 */
public final class MultiSpeakerVoiceConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig)
    MultiSpeakerVoiceConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use MultiSpeakerVoiceConfig.newBuilder() to construct.
  private MultiSpeakerVoiceConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default (no-arg) instance: the repeated field starts as an empty list.
  private MultiSpeakerVoiceConfig() {
    speakerVoiceConfigs_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MultiSpeakerVoiceConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.texttospeech.v1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1_MultiSpeakerVoiceConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.texttospeech.v1.TextToSpeechProto
        .internal_static_google_cloud_texttospeech_v1_MultiSpeakerVoiceConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.class,
            com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.Builder.class);
  }

  public static final int SPEAKER_VOICE_CONFIGS_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>
      speakerVoiceConfigs_;

  /**
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   *
   * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>
      getSpeakerVoiceConfigsList() {
    return speakerVoiceConfigs_;
  }

  /**
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   *
   * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder>
      getSpeakerVoiceConfigsOrBuilderList() {
    return speakerVoiceConfigs_;
  }

  /**
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   *
   * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public int getSpeakerVoiceConfigsCount() {
    return speakerVoiceConfigs_.size();
  }

  /**
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   *
   * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice getSpeakerVoiceConfigs(
      int index) {
    return speakerVoiceConfigs_.get(index);
  }

  /**
   * Required. A list of configurations for the voices of the speakers. Exactly
   * two speaker voice configurations must be provided.
   *
   * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  @java.lang.Override
  public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder
      getSpeakerVoiceConfigsOrBuilder(int index) {
    return speakerVoiceConfigs_.get(index);
  }

  // Memoized initialization check: -1 = not yet computed, 0 = not initialized,
  // 1 = initialized. This message has no required proto2 fields, so it is
  // always initialized once checked.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each element of the repeated field as field number 2, followed
  // by any unknown fields preserved from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < speakerVoiceConfigs_.size(); i++) {
      output.writeMessage(2, speakerVoiceConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte length.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < speakerVoiceConfigs_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, speakerVoiceConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality: compares the repeated field and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig other =
        (com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig) obj;

    if (!getSpeakerVoiceConfigsList().equals(other.getSpeakerVoiceConfigsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); the multiplier constants are fixed by the
  // protobuf code generator. Result is memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSpeakerVoiceConfigsCount() > 0) {
      hash = (37 * hash) + SPEAKER_VOICE_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getSpeakerVoiceConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard generated parse entry points (delegate to PARSER). ----

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoids an unnecessary mergeFrom when this is the (empty) default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Configuration for a multi-speaker text-to-speech setup. Enables the use of up
   * to two distinct voices in a single synthesis request.
   *
   * <p>Builder for {@link MultiSpeakerVoiceConfig}. Bit 0x00000001 of
   * {@code bitField0_} tracks whether {@code speakerVoiceConfigs_} is a private
   * mutable copy (vs. a shared immutable list).
   *
   * Protobuf type {@code google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig}
   */
  public static final class Builder
      extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig)
      com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.texttospeech.v1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1_MultiSpeakerVoiceConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.texttospeech.v1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1_MultiSpeakerVoiceConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.class,
              com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.Builder.class);
    }

    // Construct using com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigs_ = java.util.Collections.emptyList();
      } else {
        speakerVoiceConfigs_ = null;
        speakerVoiceConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.texttospeech.v1.TextToSpeechProto
          .internal_static_google_cloud_texttospeech_v1_MultiSpeakerVoiceConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig getDefaultInstanceForType() {
      return com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig build() {
      com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig buildPartial() {
      com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig result =
          new com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field into the result, freezing a locally mutated
    // list as unmodifiable so the built message stays immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig result) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          speakerVoiceConfigs_ = java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.speakerVoiceConfigs_ = speakerVoiceConfigs_;
      } else {
        result.speakerVoiceConfigs_ = speakerVoiceConfigsBuilder_.build();
      }
    }

    // Generated hook for singular fields; this message has none, so the local
    // copy of bitField0_ is intentionally unused.
    private void buildPartial0(com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig) {
        return mergeFrom((com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Appends other's speaker configs to this builder's list. If this builder's
    // list is empty it adopts other's (immutable) list directly and clears the
    // mutability bit, avoiding a copy until a mutation is actually needed.
    public Builder mergeFrom(com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig other) {
      if (other == com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig.getDefaultInstance())
        return this;
      if (speakerVoiceConfigsBuilder_ == null) {
        if (!other.speakerVoiceConfigs_.isEmpty()) {
          if (speakerVoiceConfigs_.isEmpty()) {
            speakerVoiceConfigs_ = other.speakerVoiceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSpeakerVoiceConfigsIsMutable();
            speakerVoiceConfigs_.addAll(other.speakerVoiceConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.speakerVoiceConfigs_.isEmpty()) {
          if (speakerVoiceConfigsBuilder_.isEmpty()) {
            speakerVoiceConfigsBuilder_.dispose();
            speakerVoiceConfigsBuilder_ = null;
            speakerVoiceConfigs_ = other.speakerVoiceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            speakerVoiceConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSpeakerVoiceConfigsFieldBuilder()
                    : null;
          } else {
            speakerVoiceConfigsBuilder_.addAllMessages(other.speakerVoiceConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop: reads tags until end-of-message. Tag 18 is field 2
    // with length-delimited wire type (the repeated speaker config messages).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 18:
              {
                com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice m =
                    input.readMessage(
                        com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.parser(),
                        extensionRegistry);
                if (speakerVoiceConfigsBuilder_ == null) {
                  ensureSpeakerVoiceConfigsIsMutable();
                  speakerVoiceConfigs_.add(m);
                } else {
                  speakerVoiceConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>
        speakerVoiceConfigs_ = java.util.Collections.emptyList();

    // Copy-on-write guard: replaces a shared immutable list with a private
    // ArrayList before the first in-place mutation.
    private void ensureSpeakerVoiceConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        speakerVoiceConfigs_ =
            new java.util.ArrayList<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>(
                speakerVoiceConfigs_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice,
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder,
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder>
        speakerVoiceConfigsBuilder_;

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>
        getSpeakerVoiceConfigsList() {
      if (speakerVoiceConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
      } else {
        return speakerVoiceConfigsBuilder_.getMessageList();
      }
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public int getSpeakerVoiceConfigsCount() {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.size();
      } else {
        return speakerVoiceConfigsBuilder_.getCount();
      }
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice getSpeakerVoiceConfigs(
        int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.get(index);
      } else {
        return speakerVoiceConfigsBuilder_.getMessage(index);
      }
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setSpeakerVoiceConfigs(
        int index, com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.set(index, value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder setSpeakerVoiceConfigs(
        int index,
        com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addSpeakerVoiceConfigs(
        com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addSpeakerVoiceConfigs(
        int index, com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice value) {
      if (speakerVoiceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(index, value);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addSpeakerVoiceConfigs(
        com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addSpeakerVoiceConfigs(
        int index,
        com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder builderForValue) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder addAllSpeakerVoiceConfigs(
        java.lang.Iterable<? extends com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice>
            values) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, speakerVoiceConfigs_);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder clearSpeakerVoiceConfigs() {
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.clear();
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public Builder removeSpeakerVoiceConfigs(int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        ensureSpeakerVoiceConfigsIsMutable();
        speakerVoiceConfigs_.remove(index);
        onChanged();
      } else {
        speakerVoiceConfigsBuilder_.remove(index);
      }
      return this;
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder
        getSpeakerVoiceConfigsBuilder(int index) {
      return getSpeakerVoiceConfigsFieldBuilder().getBuilder(index);
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder
        getSpeakerVoiceConfigsOrBuilder(int index) {
      if (speakerVoiceConfigsBuilder_ == null) {
        return speakerVoiceConfigs_.get(index);
      } else {
        return speakerVoiceConfigsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<
            ? extends com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder>
        getSpeakerVoiceConfigsOrBuilderList() {
      if (speakerVoiceConfigsBuilder_ != null) {
        return speakerVoiceConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(speakerVoiceConfigs_);
      }
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder
        addSpeakerVoiceConfigsBuilder() {
      return getSpeakerVoiceConfigsFieldBuilder()
          .addBuilder(
              com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.getDefaultInstance());
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder
        addSpeakerVoiceConfigsBuilder(int index) {
      return getSpeakerVoiceConfigsFieldBuilder()
          .addBuilder(
              index,
              com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.getDefaultInstance());
    }

    /**
     * Required. A list of configurations for the voices of the speakers. Exactly
     * two speaker voice configurations must be provided.
     *
     * <code>repeated .google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice speaker_voice_configs = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     */
    public java.util.List<com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder>
        getSpeakerVoiceConfigsBuilderList() {
      return getSpeakerVoiceConfigsFieldBuilder().getBuilderList();
    }

    // Lazily creates the nested-message field builder; after creation the plain
    // list reference is nulled and all access goes through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice,
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder,
            com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder>
        getSpeakerVoiceConfigsFieldBuilder() {
      if (speakerVoiceConfigsBuilder_ == null) {
        speakerVoiceConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice,
                com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoice.Builder,
                com.google.cloud.texttospeech.v1.MultispeakerPrebuiltVoiceOrBuilder>(
                speakerVoiceConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        speakerVoiceConfigs_ = null;
      }
      return speakerVoiceConfigsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig)
  private static final com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig();
  }

  public static com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom, converting every failure into
  // an InvalidProtocolBufferException carrying the partially parsed message.
  private static final com.google.protobuf.Parser<MultiSpeakerVoiceConfig> PARSER =
      new com.google.protobuf.AbstractParser<MultiSpeakerVoiceConfig>() {
        @java.lang.Override
        public MultiSpeakerVoiceConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MultiSpeakerVoiceConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MultiSpeakerVoiceConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.texttospeech.v1.MultiSpeakerVoiceConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/storm
37,766
storm-server/src/test/java/org/apache/storm/daemon/supervisor/SlotTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The ASF licenses this file to you under the Apache License, Version
 * 2.0 (the "License"); you may not use this file except in compliance with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for the specific language governing permissions
 * and limitations under the License.
 */

package org.apache.storm.daemon.supervisor;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import org.apache.storm.daemon.supervisor.Slot.DynamicState;
import org.apache.storm.daemon.supervisor.Slot.MachineState;
import org.apache.storm.daemon.supervisor.Slot.StaticState;
import org.apache.storm.daemon.supervisor.Slot.TopoProfileAction;
import org.apache.storm.generated.ExecutorInfo;
import org.apache.storm.generated.LSWorkerHeartbeat;
import org.apache.storm.generated.LocalAssignment;
import org.apache.storm.generated.NodeInfo;
import org.apache.storm.generated.ProfileAction;
import org.apache.storm.generated.ProfileRequest;
import org.apache.storm.generated.WorkerResources;
import org.apache.storm.localizer.AsyncLocalizer;
import org.apache.storm.localizer.BlobChangingCallback;
import org.apache.storm.localizer.GoodToGo;
import org.apache.storm.localizer.LocallyCachedBlob;
import org.apache.storm.scheduler.ISupervisor;
import org.apache.storm.utils.LocalState;
import org.apache.storm.utils.Time;
import org.apache.storm.utils.Time.SimulatedTime;
import org.junit.jupiter.api.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.*;
import java.util.concurrent.ExecutionException;
import org.apache.storm.metric.StormMetricsRegistry;

/**
 * Unit tests for the {@link Slot} worker-slot state machine.
 *
 * <p>Each test drives {@code Slot.stateMachineStep(dynamicState, staticState)} one transition at a
 * time with mocked collaborators (localizer, container launcher, container, supervisor) and asserts
 * the resulting {@link MachineState} plus the side effects (kill/launch/localize calls) expected at
 * each step. All tests run inside a {@link SimulatedTime} scope so wall-clock assertions such as
 * {@code Time.currentTimeMillis() > 1000} are deterministic.
 *
 * <p>NOTE(review): the tests assert that simulated time advances by roughly 1000 ms per
 * {@code stateMachineStep} call — presumably the machine sleeps/waits on the simulated clock
 * between steps; confirm against the {@code Slot} implementation.
 */
public class SlotTest {

    /**
     * Builds a {@link WorkerResources} with the given CPU and on/off-heap memory.
     * A {@code null} argument leaves the corresponding field unset.
     */
    static WorkerResources mkWorkerResources(Double cpu, Double mem_on_heap, Double mem_off_heap) {
        WorkerResources resources = new WorkerResources();
        if (cpu != null) {
            resources.set_cpu(cpu);
        }
        if (mem_on_heap != null) {
            resources.set_mem_on_heap(mem_on_heap);
        }
        if (mem_off_heap != null) {
            resources.set_mem_off_heap(mem_off_heap);
        }
        return resources;
    }

    /** Builds a worker heartbeat for the given topology/port/executors stamped at {@code timeSecs}. */
    static LSWorkerHeartbeat mkWorkerHB(String id, int port, List<ExecutorInfo> exec, Integer timeSecs) {
        LSWorkerHeartbeat ret = new LSWorkerHeartbeat();
        ret.set_topology_id(id);
        ret.set_port(port);
        ret.set_executors(exec);
        ret.set_time_secs(timeSecs);
        return ret;
    }

    /** Wraps each int as a single-task {@link ExecutorInfo} (task_start == task_end == exec). */
    static List<ExecutorInfo> mkExecutorInfoList(int... executors) {
        ArrayList<ExecutorInfo> ret = new ArrayList<>(executors.length);
        for (int exec : executors) {
            ExecutorInfo execInfo = new ExecutorInfo();
            execInfo.set_task_start(exec);
            execInfo.set_task_end(exec);
            ret.add(execInfo);
        }
        return ret;
    }

    /**
     * Builds a {@link LocalAssignment} for topology {@code id} with the given executors.
     * A {@code null} resources argument leaves the resources field unset.
     */
    static LocalAssignment mkLocalAssignment(String id, List<ExecutorInfo> exec, WorkerResources resources) {
        LocalAssignment ret = new LocalAssignment();
        ret.set_topology_id(id);
        ret.set_executors(exec);
        if (resources != null) {
            ret.set_resources(resources);
        }
        return ret;
    }

    /**
     * {@code Slot.forSameTopology} should treat assignments as "same topology" when the topology id
     * matches — even if resources differ or executor order differs — and as different when either
     * side is null (unless both are) or the ids differ.
     */
    @Test
    public void testForSameTopology() {
        LocalAssignment a = mkLocalAssignment("A", mkExecutorInfoList(1, 2, 3, 4, 5),
            mkWorkerResources(100.0, 100.0, 100.0));
        // Same topology "A" but with different on-heap memory.
        LocalAssignment aResized = mkLocalAssignment("A", mkExecutorInfoList(1, 2, 3, 4, 5),
            mkWorkerResources(100.0, 200.0, 100.0));
        LocalAssignment b = mkLocalAssignment("B", mkExecutorInfoList(1, 2, 3, 4, 5, 6),
            mkWorkerResources(100.0, 100.0, 100.0));
        // Same topology "B" with the executor list in reverse order.
        LocalAssignment bReordered = mkLocalAssignment("B", mkExecutorInfoList(6, 5, 4, 3, 2, 1),
            mkWorkerResources(100.0, 100.0, 100.0));

        assertTrue(Slot.forSameTopology(null, null));
        assertTrue(Slot.forSameTopology(a, a));
        assertTrue(Slot.forSameTopology(a, aResized));
        assertTrue(Slot.forSameTopology(aResized, a));
        assertTrue(Slot.forSameTopology(b, bReordered));
        assertTrue(Slot.forSameTopology(bReordered, b));

        assertFalse(Slot.forSameTopology(a, null));
        assertFalse(Slot.forSameTopology(null, b));
        assertFalse(Slot.forSameTopology(a, b));
    }

    /** An EMPTY slot with no new assignment should stay EMPTY after one step of handleEmpty. */
    @Test
    public void testEmptyToEmpty() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            LocalState state = mock(LocalState.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            ISupervisor iSuper = mock(ISupervisor.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 1000, 1000, 1000, 1000,
                containerLauncher, "localhost", 8080, iSuper, state, cb, null, null, slotMetrics);
            DynamicState dynamicState = new DynamicState(null, null, null, slotMetrics);
            DynamicState nextState = Slot.handleEmpty(dynamicState, staticState);
            assertEquals(MachineState.EMPTY, nextState.state);
            // handleEmpty should have waited on the simulated clock.
            assertTrue(Time.currentTimeMillis() > 1000);
        }
    }

    /**
     * Happy path from EMPTY to RUNNING: a new assignment triggers blob download
     * (WAITING_FOR_BLOB_LOCALIZATION), then container launch (WAITING_FOR_WORKER_START), then
     * RUNNING once a fresh heartbeat is read; RUNNING is stable across further steps.
     */
    @Test
    public void testLaunchContainerFromEmpty() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String topoId = "NEW";
            List<ExecutorInfo> execList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment newAssignment =
                mkLocalAssignment(topoId, execList, mkWorkerResources(100.0, 100.0, 100.0));

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            Container container = mock(Container.class);
            LocalState state = mock(LocalState.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            when(containerLauncher.launchContainer(port, newAssignment, state)).thenReturn(container);
            LSWorkerHeartbeat hb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs());
            when(container.readHeartbeat()).thenReturn(hb, hb);

            @SuppressWarnings("unchecked")
            CompletableFuture<Void> blobFuture = mock(CompletableFuture.class);
            when(localizer.requestDownloadTopologyBlobs(newAssignment, port, cb)).thenReturn(blobFuture);

            ISupervisor iSuper = mock(ISupervisor.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
            DynamicState dynamicState = new DynamicState(null, null, null, slotMetrics)
                .withNewAssignment(newAssignment);

            // Step 1: EMPTY + new assignment -> request blob download, wait for localization.
            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb);
            assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
            assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(newAssignment, nextState.pendingLocalization);
            assertEquals(0, Time.currentTimeMillis());

            // Step 2: download completes -> launch the container, wait for worker start.
            nextState = Slot.stateMachineStep(nextState, staticState);
            verify(blobFuture).get(1000, TimeUnit.MILLISECONDS);
            verify(containerLauncher).launchContainer(port, newAssignment, state);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(newAssignment, nextState.currentAssignment);
            assertSame(container, nextState.container);
            assertEquals(0, Time.currentTimeMillis());

            // Step 3: heartbeat observed -> RUNNING.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(newAssignment, nextState.currentAssignment);
            assertSame(container, nextState.container);
            assertEquals(0, Time.currentTimeMillis());

            // Steps 4-5: RUNNING is stable; time advances while idling.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(newAssignment, nextState.currentAssignment);
            assertSame(container, nextState.container);
            assertTrue(Time.currentTimeMillis() > 1000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(newAssignment, nextState.currentAssignment);
            assertSame(container, nextState.container);
            assertTrue(Time.currentTimeMillis() > 2000);
        }
    }

    /**
     * Localization failure handling: a failed blob-download future should send the slot back
     * through EMPTY, re-request the download on the next pass, and finally launch once a download
     * succeeds. Uses three consecutively-stubbed futures: success-shaped, failing, then succeeding.
     */
    @Test
    public void testErrorHandlingWhenLocalizationFails() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String topoId = "NEW";
            List<ExecutorInfo> execList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment newAssignment =
                mkLocalAssignment(topoId, execList, mkWorkerResources(100.0, 100.0, 100.0));

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            Container container = mock(Container.class);
            LocalState state = mock(LocalState.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            when(containerLauncher.launchContainer(port, newAssignment, state)).thenReturn(container);
            LSWorkerHeartbeat hb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs());
            when(container.readHeartbeat()).thenReturn(hb, hb);

            @SuppressWarnings("unchecked")
            CompletableFuture<Void> blobFuture = mock(CompletableFuture.class);
            // Second download attempt fails with an ExecutionException.
            CompletableFuture<Void> secondBlobFuture = mock(CompletableFuture.class);
            when(secondBlobFuture.get(anyLong(), any()))
                .thenThrow(new ExecutionException(new RuntimeException("Localization failure")));
            // Third attempt succeeds (mock get() returns null without throwing).
            CompletableFuture<Void> thirdBlobFuture = mock(CompletableFuture.class);
            when(localizer.requestDownloadTopologyBlobs(newAssignment, port, cb))
                .thenReturn(blobFuture)
                .thenReturn(secondBlobFuture)
                .thenReturn(thirdBlobFuture);

            ISupervisor iSuper = mock(ISupervisor.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
            DynamicState dynamicState = new DynamicState(null, null, null, slotMetrics)
                .withNewAssignment(newAssignment);

            // First pass: download requested, waiting on the first future.
            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb);
            assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
            assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(newAssignment, nextState.pendingLocalization);
            assertEquals(0, Time.currentTimeMillis());

            //Assignment has changed
            // Dropping the assignment mid-localization resets everything back to EMPTY.
            nextState = Slot.stateMachineStep(nextState.withNewAssignment(null), staticState);
            assertThat(nextState.state, is(MachineState.EMPTY));
            assertThat(nextState.pendingChangingBlobs, is(Collections.emptySet()));
            assertThat(nextState.pendingChangingBlobsAssignment, nullValue());
            assertThat(nextState.pendingLocalization, nullValue());
            assertThat(nextState.pendingDownload, nullValue());

            // Restart localization; this pass picks up the (failing) second future.
            clearInvocations(localizer);
            nextState = Slot.stateMachineStep(dynamicState.withNewAssignment(newAssignment), staticState);
            verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb);
            assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
            assertSame(secondBlobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(newAssignment, nextState.pendingLocalization);

            //Error occurs, but assignment has not changed
            // The failure should trigger a fresh download request (third future) rather than EMPTY.
            clearInvocations(localizer);
            nextState = Slot.stateMachineStep(nextState, staticState);
            verify(localizer).requestDownloadTopologyBlobs(newAssignment, port, cb);
            assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
            assertSame(thirdBlobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(newAssignment, nextState.pendingLocalization);
            assertThat(Time.currentTimeMillis(), greaterThan(3L));

            // Third download succeeds -> container launched, waiting for worker start.
            nextState = Slot.stateMachineStep(nextState, staticState);
            verify(thirdBlobFuture).get(1000, TimeUnit.MILLISECONDS);
            verify(containerLauncher).launchContainer(port, newAssignment, state);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(newAssignment, nextState.currentAssignment);
            assertSame(container, nextState.container);
        }
    }

    /**
     * A worker whose heartbeat is stale (older than the heartbeat timeout) should be killed and
     * relaunched: KILL_AND_RELAUNCH (kill, then forceKill), relaunch into
     * WAITING_FOR_WORKER_START, and RUNNING once a fresh heartbeat arrives.
     */
    @Test
    public void testRelaunch() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String topoId = "CURRENT";
            List<ExecutorInfo> execList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment assignment =
                mkLocalAssignment(topoId, execList, mkWorkerResources(100.0, 100.0, 100.0));

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            Container container = mock(Container.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            // Two stale heartbeats first (10 s old), then fresh ones after the relaunch.
            LSWorkerHeartbeat oldhb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs() - 10);
            LSWorkerHeartbeat goodhb = mkWorkerHB(topoId, port, execList, Time.currentTimeSecs());
            when(container.readHeartbeat()).thenReturn(oldhb, oldhb, goodhb, goodhb);
            // Processes survive the first kill attempt, die after forceKill.
            when(container.areAllProcessesDead()).thenReturn(false, false, true);

            ISupervisor iSuper = mock(ISupervisor.class);
            LocalState state = mock(LocalState.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
            DynamicState dynamicState = new DynamicState(assignment, container, assignment, slotMetrics);

            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            assertEquals(MachineState.KILL_AND_RELAUNCH, nextState.state);
            verify(container).kill();
            assertTrue(Time.currentTimeMillis() > 1000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.KILL_AND_RELAUNCH, nextState.state);
            verify(container).forceKill();
            assertTrue(Time.currentTimeMillis() > 2000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            verify(container).relaunch();

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            assertTrue(Time.currentTimeMillis() > 3000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
        }
    }

    /**
     * Rescheduling to a different topology: the current container is killed (KILL, then forceKill)
     * while the new topology's blobs download in parallel; the old slot is cleaned up and
     * released, then the new container is launched and reaches RUNNING.
     */
    @Test
    public void testReschedule() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            // "c" = currently running topology; "n" = newly assigned topology.
            String cTopoId = "CURRENT";
            List<ExecutorInfo> cExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment cAssignment =
                mkLocalAssignment(cTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            Container cContainer = mock(Container.class);
            LSWorkerHeartbeat chb = mkWorkerHB(cTopoId, port, cExecList, Time.currentTimeSecs());
            when(cContainer.readHeartbeat()).thenReturn(chb);
            when(cContainer.areAllProcessesDead()).thenReturn(false, false, true);

            String nTopoId = "NEW";
            List<ExecutorInfo> nExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment nAssignment =
                mkLocalAssignment(nTopoId, nExecList, mkWorkerResources(100.0, 100.0, 100.0));
            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            Container nContainer = mock(Container.class);
            LocalState state = mock(LocalState.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            when(containerLauncher.launchContainer(port, nAssignment, state)).thenReturn(nContainer);
            LSWorkerHeartbeat nhb = mkWorkerHB(nTopoId, 100, nExecList, Time.currentTimeSecs());
            when(nContainer.readHeartbeat()).thenReturn(nhb, nhb);

            @SuppressWarnings("unchecked")
            CompletableFuture<Void> blobFuture = mock(CompletableFuture.class);
            when(localizer.requestDownloadTopologyBlobs(nAssignment, port, cb)).thenReturn(blobFuture);

            ISupervisor iSuper = mock(ISupervisor.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
            DynamicState dynamicState = new DynamicState(cAssignment, cContainer, nAssignment, slotMetrics);

            // Step 1: KILL the old worker; new topology's download starts concurrently.
            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            assertEquals(MachineState.KILL, nextState.state);
            verify(cContainer).kill();
            verify(localizer).requestDownloadTopologyBlobs(nAssignment, port, cb);
            assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(nAssignment, nextState.pendingLocalization);
            assertTrue(Time.currentTimeMillis() > 1000);

            // Step 2: still not dead -> forceKill.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.KILL, nextState.state);
            verify(cContainer).forceKill();
            assertSame(blobFuture, nextState.pendingDownload, "pendingDownload not set properly");
            assertEquals(nAssignment, nextState.pendingLocalization);
            assertTrue(Time.currentTimeMillis() > 2000);

            // Step 3: old worker dead -> clean up and release the slot, wait on localization.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.WAITING_FOR_BLOB_LOCALIZATION, nextState.state);
            verify(cContainer).cleanUp();
            verify(localizer).releaseSlotFor(cAssignment, port);
            assertTrue(Time.currentTimeMillis() > 2000);

            // Step 4: download done -> launch the new container.
            nextState = Slot.stateMachineStep(nextState, staticState);
            verify(blobFuture).get(1000, TimeUnit.MILLISECONDS);
            verify(containerLauncher).launchContainer(port, nAssignment, state);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(nAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 2000);

            // Steps 5+: worker heartbeats -> RUNNING, stable thereafter.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(nAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 2000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(nAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 3000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingDownload, "pendingDownload is not null");
            assertNull(nextState.pendingLocalization);
            assertSame(nAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 4000);
        }
    }

    /**
     * Unscheduling: a RUNNING slot whose new assignment is null is killed (KILL, then forceKill),
     * cleaned up and released, and settles in a stable EMPTY state. No blob download is ever
     * requested.
     */
    @Test
    public void testRunningToEmpty() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String cTopoId = "CURRENT";
            List<ExecutorInfo> cExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment cAssignment =
                mkLocalAssignment(cTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));
            Container cContainer = mock(Container.class);
            LSWorkerHeartbeat chb = mkWorkerHB(cTopoId, port, cExecList, Time.currentTimeSecs());
            when(cContainer.readHeartbeat()).thenReturn(chb);
            when(cContainer.areAllProcessesDead()).thenReturn(false, false, true);

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            ISupervisor iSuper = mock(ISupervisor.class);
            LocalState state = mock(LocalState.class);
            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null, slotMetrics);
            // New assignment is null: the slot should wind down.
            DynamicState dynamicState = new DynamicState(cAssignment, cContainer, null, slotMetrics);

            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            assertEquals(MachineState.KILL, nextState.state);
            verify(cContainer).kill();
            verify(localizer, never()).requestDownloadTopologyBlobs(null, port, cb);
            assertNull(nextState.pendingDownload, "pendingDownload not set properly");
            assertNull(nextState.pendingLocalization);
            assertTrue(Time.currentTimeMillis() > 1000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.KILL, nextState.state);
            verify(cContainer).forceKill();
            assertNull(nextState.pendingDownload, "pendingDownload not set properly");
            assertNull(nextState.pendingLocalization);
            assertTrue(Time.currentTimeMillis() > 2000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.EMPTY, nextState.state);
            verify(cContainer).cleanUp();
            verify(localizer).releaseSlotFor(cAssignment, port);
            assertNull(nextState.container);
            assertNull(nextState.currentAssignment);
            assertTrue(Time.currentTimeMillis() > 2000);

            // EMPTY is stable.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.EMPTY, nextState.state);
            assertNull(nextState.container);
            assertNull(nextState.currentAssignment);
            assertTrue(Time.currentTimeMillis() > 3000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.EMPTY, nextState.state);
            assertNull(nextState.container);
            assertNull(nextState.currentAssignment);
            assertTrue(Time.currentTimeMillis() > 3000);
        }
    }

    /**
     * Profiling while RUNNING: a JPROFILE_STOP request with a timestamp ~3 s in the future is
     * first started (runProfiling(request, false)) and kept pending; once simulated time passes
     * the timestamp it is stopped (runProfiling(request, true)) and both the pending-stop set and
     * the action set are cleared. The slot stays RUNNING throughout.
     */
    @Test
    public void testRunWithProfileActions() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String cTopoId = "CURRENT";
            List<ExecutorInfo> cExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment cAssignment =
                mkLocalAssignment(cTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));
            Container cContainer = mock(Container.class);
            LSWorkerHeartbeat chb = mkWorkerHB(cTopoId, port, cExecList,
                Time.currentTimeSecs() + 100); //NOT going to timeout for a while
            when(cContainer.readHeartbeat()).thenReturn(chb, chb, chb, chb, chb, chb);
            when(cContainer.runProfiling(any(ProfileRequest.class), anyBoolean())).thenReturn(true);

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            ISupervisor iSuper = mock(ISupervisor.class);
            LocalState state = mock(LocalState.class);
            StaticState staticState = new StaticState(localizer, 5000, 120000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null,
                new SlotMetrics(new StormMetricsRegistry()));

            Set<TopoProfileAction> profileActions = new HashSet<>();
            ProfileRequest request = new ProfileRequest();
            request.set_action(ProfileAction.JPROFILE_STOP);
            NodeInfo info = new NodeInfo();
            info.set_node("localhost");
            info.add_to_port(port);
            request.set_nodeInfo(info);
            request.set_time_stamp(Time.currentTimeMillis() + 3000);//3 seconds from now
            TopoProfileAction profile = new TopoProfileAction(cTopoId, request);
            profileActions.add(profile);
            Set<TopoProfileAction> expectedPending = new HashSet<>();
            expectedPending.add(profile);

            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            DynamicState dynamicState = new DynamicState(cAssignment, cContainer, cAssignment, slotMetrics)
                .withProfileActions(profileActions, Collections.emptySet());

            // Step 1: profiling starts; the stop is queued until the request's timestamp.
            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            verify(cContainer).runProfiling(request, false);
            assertEquals(expectedPending, nextState.pendingStopProfileActions);
            assertEquals(expectedPending, nextState.profileActions);
            assertTrue(Time.currentTimeMillis() > 1000);

            // Steps 2-3: timestamp not reached yet; action stays pending.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertEquals(expectedPending, nextState.pendingStopProfileActions);
            assertEquals(expectedPending, nextState.profileActions);
            assertTrue(Time.currentTimeMillis() > 2000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertEquals(expectedPending, nextState.pendingStopProfileActions);
            assertEquals(expectedPending, nextState.profileActions);
            assertTrue(Time.currentTimeMillis() > 3000);

            // Step 4: timestamp passed -> profiling stopped, pending sets cleared.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            verify(cContainer).runProfiling(request, true);
            assertEquals(Collections.<TopoProfileAction>emptySet(), nextState.pendingStopProfileActions);
            assertEquals(Collections.<TopoProfileAction>emptySet(), nextState.profileActions);
            assertTrue(Time.currentTimeMillis() > 4000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertEquals(Collections.<TopoProfileAction>emptySet(), nextState.pendingStopProfileActions);
            assertEquals(Collections.<TopoProfileAction>emptySet(), nextState.profileActions);
            assertTrue(Time.currentTimeMillis() > 5000);
        }
    }

    /**
     * Blob-update handling with filtering: of two changing blobs, only the one whose assignment
     * matches this slot's topology should gate the restart — the unrelated blob's latch is
     * released immediately. The worker is killed (KILL_BLOB_UPDATE), the slot waits for the blob
     * update (WAITING_FOR_BLOB_UPDATE), then relaunches the same assignment and returns to
     * RUNNING. No topology re-download is requested.
     */
    @Test
    public void testResourcesChangedFiltered() throws Exception {
        try (SimulatedTime ignored = new SimulatedTime(1010)) {
            int port = 8080;
            String cTopoId = "CURRENT";
            List<ExecutorInfo> cExecList = mkExecutorInfoList(1, 2, 3, 4, 5);
            LocalAssignment cAssignment =
                mkLocalAssignment(cTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));
            // An assignment for a different topology — its blob change must be filtered out.
            String otherTopoId = "OTHER";
            LocalAssignment otherAssignment =
                mkLocalAssignment(otherTopoId, cExecList, mkWorkerResources(100.0, 100.0, 100.0));

            BlobChangingCallback cb = mock(BlobChangingCallback.class);
            Container cContainer = mock(Container.class);
            LSWorkerHeartbeat chb = mkWorkerHB(cTopoId, port, cExecList, Time.currentTimeSecs());
            when(cContainer.readHeartbeat()).thenReturn(chb);
            when(cContainer.areAllProcessesDead()).thenReturn(false, false, true);

            AsyncLocalizer localizer = mock(AsyncLocalizer.class);
            Container nContainer = mock(Container.class);
            LocalState state = mock(LocalState.class);
            ContainerLauncher containerLauncher = mock(ContainerLauncher.class);
            when(containerLauncher.launchContainer(port, cAssignment, state)).thenReturn(nContainer);
            when(nContainer.readHeartbeat()).thenReturn(chb, chb);

            ISupervisor iSuper = mock(ISupervisor.class);
            long heartbeatTimeoutMs = 5000;
            StaticState staticState = new StaticState(localizer, heartbeatTimeoutMs, 120_000, 1000, 1000,
                containerLauncher, "localhost", port, iSuper, state, cb, null, null,
                new SlotMetrics(new StormMetricsRegistry()));

            Set<Slot.BlobChanging> changing = new HashSet<>();
            LocallyCachedBlob stormJar = mock(LocallyCachedBlob.class);
            GoodToGo.GoodToGoLatch stormJarLatch = mock(GoodToGo.GoodToGoLatch.class);
            CompletableFuture<Void> stormJarLatchFuture = mock(CompletableFuture.class);
            when(stormJarLatch.countDown()).thenReturn(stormJarLatchFuture);
            changing.add(new Slot.BlobChanging(cAssignment, stormJar, stormJarLatch));
            // "desired" holds only the relevant (CURRENT-topology) change.
            Set<Slot.BlobChanging> desired = new HashSet<>(changing);
            LocallyCachedBlob otherJar = mock(LocallyCachedBlob.class);
            GoodToGo.GoodToGoLatch otherJarLatch = mock(GoodToGo.GoodToGoLatch.class);
            changing.add(new Slot.BlobChanging(otherAssignment, otherJar, otherJarLatch));

            SlotMetrics slotMetrics = new SlotMetrics(new StormMetricsRegistry());
            DynamicState dynamicState = new DynamicState(cAssignment, cContainer, cAssignment, slotMetrics)
                .withChangingBlobs(changing);

            // Step 1: kill the worker for the blob update; irrelevant blob's latch released at once.
            DynamicState nextState = Slot.stateMachineStep(dynamicState, staticState);
            assertEquals(MachineState.KILL_BLOB_UPDATE, nextState.state);
            verify(iSuper).killedWorker(port);
            verify(cContainer).kill();
            verify(localizer, never()).requestDownloadTopologyBlobs(any(), anyInt(), any());
            verify(stormJarLatch, never()).countDown();
            verify(otherJarLatch, times(1)).countDown();
            assertNull(nextState.pendingDownload);
            assertNull(nextState.pendingLocalization);
            assertEquals(desired, nextState.changingBlobs);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertThat(Time.currentTimeMillis(), greaterThan(1000L));

            // Step 2: still not dead -> forceKill.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.KILL_BLOB_UPDATE, nextState.state);
            verify(cContainer).forceKill();
            assertNull(nextState.pendingDownload);
            assertNull(nextState.pendingLocalization);
            assertEquals(desired, nextState.changingBlobs);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertThat(Time.currentTimeMillis(), greaterThan(2000L));

            // Step 3: dead -> clean up, wait for the relevant blob update to finish.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.WAITING_FOR_BLOB_UPDATE, nextState.state);
            verify(cContainer).cleanUp();
            assertThat(Time.currentTimeMillis(), greaterThan(2000L));

            // Step 4: latch future completed -> relaunch the SAME assignment.
            nextState = Slot.stateMachineStep(nextState, staticState);
            verify(stormJarLatchFuture).get(anyLong(), any());
            verify(containerLauncher).launchContainer(port, cAssignment, state);
            assertEquals(MachineState.WAITING_FOR_WORKER_START, nextState.state);
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertSame(cAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertThat(Time.currentTimeMillis(), greaterThan(2000L));
            // Must relaunch before the heartbeat timeout would declare the worker dead.
            assertThat(Time.currentTimeMillis(), lessThan(heartbeatTimeoutMs));

            // Steps 5+: heartbeat observed -> RUNNING, stable thereafter.
            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertSame(cAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 2000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertSame(cAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 3000);

            nextState = Slot.stateMachineStep(nextState, staticState);
            assertEquals(MachineState.RUNNING, nextState.state);
            assertNull(nextState.pendingChangingBlobsAssignment);
            assertTrue(nextState.pendingChangingBlobs.isEmpty());
            assertSame(cAssignment, nextState.currentAssignment);
            assertSame(nContainer, nextState.container);
            assertTrue(Time.currentTimeMillis() > 4000);
        }
    }
}
googleapis/google-cloud-java
37,222
java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/ImportUserEventsMetadata.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/import_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Metadata related to the progress of the Import operation. This is * returned by the google.longrunning.Operation.metadata field. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata} */ public final class ImportUserEventsMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) ImportUserEventsMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use ImportUserEventsMetadata.newBuilder() to construct. 
private ImportUserEventsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportUserEventsMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportUserEventsMetadata(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1alpha_ImportUserEventsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1alpha_ImportUserEventsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.class, com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.Builder.class); } private int bitField0_; public static final int CREATE_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp createTime_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. */ @java.lang.Override public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. */ @java.lang.Override public com.google.protobuf.Timestamp getCreateTime() { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { return createTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } public static final int UPDATE_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp updateTime_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ @java.lang.Override public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. */ @java.lang.Override public com.google.protobuf.Timestamp getUpdateTime() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } public static final int SUCCESS_COUNT_FIELD_NUMBER = 3; private long successCount_ = 0L; /** * * * <pre> * Count of entries that were processed successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. */ @java.lang.Override public long getSuccessCount() { return successCount_; } public static final int FAILURE_COUNT_FIELD_NUMBER = 4; private long failureCount_ = 0L; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. 
*/ @java.lang.Override public long getFailureCount() { return failureCount_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateTime()); } if (successCount_ != 0L) { output.writeInt64(3, successCount_); } if (failureCount_ != 0L) { output.writeInt64(4, failureCount_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCreateTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime()); } if (successCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, successCount_); } if (failureCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, failureCount_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata other = (com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) obj; if (hasCreateTime() != other.hasCreateTime()) return false; if (hasCreateTime()) { if (!getCreateTime().equals(other.getCreateTime())) return false; } if 
(hasUpdateTime() != other.hasUpdateTime()) return false; if (hasUpdateTime()) { if (!getUpdateTime().equals(other.getUpdateTime())) return false; } if (getSuccessCount() != other.getSuccessCount()) return false; if (getFailureCount() != other.getFailureCount()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCreateTime()) { hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getCreateTime().hashCode(); } if (hasUpdateTime()) { hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getUpdateTime().hashCode(); } hash = (37 * hash) + SUCCESS_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSuccessCount()); hash = (37 * hash) + FAILURE_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailureCount()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Metadata related to the progress of the Import operation. This is * returned by the google.longrunning.Operation.metadata field. 
* </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1alpha_ImportUserEventsMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1alpha_ImportUserEventsMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.class, com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.Builder.class); } // Construct using // com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCreateTimeFieldBuilder(); getUpdateTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } successCount_ = 0L; failureCount_ = 0L; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1alpha_ImportUserEventsMetadata_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata build() { com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata buildPartial() { com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata result = new com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateTime_ = updateTimeBuilder_ == null ? 
updateTime_ : updateTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.successCount_ = successCount_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.failureCount_ = failureCount_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) { return mergeFrom((com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata other) { if (other == com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata.getDefaultInstance()) return this; if (other.hasCreateTime()) { mergeCreateTime(other.getCreateTime()); } if (other.hasUpdateTime()) { mergeUpdateTime(other.getUpdateTime()); } if (other.getSuccessCount() != 0L) { setSuccessCount(other.getSuccessCount()); } if (other.getFailureCount() != 0L) { 
setFailureCount(other.getFailureCount()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { successCount_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 32: { failureCount_ = input.readInt64(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp createTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> createTimeBuilder_; /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return Whether the createTime field is set. */ public boolean hasCreateTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> * * @return The createTime. 
*/ public com.google.protobuf.Timestamp getCreateTime() { if (createTimeBuilder_ == null) { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } else { return createTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } createTime_ = value; } else { createTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (createTimeBuilder_ == null) { createTime_ = builderForValue.build(); } else { createTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder mergeCreateTime(com.google.protobuf.Timestamp value) { if (createTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && createTime_ != null && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getCreateTimeBuilder().mergeFrom(value); } else { createTime_ = value; } } else { createTimeBuilder_.mergeFrom(value); } if (createTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public Builder clearCreateTime() { bitField0_ = (bitField0_ & ~0x00000001); createTime_ = null; if (createTimeBuilder_ != null) { createTimeBuilder_.dispose(); createTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation create time. 
* </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getCreateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() { if (createTimeBuilder_ != null) { return createTimeBuilder_.getMessageOrBuilder(); } else { return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_; } } /** * * * <pre> * Operation create time. * </pre> * * <code>.google.protobuf.Timestamp create_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getCreateTimeFieldBuilder() { if (createTimeBuilder_ == null) { createTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getCreateTime(), getParentForChildren(), isClean()); createTime_ = null; } return createTimeBuilder_; } private com.google.protobuf.Timestamp updateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> updateTimeBuilder_; /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return Whether the updateTime field is set. */ public boolean hasUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> * * @return The updateTime. 
*/ public com.google.protobuf.Timestamp getUpdateTime() { if (updateTimeBuilder_ == null) { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } else { return updateTimeBuilder_.getMessage(); } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateTime_ = value; } else { updateTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (updateTimeBuilder_ == null) { updateTime_ = builderForValue.build(); } else { updateTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) { if (updateTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateTime_ != null && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getUpdateTimeBuilder().mergeFrom(value); } else { updateTime_ = value; } } else { updateTimeBuilder_.mergeFrom(value); } if (updateTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. 
* </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public Builder clearUpdateTime() { bitField0_ = (bitField0_ & ~0x00000002); updateTime_ = null; if (updateTimeBuilder_ != null) { updateTimeBuilder_.dispose(); updateTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() { if (updateTimeBuilder_ != null) { return updateTimeBuilder_.getMessageOrBuilder(); } else { return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_; } } /** * * * <pre> * Operation last update time. If the operation is done, this is also the * finish time. * </pre> * * <code>.google.protobuf.Timestamp update_time = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getUpdateTimeFieldBuilder() { if (updateTimeBuilder_ == null) { updateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getUpdateTime(), getParentForChildren(), isClean()); updateTime_ = null; } return updateTimeBuilder_; } private long successCount_; /** * * * <pre> * Count of entries that were processed successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return The successCount. 
*/ @java.lang.Override public long getSuccessCount() { return successCount_; } /** * * * <pre> * Count of entries that were processed successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @param value The successCount to set. * @return This builder for chaining. */ public Builder setSuccessCount(long value) { successCount_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Count of entries that were processed successfully. * </pre> * * <code>int64 success_count = 3;</code> * * @return This builder for chaining. */ public Builder clearSuccessCount() { bitField0_ = (bitField0_ & ~0x00000004); successCount_ = 0L; onChanged(); return this; } private long failureCount_; /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return The failureCount. */ @java.lang.Override public long getFailureCount() { return failureCount_; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @param value The failureCount to set. * @return This builder for chaining. */ public Builder setFailureCount(long value) { failureCount_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Count of entries that encountered errors while processing. * </pre> * * <code>int64 failure_count = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearFailureCount() { bitField0_ = (bitField0_ & ~0x00000008); failureCount_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata) private static final com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata(); } public static com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportUserEventsMetadata> PARSER = new com.google.protobuf.AbstractParser<ImportUserEventsMetadata>() { @java.lang.Override public ImportUserEventsMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ImportUserEventsMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportUserEventsMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.ImportUserEventsMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
37,326
java-iam/proto-google-iam-v3/src/main/java/com/google/iam/v3/SearchTargetPolicyBindingsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/iam/v3/policy_bindings_service.proto // Protobuf Java Version: 3.25.8 package com.google.iam.v3; /** * * * <pre> * Response message for SearchTargetPolicyBindings method. * </pre> * * Protobuf type {@code google.iam.v3.SearchTargetPolicyBindingsResponse} */ public final class SearchTargetPolicyBindingsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.iam.v3.SearchTargetPolicyBindingsResponse) SearchTargetPolicyBindingsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SearchTargetPolicyBindingsResponse.newBuilder() to construct. 
private SearchTargetPolicyBindingsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchTargetPolicyBindingsResponse() { policyBindings_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchTargetPolicyBindingsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_SearchTargetPolicyBindingsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_SearchTargetPolicyBindingsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v3.SearchTargetPolicyBindingsResponse.class, com.google.iam.v3.SearchTargetPolicyBindingsResponse.Builder.class); } public static final int POLICY_BINDINGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.iam.v3.PolicyBinding> policyBindings_; /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public java.util.List<com.google.iam.v3.PolicyBinding> getPolicyBindingsList() { return policyBindings_; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.iam.v3.PolicyBindingOrBuilder> getPolicyBindingsOrBuilderList() { return policyBindings_; } /** * * * <pre> * The policy bindings bound to the specified target. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public int getPolicyBindingsCount() { return policyBindings_.size(); } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public com.google.iam.v3.PolicyBinding getPolicyBindings(int index) { return policyBindings_.get(index); } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ @java.lang.Override public com.google.iam.v3.PolicyBindingOrBuilder getPolicyBindingsOrBuilder(int index) { return policyBindings_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyBindings_.size(); i++) { output.writeMessage(1, policyBindings_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyBindings_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, policyBindings_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.iam.v3.SearchTargetPolicyBindingsResponse)) { return super.equals(obj); } com.google.iam.v3.SearchTargetPolicyBindingsResponse other = (com.google.iam.v3.SearchTargetPolicyBindingsResponse) obj; if (!getPolicyBindingsList().equals(other.getPolicyBindingsList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getPolicyBindingsCount() > 0) { hash = (37 * hash) + POLICY_BINDINGS_FIELD_NUMBER; hash = (53 * hash) + getPolicyBindingsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.iam.v3.SearchTargetPolicyBindingsResponse prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for SearchTargetPolicyBindings method. * </pre> * * Protobuf type {@code google.iam.v3.SearchTargetPolicyBindingsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.iam.v3.SearchTargetPolicyBindingsResponse) com.google.iam.v3.SearchTargetPolicyBindingsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_SearchTargetPolicyBindingsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_SearchTargetPolicyBindingsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.iam.v3.SearchTargetPolicyBindingsResponse.class, com.google.iam.v3.SearchTargetPolicyBindingsResponse.Builder.class); } // Construct using com.google.iam.v3.SearchTargetPolicyBindingsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyBindingsBuilder_ == null) { policyBindings_ = java.util.Collections.emptyList(); } else { policyBindings_ = null; policyBindingsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.iam.v3.PolicyBindingsServiceProto .internal_static_google_iam_v3_SearchTargetPolicyBindingsResponse_descriptor; } @java.lang.Override public com.google.iam.v3.SearchTargetPolicyBindingsResponse getDefaultInstanceForType() { return com.google.iam.v3.SearchTargetPolicyBindingsResponse.getDefaultInstance(); } @java.lang.Override public com.google.iam.v3.SearchTargetPolicyBindingsResponse build() { com.google.iam.v3.SearchTargetPolicyBindingsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.iam.v3.SearchTargetPolicyBindingsResponse buildPartial() { com.google.iam.v3.SearchTargetPolicyBindingsResponse result = new com.google.iam.v3.SearchTargetPolicyBindingsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.iam.v3.SearchTargetPolicyBindingsResponse result) { if (policyBindingsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyBindings_ = java.util.Collections.unmodifiableList(policyBindings_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyBindings_ = policyBindings_; } else { result.policyBindings_ = policyBindingsBuilder_.build(); } } private void buildPartial0(com.google.iam.v3.SearchTargetPolicyBindingsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } 
@java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.iam.v3.SearchTargetPolicyBindingsResponse) { return mergeFrom((com.google.iam.v3.SearchTargetPolicyBindingsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.iam.v3.SearchTargetPolicyBindingsResponse other) { if (other == com.google.iam.v3.SearchTargetPolicyBindingsResponse.getDefaultInstance()) return this; if (policyBindingsBuilder_ == null) { if (!other.policyBindings_.isEmpty()) { if (policyBindings_.isEmpty()) { policyBindings_ = other.policyBindings_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyBindingsIsMutable(); policyBindings_.addAll(other.policyBindings_); } onChanged(); } } else { if (!other.policyBindings_.isEmpty()) { if (policyBindingsBuilder_.isEmpty()) { policyBindingsBuilder_.dispose(); policyBindingsBuilder_ = null; policyBindings_ = other.policyBindings_; bitField0_ = (bitField0_ & ~0x00000001); policyBindingsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyBindingsFieldBuilder() : null; } else { policyBindingsBuilder_.addAllMessages(other.policyBindings_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.iam.v3.PolicyBinding m = input.readMessage(com.google.iam.v3.PolicyBinding.parser(), extensionRegistry); if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.add(m); } else { policyBindingsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.iam.v3.PolicyBinding> policyBindings_ = java.util.Collections.emptyList(); private void ensurePolicyBindingsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyBindings_ = new java.util.ArrayList<com.google.iam.v3.PolicyBinding>(policyBindings_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v3.PolicyBinding, com.google.iam.v3.PolicyBinding.Builder, com.google.iam.v3.PolicyBindingOrBuilder> 
policyBindingsBuilder_; /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public java.util.List<com.google.iam.v3.PolicyBinding> getPolicyBindingsList() { if (policyBindingsBuilder_ == null) { return java.util.Collections.unmodifiableList(policyBindings_); } else { return policyBindingsBuilder_.getMessageList(); } } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public int getPolicyBindingsCount() { if (policyBindingsBuilder_ == null) { return policyBindings_.size(); } else { return policyBindingsBuilder_.getCount(); } } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBinding getPolicyBindings(int index) { if (policyBindingsBuilder_ == null) { return policyBindings_.get(index); } else { return policyBindingsBuilder_.getMessage(index); } } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder setPolicyBindings(int index, com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.set(index, value); onChanged(); } else { policyBindingsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder setPolicyBindings( int index, com.google.iam.v3.PolicyBinding.Builder builderForValue) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.set(index, builderForValue.build()); onChanged(); } else { policyBindingsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.add(value); onChanged(); } else { policyBindingsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(int index, com.google.iam.v3.PolicyBinding value) { if (policyBindingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyBindingsIsMutable(); policyBindings_.add(index, value); onChanged(); } else { policyBindingsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings(com.google.iam.v3.PolicyBinding.Builder builderForValue) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.add(builderForValue.build()); onChanged(); } else { policyBindingsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addPolicyBindings( int index, com.google.iam.v3.PolicyBinding.Builder builderForValue) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.add(index, builderForValue.build()); onChanged(); } else { policyBindingsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder addAllPolicyBindings( java.lang.Iterable<? extends com.google.iam.v3.PolicyBinding> values) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, policyBindings_); onChanged(); } else { policyBindingsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder clearPolicyBindings() { if (policyBindingsBuilder_ == null) { policyBindings_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policyBindingsBuilder_.clear(); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public Builder removePolicyBindings(int index) { if (policyBindingsBuilder_ == null) { ensurePolicyBindingsIsMutable(); policyBindings_.remove(index); onChanged(); } else { policyBindingsBuilder_.remove(index); } return this; } /** * * * <pre> * The policy bindings bound to the specified target. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBinding.Builder getPolicyBindingsBuilder(int index) { return getPolicyBindingsFieldBuilder().getBuilder(index); } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBindingOrBuilder getPolicyBindingsOrBuilder(int index) { if (policyBindingsBuilder_ == null) { return policyBindings_.get(index); } else { return policyBindingsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public java.util.List<? extends com.google.iam.v3.PolicyBindingOrBuilder> getPolicyBindingsOrBuilderList() { if (policyBindingsBuilder_ != null) { return policyBindingsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policyBindings_); } } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBinding.Builder addPolicyBindingsBuilder() { return getPolicyBindingsFieldBuilder() .addBuilder(com.google.iam.v3.PolicyBinding.getDefaultInstance()); } /** * * * <pre> * The policy bindings bound to the specified target. * </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public com.google.iam.v3.PolicyBinding.Builder addPolicyBindingsBuilder(int index) { return getPolicyBindingsFieldBuilder() .addBuilder(index, com.google.iam.v3.PolicyBinding.getDefaultInstance()); } /** * * * <pre> * The policy bindings bound to the specified target. 
* </pre> * * <code>repeated .google.iam.v3.PolicyBinding policy_bindings = 1;</code> */ public java.util.List<com.google.iam.v3.PolicyBinding.Builder> getPolicyBindingsBuilderList() { return getPolicyBindingsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v3.PolicyBinding, com.google.iam.v3.PolicyBinding.Builder, com.google.iam.v3.PolicyBindingOrBuilder> getPolicyBindingsFieldBuilder() { if (policyBindingsBuilder_ == null) { policyBindingsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.iam.v3.PolicyBinding, com.google.iam.v3.PolicyBinding.Builder, com.google.iam.v3.PolicyBindingOrBuilder>( policyBindings_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policyBindings_ = null; } return policyBindingsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. A token, which can be sent as `page_token` to retrieve the next * page. If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.iam.v3.SearchTargetPolicyBindingsResponse) } // @@protoc_insertion_point(class_scope:google.iam.v3.SearchTargetPolicyBindingsResponse) private static final com.google.iam.v3.SearchTargetPolicyBindingsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.iam.v3.SearchTargetPolicyBindingsResponse(); } public static com.google.iam.v3.SearchTargetPolicyBindingsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchTargetPolicyBindingsResponse> PARSER = new com.google.protobuf.AbstractParser<SearchTargetPolicyBindingsResponse>() { @java.lang.Override public SearchTargetPolicyBindingsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchTargetPolicyBindingsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchTargetPolicyBindingsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.iam.v3.SearchTargetPolicyBindingsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,264
java-service-management/proto-google-cloud-service-management-v1/src/main/java/com/google/api/servicemanagement/v1/ConfigSource.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/servicemanagement/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.api.servicemanagement.v1; /** * * * <pre> * Represents a source file which is used to generate the service configuration * defined by `google.api.Service`. * </pre> * * Protobuf type {@code google.api.servicemanagement.v1.ConfigSource} */ public final class ConfigSource extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.servicemanagement.v1.ConfigSource) ConfigSourceOrBuilder { private static final long serialVersionUID = 0L; // Use ConfigSource.newBuilder() to construct. 
private ConfigSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConfigSource() { id_ = ""; files_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ConfigSource(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.servicemanagement.v1.ResourcesProto .internal_static_google_api_servicemanagement_v1_ConfigSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.servicemanagement.v1.ResourcesProto .internal_static_google_api_servicemanagement_v1_ConfigSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.servicemanagement.v1.ConfigSource.class, com.google.api.servicemanagement.v1.ConfigSource.Builder.class); } public static final int ID_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object id_ = ""; /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @return The id. */ @java.lang.Override public java.lang.String getId() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } } /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @return The bytes for id. 
*/ @java.lang.Override public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.api.servicemanagement.v1.ConfigFile> files_; /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ @java.lang.Override public java.util.List<com.google.api.servicemanagement.v1.ConfigFile> getFilesList() { return files_; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.api.servicemanagement.v1.ConfigFileOrBuilder> getFilesOrBuilderList() { return files_; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ @java.lang.Override public int getFilesCount() { return files_.size(); } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ @java.lang.Override public com.google.api.servicemanagement.v1.ConfigFile getFiles(int index) { return files_.get(index); } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ @java.lang.Override public com.google.api.servicemanagement.v1.ConfigFileOrBuilder getFilesOrBuilder(int index) { return files_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < files_.size(); i++) { output.writeMessage(2, files_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, id_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < files_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, files_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, id_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.servicemanagement.v1.ConfigSource)) { return super.equals(obj); } com.google.api.servicemanagement.v1.ConfigSource other = (com.google.api.servicemanagement.v1.ConfigSource) obj; if (!getId().equals(other.getId())) return false; if (!getFilesList().equals(other.getFilesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); hash = (37 * hash) + ID_FIELD_NUMBER; hash = (53 * hash) + getId().hashCode(); if (getFilesCount() > 0) { hash = (37 * hash) + FILES_FIELD_NUMBER; hash = (53 * hash) + getFilesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.servicemanagement.v1.ConfigSource parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ConfigSource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.servicemanagement.v1.ConfigSource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.api.servicemanagement.v1.ConfigSource prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents a source file which is used to generate the service configuration * defined by `google.api.Service`. * </pre> * * Protobuf type {@code google.api.servicemanagement.v1.ConfigSource} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.servicemanagement.v1.ConfigSource) com.google.api.servicemanagement.v1.ConfigSourceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.servicemanagement.v1.ResourcesProto .internal_static_google_api_servicemanagement_v1_ConfigSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.servicemanagement.v1.ResourcesProto .internal_static_google_api_servicemanagement_v1_ConfigSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.servicemanagement.v1.ConfigSource.class, com.google.api.servicemanagement.v1.ConfigSource.Builder.class); } // Construct using com.google.api.servicemanagement.v1.ConfigSource.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; id_ = ""; if (filesBuilder_ == null) { files_ = java.util.Collections.emptyList(); } else { files_ = null; filesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.servicemanagement.v1.ResourcesProto 
.internal_static_google_api_servicemanagement_v1_ConfigSource_descriptor; } @java.lang.Override public com.google.api.servicemanagement.v1.ConfigSource getDefaultInstanceForType() { return com.google.api.servicemanagement.v1.ConfigSource.getDefaultInstance(); } @java.lang.Override public com.google.api.servicemanagement.v1.ConfigSource build() { com.google.api.servicemanagement.v1.ConfigSource result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.servicemanagement.v1.ConfigSource buildPartial() { com.google.api.servicemanagement.v1.ConfigSource result = new com.google.api.servicemanagement.v1.ConfigSource(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.api.servicemanagement.v1.ConfigSource result) { if (filesBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { files_ = java.util.Collections.unmodifiableList(files_); bitField0_ = (bitField0_ & ~0x00000002); } result.files_ = files_; } else { result.files_ = filesBuilder_.build(); } } private void buildPartial0(com.google.api.servicemanagement.v1.ConfigSource result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.id_ = id_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int 
index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.servicemanagement.v1.ConfigSource) { return mergeFrom((com.google.api.servicemanagement.v1.ConfigSource) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.api.servicemanagement.v1.ConfigSource other) { if (other == com.google.api.servicemanagement.v1.ConfigSource.getDefaultInstance()) return this; if (!other.getId().isEmpty()) { id_ = other.id_; bitField0_ |= 0x00000001; onChanged(); } if (filesBuilder_ == null) { if (!other.files_.isEmpty()) { if (files_.isEmpty()) { files_ = other.files_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFilesIsMutable(); files_.addAll(other.files_); } onChanged(); } } else { if (!other.files_.isEmpty()) { if (filesBuilder_.isEmpty()) { filesBuilder_.dispose(); filesBuilder_ = null; files_ = other.files_; bitField0_ = (bitField0_ & ~0x00000002); filesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFilesFieldBuilder() : null; } else { filesBuilder_.addAllMessages(other.files_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { com.google.api.servicemanagement.v1.ConfigFile m = input.readMessage( com.google.api.servicemanagement.v1.ConfigFile.parser(), extensionRegistry); if (filesBuilder_ == null) { ensureFilesIsMutable(); files_.add(m); } else { filesBuilder_.addMessage(m); } break; } // case 18 case 42: { id_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object id_ = ""; /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @return The id. 
*/ public java.lang.String getId() { java.lang.Object ref = id_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); id_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @return The bytes for id. */ public com.google.protobuf.ByteString getIdBytes() { java.lang.Object ref = id_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); id_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @param value The id to set. * @return This builder for chaining. */ public Builder setId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } id_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @return This builder for chaining. */ public Builder clearId() { id_ = getDefaultInstance().getId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * A unique ID for a specific instance of this message, typically assigned * by the client for tracking purpose. If empty, the server may choose to * generate one instead. * </pre> * * <code>string id = 5;</code> * * @param value The bytes for id to set. 
* @return This builder for chaining. */ public Builder setIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); id_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.api.servicemanagement.v1.ConfigFile> files_ = java.util.Collections.emptyList(); private void ensureFilesIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { files_ = new java.util.ArrayList<com.google.api.servicemanagement.v1.ConfigFile>(files_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.ConfigFile, com.google.api.servicemanagement.v1.ConfigFile.Builder, com.google.api.servicemanagement.v1.ConfigFileOrBuilder> filesBuilder_; /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public java.util.List<com.google.api.servicemanagement.v1.ConfigFile> getFilesList() { if (filesBuilder_ == null) { return java.util.Collections.unmodifiableList(files_); } else { return filesBuilder_.getMessageList(); } } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public int getFilesCount() { if (filesBuilder_ == null) { return files_.size(); } else { return filesBuilder_.getCount(); } } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public com.google.api.servicemanagement.v1.ConfigFile getFiles(int index) { if (filesBuilder_ == null) { return files_.get(index); } else { return filesBuilder_.getMessage(index); } } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder setFiles(int index, com.google.api.servicemanagement.v1.ConfigFile value) { if (filesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFilesIsMutable(); files_.set(index, value); onChanged(); } else { filesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder setFiles( int index, com.google.api.servicemanagement.v1.ConfigFile.Builder builderForValue) { if (filesBuilder_ == null) { ensureFilesIsMutable(); files_.set(index, builderForValue.build()); onChanged(); } else { filesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder addFiles(com.google.api.servicemanagement.v1.ConfigFile value) { if (filesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFilesIsMutable(); files_.add(value); onChanged(); } else { filesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder addFiles(int index, com.google.api.servicemanagement.v1.ConfigFile value) { if (filesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFilesIsMutable(); files_.add(index, value); onChanged(); } else { filesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder addFiles( com.google.api.servicemanagement.v1.ConfigFile.Builder builderForValue) { if (filesBuilder_ == null) { ensureFilesIsMutable(); files_.add(builderForValue.build()); onChanged(); } else { filesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder addFiles( int index, com.google.api.servicemanagement.v1.ConfigFile.Builder builderForValue) { if (filesBuilder_ == null) { ensureFilesIsMutable(); files_.add(index, builderForValue.build()); onChanged(); } else { filesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder addAllFiles( java.lang.Iterable<? 
extends com.google.api.servicemanagement.v1.ConfigFile> values) { if (filesBuilder_ == null) { ensureFilesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, files_); onChanged(); } else { filesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder clearFiles() { if (filesBuilder_ == null) { files_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { filesBuilder_.clear(); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public Builder removeFiles(int index) { if (filesBuilder_ == null) { ensureFilesIsMutable(); files_.remove(index); onChanged(); } else { filesBuilder_.remove(index); } return this; } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public com.google.api.servicemanagement.v1.ConfigFile.Builder getFilesBuilder(int index) { return getFilesFieldBuilder().getBuilder(index); } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public com.google.api.servicemanagement.v1.ConfigFileOrBuilder getFilesOrBuilder(int index) { if (filesBuilder_ == null) { return files_.get(index); } else { return filesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public java.util.List<? extends com.google.api.servicemanagement.v1.ConfigFileOrBuilder> getFilesOrBuilderList() { if (filesBuilder_ != null) { return filesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(files_); } } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public com.google.api.servicemanagement.v1.ConfigFile.Builder addFilesBuilder() { return getFilesFieldBuilder() .addBuilder(com.google.api.servicemanagement.v1.ConfigFile.getDefaultInstance()); } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). * </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public com.google.api.servicemanagement.v1.ConfigFile.Builder addFilesBuilder(int index) { return getFilesFieldBuilder() .addBuilder(index, com.google.api.servicemanagement.v1.ConfigFile.getDefaultInstance()); } /** * * * <pre> * Set of source configuration files that are used to generate a service * configuration (`google.api.Service`). 
* </pre> * * <code>repeated .google.api.servicemanagement.v1.ConfigFile files = 2;</code> */ public java.util.List<com.google.api.servicemanagement.v1.ConfigFile.Builder> getFilesBuilderList() { return getFilesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.ConfigFile, com.google.api.servicemanagement.v1.ConfigFile.Builder, com.google.api.servicemanagement.v1.ConfigFileOrBuilder> getFilesFieldBuilder() { if (filesBuilder_ == null) { filesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.servicemanagement.v1.ConfigFile, com.google.api.servicemanagement.v1.ConfigFile.Builder, com.google.api.servicemanagement.v1.ConfigFileOrBuilder>( files_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); files_ = null; } return filesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.servicemanagement.v1.ConfigSource) } // @@protoc_insertion_point(class_scope:google.api.servicemanagement.v1.ConfigSource) private static final com.google.api.servicemanagement.v1.ConfigSource DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.servicemanagement.v1.ConfigSource(); } public static com.google.api.servicemanagement.v1.ConfigSource getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConfigSource> PARSER = new com.google.protobuf.AbstractParser<ConfigSource>() { @java.lang.Override public ConfigSource parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConfigSource> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConfigSource> getParserForType() { return PARSER; } @java.lang.Override public com.google.api.servicemanagement.v1.ConfigSource getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,350
java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/UpdateMeasurementProtocolSecretRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/admin/v1alpha/analytics_admin.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.admin.v1alpha; /** * * * <pre> * Request message for UpdateMeasurementProtocolSecret RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest} */ public final class UpdateMeasurementProtocolSecretRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) UpdateMeasurementProtocolSecretRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateMeasurementProtocolSecretRequest.newBuilder() to construct. 
private UpdateMeasurementProtocolSecretRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateMeasurementProtocolSecretRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateMeasurementProtocolSecretRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_UpdateMeasurementProtocolSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest.class, com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest.Builder .class); } private int bitField0_; public static final int MEASUREMENT_PROTOCOL_SECRET_FIELD_NUMBER = 1; private com.google.analytics.admin.v1alpha.MeasurementProtocolSecret measurementProtocolSecret_; /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the measurementProtocolSecret field is set. */ @java.lang.Override public boolean hasMeasurementProtocolSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The measurementProtocolSecret. 
*/ @java.lang.Override public com.google.analytics.admin.v1alpha.MeasurementProtocolSecret getMeasurementProtocolSecret() { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.analytics.admin.v1alpha.MeasurementProtocolSecretOrBuilder getMeasurementProtocolSecretOrBuilder() { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMeasurementProtocolSecret()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, getMeasurementProtocolSecret()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest)) { return super.equals(obj); } com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest other = (com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) obj; if (hasMeasurementProtocolSecret() != other.hasMeasurementProtocolSecret()) return false; if (hasMeasurementProtocolSecret()) { if (!getMeasurementProtocolSecret().equals(other.getMeasurementProtocolSecret())) return false; } 
if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasMeasurementProtocolSecret()) { hash = (37 * hash) + MEASUREMENT_PROTOCOL_SECRET_FIELD_NUMBER; hash = (53 * hash) + getMeasurementProtocolSecret().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for UpdateMeasurementProtocolSecret RPC * </pre> * * Protobuf type {@code google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_UpdateMeasurementProtocolSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest.class, com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest.Builder 
.class); } // Construct using // com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMeasurementProtocolSecretFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; measurementProtocolSecret_ = null; if (measurementProtocolSecretBuilder_ != null) { measurementProtocolSecretBuilder_.dispose(); measurementProtocolSecretBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.admin.v1alpha.AnalyticsAdminProto .internal_static_google_analytics_admin_v1alpha_UpdateMeasurementProtocolSecretRequest_descriptor; } @java.lang.Override public com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest getDefaultInstanceForType() { return com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest .getDefaultInstance(); } @java.lang.Override public com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest build() { com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest buildPartial() { com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest result = new com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest(this); if (bitField0_ != 0) { 
buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.measurementProtocolSecret_ = measurementProtocolSecretBuilder_ == null ? measurementProtocolSecret_ : measurementProtocolSecretBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) { return mergeFrom( (com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest other) { if (other == 
com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest .getDefaultInstance()) return this; if (other.hasMeasurementProtocolSecret()) { mergeMeasurementProtocolSecret(other.getMeasurementProtocolSecret()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getMeasurementProtocolSecretFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.analytics.admin.v1alpha.MeasurementProtocolSecret measurementProtocolSecret_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.MeasurementProtocolSecret, com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1alpha.MeasurementProtocolSecretOrBuilder> measurementProtocolSecretBuilder_; /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the measurementProtocolSecret field is set. */ public boolean hasMeasurementProtocolSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The measurementProtocolSecret. */ public com.google.analytics.admin.v1alpha.MeasurementProtocolSecret getMeasurementProtocolSecret() { if (measurementProtocolSecretBuilder_ == null) { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } else { return measurementProtocolSecretBuilder_.getMessage(); } } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMeasurementProtocolSecret( com.google.analytics.admin.v1alpha.MeasurementProtocolSecret value) { if (measurementProtocolSecretBuilder_ == null) { if (value == null) { throw new NullPointerException(); } measurementProtocolSecret_ = value; } else { measurementProtocolSecretBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMeasurementProtocolSecret( com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.Builder builderForValue) { if (measurementProtocolSecretBuilder_ == null) { measurementProtocolSecret_ = builderForValue.build(); } else { measurementProtocolSecretBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMeasurementProtocolSecret( com.google.analytics.admin.v1alpha.MeasurementProtocolSecret value) { if (measurementProtocolSecretBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && measurementProtocolSecret_ != null && measurementProtocolSecret_ != com.google.analytics.admin.v1alpha.MeasurementProtocolSecret .getDefaultInstance()) { getMeasurementProtocolSecretBuilder().mergeFrom(value); } else { measurementProtocolSecret_ = value; } } else { measurementProtocolSecretBuilder_.mergeFrom(value); } if (measurementProtocolSecret_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMeasurementProtocolSecret() { bitField0_ = (bitField0_ & ~0x00000001); measurementProtocolSecret_ = null; if (measurementProtocolSecretBuilder_ != null) { measurementProtocolSecretBuilder_.dispose(); measurementProtocolSecretBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. 
The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.Builder getMeasurementProtocolSecretBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMeasurementProtocolSecretFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The measurement protocol secret to update. * </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.analytics.admin.v1alpha.MeasurementProtocolSecretOrBuilder getMeasurementProtocolSecretOrBuilder() { if (measurementProtocolSecretBuilder_ != null) { return measurementProtocolSecretBuilder_.getMessageOrBuilder(); } else { return measurementProtocolSecret_ == null ? com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.getDefaultInstance() : measurementProtocolSecret_; } } /** * * * <pre> * Required. The measurement protocol secret to update. 
* </pre> * * <code> * .google.analytics.admin.v1alpha.MeasurementProtocolSecret measurement_protocol_secret = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.MeasurementProtocolSecret, com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1alpha.MeasurementProtocolSecretOrBuilder> getMeasurementProtocolSecretFieldBuilder() { if (measurementProtocolSecretBuilder_ == null) { measurementProtocolSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.admin.v1alpha.MeasurementProtocolSecret, com.google.analytics.admin.v1alpha.MeasurementProtocolSecret.Builder, com.google.analytics.admin.v1alpha.MeasurementProtocolSecretOrBuilder>( getMeasurementProtocolSecret(), getParentForChildren(), isClean()); measurementProtocolSecret_ = null; } return measurementProtocolSecretBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to be updated. Omitted fields will not be * updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) } // @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest) private static final com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest(); } public static com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateMeasurementProtocolSecretRequest>() { @java.lang.Override public UpdateMeasurementProtocolSecretRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateMeasurementProtocolSecretRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.admin.v1alpha.UpdateMeasurementProtocolSecretRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kylin
37,205
src/metadata-server/src/test/java/org/apache/kylin/rest/controller/open/OpenSegmentControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.rest.controller.open; import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON; import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_NOT_EXIST; import static org.apache.kylin.common.exception.code.ErrorCodeServer.PROJECT_MULTI_PARTITION_DISABLE; import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_CONFLICT_PARAMETER; import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_INDEX_CONFLICT_PARAMETER; import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_INDEX_STATUS_INVALID; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.lenient; import static org.mockito.Mockito.when; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import org.apache.kylin.common.KylinConfig; import 
org.apache.kylin.common.KylinConfigExt; import org.apache.kylin.common.exception.KylinException; import org.apache.kylin.common.msg.MsgPicker; import org.apache.kylin.common.util.JsonUtil; import org.apache.kylin.common.util.NLocalFileMetadataTestCase; import org.apache.kylin.common.util.Pair; import org.apache.kylin.common.util.RandomUtil; import org.apache.kylin.guava30.shaded.common.collect.Lists; import org.apache.kylin.guava30.shaded.common.collect.Maps; import org.apache.kylin.metadata.cube.model.IndexEntity; import org.apache.kylin.metadata.model.NDataModel; import org.apache.kylin.metadata.model.NDataModelManager; import org.apache.kylin.metadata.model.Segments; import org.apache.kylin.metadata.project.NProjectManager; import org.apache.kylin.metadata.project.ProjectInstance; import org.apache.kylin.rest.constant.Constant; import org.apache.kylin.rest.controller.SegmentController; import org.apache.kylin.rest.request.BuildIndexRequest; import org.apache.kylin.rest.request.BuildSegmentsRequest; import org.apache.kylin.rest.request.CheckSegmentRequest; import org.apache.kylin.rest.request.IndexesToSegmentsRequest; import org.apache.kylin.rest.request.PartitionsBuildRequest; import org.apache.kylin.rest.request.PartitionsRefreshRequest; import org.apache.kylin.rest.request.SegmentsRequest; import org.apache.kylin.rest.response.DataResult; import org.apache.kylin.rest.response.EnvelopeResponse; import org.apache.kylin.rest.response.IndexResponse; import org.apache.kylin.rest.response.NDataModelResponse; import org.apache.kylin.rest.response.NDataSegmentResponse; import org.apache.kylin.rest.response.SegmentPartitionResponse; import org.apache.kylin.rest.service.FusionIndexService; import org.apache.kylin.rest.service.FusionModelService; import org.apache.kylin.rest.service.ModelService; import org.apache.kylin.rest.service.params.IndexPlanParams; import org.apache.kylin.rest.service.params.PaginationParams; import org.apache.kylin.rest.util.AclEvaluate; import 
org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PowerMockIgnore; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.modules.junit4.PowerMockRunnerDelegate; import org.springframework.http.MediaType; import org.springframework.security.authentication.TestingAuthenticationToken; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.MvcResult; import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; import org.springframework.test.web.servlet.result.MockMvcResultMatchers; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import shaded.parquet.com.fasterxml.jackson.core.JsonProcessingException; import shaded.parquet.com.fasterxml.jackson.databind.ObjectMapper; @RunWith(PowerMockRunner.class) @PowerMockRunnerDelegate(JUnit4.class) @PrepareForTest({ KylinConfig.class, NProjectManager.class }) @PowerMockIgnore({ "com.sun.security.*", "org.w3c.*", "javax.xml.*", "org.xml.*", "org.apache.cxf.*", "javax.management.*", "javax.script.*", "org.apache.hadoop.*", "javax.security.*", "java.security.*", "javax.crypto.*", "javax.net.ssl.*", "org.apache.kylin.profiler.AsyncProfiler" }) public class OpenSegmentControllerTest extends NLocalFileMetadataTestCase { @InjectMocks private final OpenSegmentController openSegmentController = Mockito.spy(new OpenSegmentController()); private final Authentication 
authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN); private MockMvc mockMvc; @Mock private SegmentController nModelController; @Mock private ModelService modelService; @Mock private FusionModelService fusionModelService; @Mock private AclEvaluate aclEvaluate; @Mock private FusionIndexService fusionIndexService; private ObjectMapper objectMapper = new ObjectMapper(); @Before public void setup() { MockitoAnnotations.openMocks(this); mockMvc = MockMvcBuilders.standaloneSetup(openSegmentController).defaultRequest(MockMvcRequestBuilders.get("/")) .defaultResponseCharacterEncoding(StandardCharsets.UTF_8).build(); SecurityContextHolder.getContext().setAuthentication(authentication); lenient().doReturn(true).when(aclEvaluate).hasProjectWritePermission(Mockito.any()); lenient().doReturn(true).when(aclEvaluate).hasProjectOperationPermission(Mockito.any()); } @Before public void setupResource() { overwriteSystemProp("HADOOP_USER_NAME", "root"); createTestMetadata(); } @After public void tearDown() { cleanupTestMetadata(); } private List<NDataModel> mockModels() { final List<NDataModel> models = new ArrayList<>(); NDataModel model = new NDataModel(); model.setUuid("model1"); models.add(new NDataModelResponse(model)); NDataModel model1 = new NDataModel(); model1.setUuid("model2"); models.add(new NDataModelResponse(model1)); NDataModel model2 = new NDataModel(); model2.setUuid("model3"); models.add(new NDataModelResponse(model2)); NDataModel model3 = new NDataModel(); model3.setUuid("model4"); models.add(new NDataModelResponse(model3)); return models; } private Segments<NDataSegmentResponse> mockSegments() { final Segments<NDataSegmentResponse> nDataSegments = new Segments<>(); NDataSegmentResponse segment = new NDataSegmentResponse(); segment.setId(RandomUtil.randomUUIDStr()); segment.setName("seg1"); nDataSegments.add(segment); return nDataSegments; } private NDataModelResponse mockGetModelName(String modelName, String project, String 
modelId) { NDataModelResponse model = new NDataModelResponse(); model.setUuid(modelId); Mockito.doReturn(model).when(modelService).getModel(modelName, project); return model; } @Test public void testGetSegments() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; mockGetModelName(modelName, project, modelId); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).param("page_offset", "1").param("project", project) .param("page_size", "5").param("start", "432").param("end", "2234").param("sort_by", "end_time") .param("reverse", "true").param("status", "") .param("statuses", "") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()).andReturn(); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).param("page_offset", "1").param("project", project) .param("page_size", "-5").param("start", "432").param("end", "2234").param("sort_by", "end_time") .param("reverse", "true").param("status", "") .param("statuses", "") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().is5xxServerError()); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).param("page_offset", "1").param("project", project) .param("page_size", "a").param("start", "432").param("end", "2234").param("sort_by", "end_time") .param("reverse", "true").param("status", "") .param("statuses", "") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isBadRequest()); } private List<IndexResponse> getIndexResponses() throws Exception { IndexResponse index = new IndexResponse(); index.setId(1L); 
index.setRelatedTables(Lists.newArrayList("table1", "table2")); return Lists.newArrayList(index); } @Test public void testBuildSegments() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; mockGetModelName(modelName, project, modelId); BuildSegmentsRequest request = new BuildSegmentsRequest(); request.setProject("default"); request.setStart("0"); request.setEnd("100"); Mockito.doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .buildSegmentsManually(modelId, request); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).buildSegmentsManually(eq(modelName), Mockito.any(BuildSegmentsRequest.class)); } @Test public void testRefreshSegmentsById() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; mockGetModelName(modelName, project, modelId); SegmentsRequest request = new SegmentsRequest(); request.setProject(project); request.setType(SegmentsRequest.SegmentsRequestType.REFRESH); request.setIds(new String[] { "1", "2" }); Mockito.doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .refreshOrMergeSegments(modelId, request); mockMvc.perform(MockMvcRequestBuilders.put("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); HashMap<String, String> req = Maps.newHashMap(); req.put("project", project); req.put("type", 
SegmentsRequest.SegmentsRequestType.REFRESH.name().toLowerCase(Locale.ROOT)); mockMvc.perform(MockMvcRequestBuilders.put("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(req)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().is5xxServerError()); req.put("priority", "a"); req.put("type", SegmentsRequest.SegmentsRequestType.REFRESH.name()); mockMvc.perform(MockMvcRequestBuilders.put("/api/models/{model_name}/segments", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(req)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().is5xxServerError()); Mockito.verify(openSegmentController).refreshOrMergeSegments(eq(modelName), Mockito.any(SegmentsRequest.class)); } @Test public void testCompleteSegments() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; String[] ids = { "ef5e0663-feba-4ed2-b71c-21958122bbff" }; IndexesToSegmentsRequest req = new IndexesToSegmentsRequest(); req.setProject(project); req.setParallelBuildBySegment(false); req.setSegmentIds(Lists.newArrayList(ids)); mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .addIndexesToSegments(modelId, req); Mockito.doReturn(new Pair("model_id", ids)).when(fusionModelService).convertSegmentIdWithName(modelId, project, ids, null); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); 
Mockito.verify(openSegmentController).completeSegments(modelName, project, false, ids, null, null, false, 3, null, null, null, false); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("priority", "0").accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).completeSegments(modelName, project, false, ids, null, null, false, 0, null, null, null, false); } @Test public void testCompleteSegmentsPartialBuild() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; String[] ids = { "ef5e0663-feba-4ed2-b71c-21958122bbff" }; Pair pair = new Pair<>(modelId, ids); List<Long> batchIndexIds = Lists.newArrayList(1L, 2L); IndexesToSegmentsRequest req = new IndexesToSegmentsRequest(); req.setProject(project); req.setParallelBuildBySegment(false); req.setSegmentIds(Lists.newArrayList(ids)); mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .addIndexesToSegments(modelId, req); Mockito.doReturn(pair).when(fusionModelService).convertSegmentIdWithName(modelId, project, ids, null); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("partial_build", "true") // .param("batch_index_ids", "1,2") // .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).completeSegments(modelName, project, false, ids, null, batchIndexIds, true, 3, null, null, 
null, false); } @Test public void testCompleteSegmentsWithoutIdsAndNames() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; IndexesToSegmentsRequest req = new IndexesToSegmentsRequest(); req.setProject(project); req.setParallelBuildBySegment(false); mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .addIndexesToSegments(modelId, req); MvcResult result = mockMvc .perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isInternalServerError()).andReturn(); String contentAsString = result.getResponse().getContentAsString(); Assert.assertTrue(contentAsString.contains(MsgPicker.getMsg().getEmptySegmentParameter())); } @Test public void testCompleteSegmentsWithIdsAndNames() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; String[] ids = { "ef5e0663-feba-4ed2-b71c-21958122bbff" }; String[] names = { "ef5e0663-feba-4ed2-b71c-21958122bbff" }; IndexesToSegmentsRequest req = new IndexesToSegmentsRequest(); req.setProject(project); req.setParallelBuildBySegment(false); mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .addIndexesToSegments(modelId, req); MvcResult result = mockMvc .perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", names) // .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) 
.andExpect(MockMvcResultMatchers.status().isInternalServerError()).andReturn(); String contentAsString = result.getResponse().getContentAsString(); Assert.assertTrue(contentAsString.contains(SEGMENT_CONFLICT_PARAMETER.getMsg())); } @Test public void testCompleteSegmentsByIndexStatus() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; String[] ids = { "ef5e0663-feba-4ed2-b71c-21958122bbff" }; Pair<String, String[]> pair = new Pair<>(modelId, ids); IndexesToSegmentsRequest req = new IndexesToSegmentsRequest(); req.setProject(project); req.setParallelBuildBySegment(false); req.setSegmentIds(Lists.newArrayList(ids)); mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .addIndexesToSegments(modelId, req); Mockito.doReturn(pair).when(fusionModelService).convertSegmentIdWithName(modelId, project, ids, null); MvcResult result = mockMvc .perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("batch_index_ids", "1,2") // .param("index_status", "NO_BUILD") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isInternalServerError()).andReturn(); String contentAsString = result.getResponse().getContentAsString(); Assert.assertTrue(contentAsString.contains(SEGMENT_INDEX_CONFLICT_PARAMETER.getMsg())); MvcResult result1 = mockMvc.perform(MockMvcRequestBuilders .post("/api/models/{model_name}/segments/completion", modelName).param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("index_status", "LOCKED").accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) 
.andExpect(MockMvcResultMatchers.status().isInternalServerError()).andReturn(); String content1 = result1.getResponse().getContentAsString(); Assert.assertTrue(content1.contains(SEGMENT_INDEX_STATUS_INVALID.getMsg())); IndexPlanParams indexPlanParams = new IndexPlanParams(project, modelId, null, null, Collections.emptyList(), Collections.singletonList(IndexEntity.Status.NO_BUILD), null); PaginationParams paginationParams = new PaginationParams(null, null, null, false); List<IndexResponse> emptyIndex = new ArrayList<>(); Mockito.doReturn(emptyIndex).when(fusionIndexService).getIndexes(indexPlanParams, paginationParams, null); MvcResult result2 = mockMvc .perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("index_status", "NO_BUILD") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()).andReturn(); String content2 = result2.getResponse().getContentAsString(); Assert.assertNull(getJobId(content2)); List<IndexResponse> indexResponseList = new ArrayList<>(); IndexResponse indexResponse = new IndexResponse(); indexResponse.setId(1L); indexResponseList.add(indexResponse); Mockito.doReturn(indexResponseList).when(fusionIndexService).getIndexes(indexPlanParams, paginationParams, null); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/completion", modelName) .param("project", "default") // .param("parallel", "false") // .param("ids", ids) // .param("names", (String) null) // .param("index_status", "NO_BUILD") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); } @SuppressWarnings("unchecked") private String getJobId(String content) throws JsonProcessingException { LinkedHashMap<String, Object> envelopeResponse = 
objectMapper.readValue(content, LinkedHashMap.class); Map<String, Object> data = (Map<String, Object>) envelopeResponse.get("data"); List<?> list = (List<?>) data.get("jobs"); Map<String, String> map = (Map<String, String>) list.get(0); return map.get("job_id"); } @Test public void testDeleteSegmentsAll() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; mockGetModelName(modelName, project, modelId); lenient().doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .deleteSegments(modelId, project, true, false, null, null); mockMvc.perform(MockMvcRequestBuilders.delete("/api/models/{model_name}/segments", modelName) .param("project", "default").param("purge", "true") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).deleteSegments(modelName, "default", true, false, null, null); } @Test public void testDeleteSegmentsByIds() throws Exception { String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String project = "default"; mockGetModelName(modelName, project, modelId); SegmentsRequest request = new SegmentsRequest(); request.setIds(new String[] { "1", "2" }); Mockito.doReturn(new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "")).when(nModelController) .deleteSegments(modelId, project, false, false, request.getIds(), null); mockMvc.perform(MockMvcRequestBuilders.delete("/api/models/{model_name}/segments", modelName) .param("project", "default").param("purge", "false").param("ids", request.getIds()) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).deleteSegments(modelName, "default", false, false, request.getIds(), null); } @Test public void testBuildIndicesManually() 
throws Exception { BuildIndexRequest request = new BuildIndexRequest(); request.setProject("default"); String modelName = "default_model_name"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; mockGetModelName(modelName, request.getProject(), modelId); Mockito.doAnswer(x -> null).when(nModelController).buildIndicesManually(modelId, request); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model}/indexes", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).buildIndicesManually(eq(modelName), Mockito.any(BuildIndexRequest.class)); } @Test public void testBuildIndicesManually2() throws Exception { HashMap<String, String> request = getBuildIndexRequest(); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model}/indexes", request.get("model_name")) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().is5xxServerError()); } @Test public void testCheckSegments() throws Exception { mockGetModelName("test", "default", "modelId"); Mockito.doAnswer(x -> null).when(modelService).checkSegments(Mockito.any(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString()); CheckSegmentRequest request = new CheckSegmentRequest(); request.setProject("default"); request.setStart("0"); request.setEnd("1"); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/test/segments/check") .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); Mockito.verify(openSegmentController).checkSegments(Mockito.anyString(), Mockito.any()); } @Test public void 
testGetMultiPartitions() throws Exception { String modelName = "default_model_name"; String project = "default"; String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa"; String segmentId = "73570f31-05a5-448f-973c-44209830dd01"; mockGetModelName(modelName, project, modelId); DataResult<List<SegmentPartitionResponse>> result; lenient().when(nModelController.getMultiPartition(modelId, project, segmentId, Lists.newArrayList(), 0, 10, "last_modify_time", true)).thenReturn(null); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments/multi_partition", modelName) .param("project", project).param("segment_id", segmentId) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments/multi_partition", modelName) .param("project", project).param("segment_id", segmentId) .param("page_offset", "a") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isBadRequest()); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments/multi_partition", modelName) .param("project", project).param("segment_id", segmentId) .param("page_offset", "-1") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().is5xxServerError()); mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model_name}/segments/multi_partition", modelName) .param("project", project).param("segment_id", segmentId) .param("page_offset", "1") .param("page_size", "1") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); } @Test public void testBuildMultiPartition() throws Exception { String modelName = "multi_level_partition"; String project = "multi_level_partition"; String modelId = "747f864b-9721-4b97-acde-0aa8e8656cba"; mockGetModelName(modelName, 
project, modelId); PartitionsBuildRequest request = new PartitionsBuildRequest(); request.setProject("multi_level_partition"); Mockito.doReturn(null).when(nModelController).buildMultiPartition(modelId, request); mockMvc.perform(MockMvcRequestBuilders.post("/api/models/{model_name}/segments/multi_partition", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); } @Test public void testRefreshMultiPartition() throws Exception { String modelName = "multi_level_partition"; String project = "multi_level_partition"; String modelId = "747f864b-9721-4b97-acde-0aa8e8656cba"; mockGetModelName(modelName, project, modelId); PartitionsRefreshRequest request = new PartitionsRefreshRequest(); request.setProject("multi_level_partition"); Mockito.doReturn(null).when(nModelController).refreshMultiPartition(modelId, request); mockMvc.perform(MockMvcRequestBuilders.put("/api/models/{model_name}/segments/multi_partition", modelName) .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(request)) .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); } @Test public void testDeleteMultiPartition() throws Exception { String modelName = "multi_level_partition"; String project = "multi_level_partition"; String modelId = "747f864b-9721-4b97-acde-0aa8e8656cba"; String segmentId = "8892fa3f-f607-4eec-8159-7c5ae2f16942"; mockGetModelName(modelName, project, modelId); PartitionsRefreshRequest request = new PartitionsRefreshRequest(); request.setProject("multi_level_partition"); Mockito.doNothing().when(modelService).deletePartitionsByValues(anyString(), anyString(), anyString(), anyList()); mockMvc.perform(MockMvcRequestBuilders.delete("/api/models/segments/multi_partition").param("model", modelName) .param("project", 
project).param("segment_id", segmentId).param("sub_partition_values", "1") .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) .andExpect(MockMvcResultMatchers.status().isOk()); } @Test public void testGetModel() { NDataModelManager nDataModelManager = Mockito.mock(NDataModelManager.class); when(modelService.getManager(any(), anyString())).thenReturn(nDataModelManager); when(modelService.getModel(anyString(), anyString())).thenCallRealMethod(); when(nDataModelManager.listAllModels()).thenReturn(Collections.emptyList()); try { modelService.getModel("SOME_ALIAS", "SOME_PROJECT"); Assert.fail(); } catch (Exception e) { Assert.assertTrue(e instanceof KylinException); Assert.assertEquals(MODEL_NAME_NOT_EXIST.getCodeMsg("SOME_ALIAS"), e.getLocalizedMessage()); } } @Test public void testCheckProjectMLP() { PowerMockito.mockStatic(KylinConfig.class); PowerMockito.mockStatic(NProjectManager.class); KylinConfig kylinConfig = Mockito.mock(KylinConfig.class); PowerMockito.when(KylinConfig.getInstanceFromEnv()).thenReturn(kylinConfig); NProjectManager projectManager = Mockito.mock(NProjectManager.class); PowerMockito.when(NProjectManager.getInstance(Mockito.any())).thenReturn(projectManager); ProjectInstance projectInstance = Mockito.mock(ProjectInstance.class); when(projectManager.getProject(anyString())).thenReturn(projectInstance); KylinConfigExt kylinConfigExt = Mockito.mock(KylinConfigExt.class); when(projectInstance.getName()).thenReturn("TEST_PROJECT_NAME"); when(projectInstance.getConfig()).thenReturn(kylinConfigExt); when(kylinConfigExt.isMultiPartitionEnabled()).thenReturn(false); try { ReflectionTestUtils.invokeMethod(openSegmentController, "checkProjectMLP", "SOME_PROJECT"); Assert.fail(); } catch (Exception e) { Assert.assertTrue(e instanceof KylinException); Assert.assertEquals(PROJECT_MULTI_PARTITION_DISABLE.getCodeMsg("TEST_PROJECT_NAME"), e.getLocalizedMessage()); } } private HashMap<String, String> getBuildIndexRequest() { HashMap result = 
new HashMap(); result.put("project", "default"); result.put("priority", "2"); result.put("model_name", "default_model_name"); return result; } }
googleapis/google-cloud-java
36,513
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/FeatureOnlineStoreServiceProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/feature_online_store_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; public final class FeatureOnlineStoreServiceProto { private FeatureOnlineStoreServiceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_CompositeKey_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_CompositeKey_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
// NOTE(review): protoc-generated descriptor holder ("DO NOT EDIT" above). Each message/nested
// message declared in feature_online_store_service.proto gets one Descriptor plus one
// FieldAccessorTable, all of which are assigned by the static initializer at the bottom of the
// class. Do not rename or reorder these fields by hand — the generated message classes in this
// package reference them by name. (The declarations below continue uninterrupted.)
internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_FeatureNameValuePair_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_FeatureNameValuePair_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Embedding_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Embedding_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_StringFilter_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_StringFilter_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_NumericFilter_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_NumericFilter_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Parameters_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Parameters_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_Neighbor_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_Neighbor_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_FeatureValueAndTimestamp_descriptor; static final 
// The last accessor-table declarations are followed by getDescriptor() and the static
// initializer. descriptorData holds the serialized FileDescriptorProto for
// feature_online_store_service.proto as octal-escaped string literals — these bytes are
// load-bearing protocol data; never edit them by hand (regenerate with protoc instead).
// CAUTION: in this stored copy one literal is hard-wrapped across a physical line break
// further below; keep the byte content exactly as-is.
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_FeatureValueAndTimestamp_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_WriteResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_WriteResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + "Bgoogle/cloud/aiplatform/v1beta1/feature_online_store_service.proto\022\037google.clo" + "ud.aiplatform.v1beta1\032\034google/api/annota" + "tions.proto\032\027google/api/client.proto\032\037go" + "ogle/api/field_behavior.proto\032\031google/ap" + "i/resource.proto\032Agoogle/cloud/aiplatform/v1beta1/featurestore_online_service.pr" + "oto\032\034google/protobuf/struct.proto\032\037googl" + "e/protobuf/timestamp.proto\032\027google/rpc/status.proto\"\252\001\n" + "\022FeatureViewDataKey\022\r\n" + "\003key\030\001 \001(\tH\000\022Y\n\r" + "composite_key\030\002 \001(\0132@.googl" + "e.cloud.aiplatform.v1beta1.FeatureViewDataKey.CompositeKeyH\000\032\035\n" + "\014CompositeKey\022\r\n" + "\005parts\030\001 \003(\tB\013\n" + "\tkey_oneof\"\273\003\n" + "\031FetchFeatureValuesRequest\022\020\n" + "\002id\030\003 \001(\tB\002\030\001H\000\022C\n" + "\014feature_view\030\001 
\001(\tB-\340A\002\372A\'\n" + "%aiplatform.googleapis.com/FeatureView\022J\n" + "\010data_key\030\006 \001(\0132" + "3.google.cloud.aiplatform.v1beta1.FeatureViewDataKeyB\003\340A\001\022P\n" + "\013data_format\030\007 \001(\01626" + ".google.cloud.aiplatform.v1beta1.FeatureViewDataFormatB\003\340A\001\022U\n" + "\006format\030\005 \001(\0162A.go" + "ogle.cloud.aiplatform.v1beta1.FetchFeatureValuesRequest.FormatB\002\030\001\"E\n" + "\006Format\022\026\n" + "\022FORMAT_UNSPECIFIED\020\000\022\r\n" + "\tKEY_VALUE\020\001\022\020\n" + "\014PROTO_STRUCT\020\002\032\002\030\001B\013\n" + "\tentity_id\"\222\004\n" + "\032FetchFeatureValuesResponse\022j\n\n" + "key_values\030\003 \001(\0132T.google.cloud.aiplatform.v1beta1.Fetc" + "hFeatureValuesResponse.FeatureNameValuePairListH\000\022/\n" + "\014proto_struct\030\002 \001(\0132\027.google.protobuf.StructH\000\022E\n" + "\010data_key\030\004 \001(\01323.g" + "oogle.cloud.aiplatform.v1beta1.FeatureViewDataKey\032\205\002\n" + "\030FeatureNameValuePairList\022{\n" + "\010features\030\001 \003(\0132i.google.cloud.aiplatfo" + "rm.v1beta1.FetchFeatureValuesResponse.Fe" + "atureNameValuePairList.FeatureNameValuePair\032l\n" + "\024FeatureNameValuePair\022>\n" + "\005value\030\002 \001" + "(\0132-.google.cloud.aiplatform.v1beta1.FeatureValueH\000\022\014\n" + "\004name\030\001 \001(\tB\006\n" + "\004dataB\010\n" + "\006format\"\376\001\n" + "\"StreamingFetchFeatureValuesRequest\022C\n" + "\014feature_view\030\001 \001(\tB-\340A\002\372A\'\n" + "%aiplatform.googleapis.com/FeatureView\022F\n" + "\tdata_keys\030\002" + " \003(\01323.google.cloud.aiplatform.v1beta1.FeatureViewDataKey\022K\n" + "\013data_format\030\003" + " \001(\01626.google.cloud.aiplatform.v1beta1.FeatureViewDataFormat\"\347\001\n" + "#StreamingFetchFeatureValuesResponse\022\"\n" + "\006status\030\001 \001(\0132\022.google.rpc.Status\022I\n" + "\004data\030\002 \003(\0132;.google." 
+ "cloud.aiplatform.v1beta1.FetchFeatureValuesResponse\022Q\n" + "\024data_keys_with_error\030\003 \003(" + "\01323.google.cloud.aiplatform.v1beta1.FeatureViewDataKey\"\274\010\n" + "\024NearestNeighborQuery\022\030\n" + "\tentity_id\030\001 \001(\tB\003\340A\001H\000\022Y\n" + "\tembedding\030\002" + " \001(\0132?.google.cloud.aiplatform.v1beta1.NearestNeighborQuery.EmbeddingB\003\340A\001H\000\022\033\n" + "\016neighbor_count\030\003 \001(\005B\003\340A\001\022_\n" + "\016string_filters\030\004 \003(\0132B.google.cloud.aiplatform.v1be" + "ta1.NearestNeighborQuery.StringFilterB\003\340A\001\022a\n" + "\017numeric_filters\030\010 \003(\0132C.google.clo" + "ud.aiplatform.v1beta1.NearestNeighborQuery.NumericFilterB\003\340A\001\0222\n" + "%per_crowding_attribute_neighbor_count\030\005 \001(\005B\003\340A\001\022Y\n\n" + "parameters\030\007 \001(\0132@.google.cloud.aiplatform." + "v1beta1.NearestNeighborQuery.ParametersB\003\340A\001\032\037\n" + "\tEmbedding\022\022\n" + "\005value\030\001 \003(\002B\003\340A\001\032V\n" + "\014StringFilter\022\021\n" + "\004name\030\001 \001(\tB\003\340A\002\022\031\n" + "\014allow_tokens\030\002 \003(\tB\003\340A\001\022\030\n" + "\013deny_tokens\030\003 \003(\tB\003\340A\001\032\324\002\n\r" + "NumericFilter\022\023\n" + "\tvalue_int\030\002 \001(\003H\000\022\025\n" + "\013value_float\030\003 \001(\002H\000\022\026\n" + "\014value_double\030\004 \001(\001H\000\022\021\n" + "\004name\030\001 \001(\tB\003\340A\002\022b\n" + "\002op\030\005 \001(\0162L.google.cloud.aiplatform.v1beta1.Nea" + "restNeighborQuery.NumericFilter.OperatorB\003\340A\001H\001\210\001\001\"x\n" + "\010Operator\022\030\n" + "\024OPERATOR_UNSPECIFIED\020\000\022\010\n" + "\004LESS\020\001\022\016\n\n" + "LESS_EQUAL\020\002\022\t\n" + "\005EQUAL\020\003\022\021\n\r" + "GREATER_EQUAL\020\004\022\013\n" + "\007GREATER\020\005\022\r\n" + "\tNOT_EQUAL\020\006B\007\n" + "\005ValueB\005\n" + "\003_op\032c\n\n" + "Parameters\022,\n" + 
"\037approximate_neighbor_candidates\030\001 \001(\005B\003\340A\001\022\'\n" + "\032leaf_nodes_search_fraction\030\002 \001(\001B\003\340A\001B\n\n" + "\010instance\"\317\001\n" + "\034SearchNearestEntitiesRequest\022C\n" + "\014feature_view\030\001 \001(\tB-\340A\002\372A\'\n" + "%aiplatform.googleapis.com/FeatureView\022I\n" + "\005query\030\002 \001(\01325.google.cloud.aiplat" + "form.v1beta1.NearestNeighborQueryB\003\340A\002\022\037\n" + "\022return_full_entity\030\003 \001(\010B\003\340A\001\"\353\001\n" + "\020NearestNeighbors\022M\n" + "\tneighbors\030\001 \003(\0132:.google" + ".cloud.aiplatform.v1beta1.NearestNeighbors.Neighbor\032\207\001\n" + "\010Neighbor\022\021\n" + "\tentity_id\030\001 \001(\t\022\020\n" + "\010distance\030\002 \001(\001\022V\n" + "\021entity_key_values\030\003" + " \001(\0132;.google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse\"m\n" + "\035SearchNearestEntitiesResponse\022L\n" + "\021nearest_neighbors\030\001" + " \001(\01321.google.cloud.aiplatform.v1beta1.NearestNeighbors\"\304\006\n" + "\035FeatureViewDirectWriteRequest\022@\n" + "\014feature_view\030\001 \001(\tB*\372A\'\n" + "%aiplatform.googleapis.com/FeatureView\022\200\001\n" + "\033data_key_and_feature_values\030\002 \003(\0132V" + ".google.cloud.aiplatform.v1beta1.Feature" + "ViewDirectWriteRequest.DataKeyAndFeatureValuesB\003\340A\002\032\335\004\n" + "\027DataKeyAndFeatureValues\022E\n" + "\010data_key\030\001" + " \001(\01323.google.cloud.aiplatform.v1beta1.FeatureViewDataKey\022p\n" + "\010features\030\002 \003(\0132^.google.cloud.aiplatform.v1bet" + "a1.FeatureViewDirectWriteRequest.DataKeyAndFeatureValues.Feature\032\210\003\n" + "\007Feature\022>\n" + "\005value\030\003" + " \001(\0132-.google.cloud.aiplatform.v1beta1.FeatureValueH\000\022\226\001\n" + "\023value_and_timestamp\030\002 \001(\0132w.google.cloud.aiplatform.v1b" + "eta1.FeatureViewDirectWriteRequest.DataK" + "eyAndFeatureValues.Feature.FeatureValueAndTimestampH\000\022\014\n" + 
"\004name\030\001 \001(\t\032\207\001\n" + "\030FeatureValueAndTimestamp\022<\n" + "\005value\030\001 \001(\0132-.google.cloud.aiplatform.v1beta1.FeatureValue\022-\n" + "\ttimestamp\030\002 \001(\0132\032.google.protobuf.TimestampB\014\n\n" + "data_oneof\"\302\002\n" + "\036FeatureViewDirectWriteResponse\022\"\n" + "\006status\030\001 \001(\0132\022.google.rpc.Status\022f\n" + "\017write_responses\030\002 \003(\0132M.go" + "ogle.cloud.aiplatform.v1beta1.FeatureViewDirectWriteResponse.WriteResponse\032\223\001\n\r" + "WriteResponse\022E\n" + "\010data_key\030\001 \001(\01323.google." + "cloud.aiplatform.v1beta1.FeatureViewDataKey\022;\n" + "\027online_store_write_time\030\002 \001(\0132\032.google.protobuf.Timestamp*b\n" + "\025FeatureViewDataFormat\022(\n" + "$FEATURE_VIEW_DATA_FORMAT_UNSPECIFIED\020\000\022\r\n" + "\tKEY_VALUE\020\001\022\020\n" + "\014PROTO_STRUCT\020\0022\350\t\n" + "\031FeatureOnlineStoreService\022\232\002\n" + "\022FetchFeatureValues\022:.google.cloud.aiplatf" + "orm.v1beta1.FetchFeatureValuesRequest\032;." 
+ "google.cloud.aiplatform.v1beta1.FetchFeatureValuesResponse\"\212\001\332A\026feature_view," + " data_key\202\323\344\223\002k\"f/v1beta1/{feature_view=pro" + "jects/*/locations/*/featureOnlineStores/" + "*/featureViews/*}:fetchFeatureValues:\001*\022\303\002\n" + "\033StreamingFetchFeatureValues\022C.google.cloud.aiplatform.v1beta1.StreamingFetch" + "FeatureValuesRequest\032D.google.cloud.aipl" + "atform.v1beta1.StreamingFetchFeatureValuesResponse\"\224\001\332A\027feature_view," + " data_keys\202\323\344\223\002t\"o/v1beta1/{feature_view=projects/*" + "/locations/*/featureOnlineStores/*/featu" + "reViews/*}:streamingFetchFeatureValues:\001*(\0010\001\022\214\002\n" + "\025SearchNearestEntities\022=.google.cloud.aiplatform.v1beta1.SearchNearestE" + "ntitiesRequest\032>.google.cloud.aiplatform.v1beta1.SearchNearestEntitiesResponse\"t" + "\202\323\344\223\002n\"i/v1beta1/{feature_view=projects/" + "*/locations/*/featureOnlineStores/*/featureViews/*}:searchNearestEntities:\001*\022\211\002\n" + "\026FeatureViewDirectWrite\022>.google.cloud.aiplatform.v1beta1.FeatureViewDirectWrite" + "Request\032?.google.cloud.aiplatform.v1beta" + "1.FeatureViewDirectWriteResponse\"j\202\323\344\223\002d" + "\"_/v1beta1/{feature_view=projects/*/locations/*/featureOnlineStores/*/featureVie" + "ws/*}:directWrite:\001*(\0010\001\032M\312A\031aiplatform." 
+ "googleapis.com\322A.https://www.googleapis.com/auth/cloud-platformB\365\001\n" + "#com.google.cloud.aiplatform.v1beta1B\036FeatureOnlineSt" + "oreServiceProtoP\001ZCcloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb;aiplat" + "formpb\252\002\037Google.Cloud.AIPlatform.V1Beta1" + "\312\002\037Google\\Cloud\\AIPlatform\\V1beta1\352\002\"Goo" + "gle::Cloud::AIPlatform::V1beta1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServiceProto.getDescriptor(), com.google.protobuf.StructProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_descriptor, new java.lang.String[] { "Key", "CompositeKey", "KeyOneof", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_CompositeKey_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_CompositeKey_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDataKey_CompositeKey_descriptor, new java.lang.String[] { "Parts", }); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesRequest_descriptor = 
// Per-message wiring: each descriptor is fetched positionally via getMessageTypes().get(n) /
// getNestedTypes().get(n), and its FieldAccessorTable lists the camel-cased field names (plus
// oneof names) the reflective accessors use. The indices and name arrays come from protoc;
// do not adjust them independently of descriptorData above.
getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesRequest_descriptor, new java.lang.String[] { "Id", "FeatureView", "DataKey", "DataFormat", "Format", "EntityId", }); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_descriptor, new java.lang.String[] { "KeyValues", "ProtoStruct", "DataKey", "Format", }); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_descriptor, new java.lang.String[] { "Features", }); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_FeatureNameValuePair_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_FeatureNameValuePair_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_aiplatform_v1beta1_FetchFeatureValuesResponse_FeatureNameValuePairList_FeatureNameValuePair_descriptor, new java.lang.String[] { "Value", "Name", "Data", }); internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesRequest_descriptor, new java.lang.String[] { "FeatureView", "DataKeys", "DataFormat", }); internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesResponse_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_StreamingFetchFeatureValuesResponse_descriptor, new java.lang.String[] { "Status", "Data", "DataKeysWithError", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor, new java.lang.String[] { "EntityId", "Embedding", "NeighborCount", "StringFilters", "NumericFilters", "PerCrowdingAttributeNeighborCount", "Parameters", "Instance", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Embedding_descriptor = internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Embedding_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Embedding_descriptor, new java.lang.String[] { "Value", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_StringFilter_descriptor = internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor .getNestedTypes() .get(1); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_StringFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_StringFilter_descriptor, new java.lang.String[] { "Name", "AllowTokens", "DenyTokens", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_NumericFilter_descriptor = internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor .getNestedTypes() .get(2); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_NumericFilter_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_NumericFilter_descriptor, new java.lang.String[] { "ValueInt", "ValueFloat", "ValueDouble", "Name", "Op", "Value", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Parameters_descriptor = internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_descriptor .getNestedTypes() .get(3); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Parameters_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighborQuery_Parameters_descriptor, new java.lang.String[] { "ApproximateNeighborCandidates", "LeafNodesSearchFraction", }); internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor = getDescriptor().getMessageTypes().get(6); 
internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesRequest_descriptor, new java.lang.String[] { "FeatureView", "Query", "ReturnFullEntity", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_descriptor, new java.lang.String[] { "Neighbors", }); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_Neighbor_descriptor = internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_Neighbor_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_NearestNeighbors_Neighbor_descriptor, new java.lang.String[] { "EntityId", "Distance", "EntityKeyValues", }); internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesResponse_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_SearchNearestEntitiesResponse_descriptor, new java.lang.String[] { "NearestNeighbors", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_descriptor, new java.lang.String[] { "FeatureView", "DataKeyAndFeatureValues", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_descriptor, new java.lang.String[] { "DataKey", "Features", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_descriptor, new java.lang.String[] { "Value", "ValueAndTimestamp", "Name", "DataOneof", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_FeatureValueAndTimestamp_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_FeatureValueAndTimestamp_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteRequest_DataKeyAndFeatureValues_Feature_FeatureValueAndTimestamp_descriptor, new java.lang.String[] { "Value", "Timestamp", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_descriptor, new java.lang.String[] { "Status", "WriteResponses", }); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_WriteResponse_descriptor = internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_descriptor .getNestedTypes() .get(0); internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_WriteResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_aiplatform_v1beta1_FeatureViewDirectWriteResponse_WriteResponse_descriptor, new java.lang.String[] { "DataKey", "OnlineStoreWriteTime", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); 
// NOTE(review): the ExtensionRegistry above re-resolves the file descriptor so custom options
// (field_behavior, http bindings, resource references, method signatures, oauth scopes) become
// readable; the trailing getDescriptor() calls presumably just force initialization of the
// dependency proto classes — standard protoc output, verify against regenerated code if editing.
com.google.cloud.aiplatform.v1beta1.FeaturestoreOnlineServiceProto.getDescriptor(); com.google.protobuf.StructProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
googleapis/google-cloud-java
37,313
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/ValidateUrlMapRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * A request message for UrlMaps.Validate. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.ValidateUrlMapRequest} */ public final class ValidateUrlMapRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.ValidateUrlMapRequest) ValidateUrlMapRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ValidateUrlMapRequest.newBuilder() to construct. 
private ValidateUrlMapRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ValidateUrlMapRequest() { project_ = ""; urlMap_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ValidateUrlMapRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ValidateUrlMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ValidateUrlMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ValidateUrlMapRequest.class, com.google.cloud.compute.v1.ValidateUrlMapRequest.Builder.class); } private int bitField0_; public static final int PROJECT_FIELD_NUMBER = 227560217; @SuppressWarnings("serial") private volatile java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. 
*/ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int URL_MAP_FIELD_NUMBER = 367020684; @SuppressWarnings("serial") private volatile java.lang.Object urlMap_ = ""; /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The urlMap. */ @java.lang.Override public java.lang.String getUrlMap() { java.lang.Object ref = urlMap_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); urlMap_ = s; return s; } } /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for urlMap. */ @java.lang.Override public com.google.protobuf.ByteString getUrlMapBytes() { java.lang.Object ref = urlMap_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); urlMap_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int URL_MAPS_VALIDATE_REQUEST_RESOURCE_FIELD_NUMBER = 395913455; private com.google.cloud.compute.v1.UrlMapsValidateRequest urlMapsValidateRequestResource_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the urlMapsValidateRequestResource field is set. 
*/ @java.lang.Override public boolean hasUrlMapsValidateRequestResource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The urlMapsValidateRequestResource. */ @java.lang.Override public com.google.cloud.compute.v1.UrlMapsValidateRequest getUrlMapsValidateRequestResource() { return urlMapsValidateRequestResource_ == null ? com.google.cloud.compute.v1.UrlMapsValidateRequest.getDefaultInstance() : urlMapsValidateRequestResource_; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.UrlMapsValidateRequestOrBuilder getUrlMapsValidateRequestResourceOrBuilder() { return urlMapsValidateRequestResource_ == null ? 
com.google.cloud.compute.v1.UrlMapsValidateRequest.getDefaultInstance() : urlMapsValidateRequestResource_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(urlMap_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 367020684, urlMap_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(395913455, getUrlMapsValidateRequestResource()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(urlMap_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(367020684, urlMap_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 395913455, getUrlMapsValidateRequestResource()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.ValidateUrlMapRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.ValidateUrlMapRequest other = (com.google.cloud.compute.v1.ValidateUrlMapRequest) obj; if (!getProject().equals(other.getProject())) return false; 
if (!getUrlMap().equals(other.getUrlMap())) return false; if (hasUrlMapsValidateRequestResource() != other.hasUrlMapsValidateRequestResource()) return false; if (hasUrlMapsValidateRequestResource()) { if (!getUrlMapsValidateRequestResource().equals(other.getUrlMapsValidateRequestResource())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + URL_MAP_FIELD_NUMBER; hash = (53 * hash) + getUrlMap().hashCode(); if (hasUrlMapsValidateRequestResource()) { hash = (37 * hash) + URL_MAPS_VALIDATE_REQUEST_RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getUrlMapsValidateRequestResource().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( 
PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.compute.v1.ValidateUrlMapRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for UrlMaps.Validate. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.ValidateUrlMapRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.ValidateUrlMapRequest) com.google.cloud.compute.v1.ValidateUrlMapRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ValidateUrlMapRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ValidateUrlMapRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.ValidateUrlMapRequest.class, com.google.cloud.compute.v1.ValidateUrlMapRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.ValidateUrlMapRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void 
maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUrlMapsValidateRequestResourceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; project_ = ""; urlMap_ = ""; urlMapsValidateRequestResource_ = null; if (urlMapsValidateRequestResourceBuilder_ != null) { urlMapsValidateRequestResourceBuilder_.dispose(); urlMapsValidateRequestResourceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_ValidateUrlMapRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.ValidateUrlMapRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.ValidateUrlMapRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.ValidateUrlMapRequest build() { com.google.cloud.compute.v1.ValidateUrlMapRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.ValidateUrlMapRequest buildPartial() { com.google.cloud.compute.v1.ValidateUrlMapRequest result = new com.google.cloud.compute.v1.ValidateUrlMapRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.compute.v1.ValidateUrlMapRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.project_ = project_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.urlMap_ = urlMap_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.urlMapsValidateRequestResource_ = urlMapsValidateRequestResourceBuilder_ == null ? 
urlMapsValidateRequestResource_ : urlMapsValidateRequestResourceBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.ValidateUrlMapRequest) { return mergeFrom((com.google.cloud.compute.v1.ValidateUrlMapRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.ValidateUrlMapRequest other) { if (other == com.google.cloud.compute.v1.ValidateUrlMapRequest.getDefaultInstance()) return this; if (!other.getProject().isEmpty()) { project_ = other.project_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getUrlMap().isEmpty()) { urlMap_ = other.urlMap_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasUrlMapsValidateRequestResource()) { mergeUrlMapsValidateRequestResource(other.getUrlMapsValidateRequestResource()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1820481738: { project_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 1820481738 case -1358801822: { urlMap_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case -1358801822 case -1127659654: { input.readMessage( getUrlMapsValidateRequestResourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case -1127659654 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. 
*/ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for project to set. * @return This builder for chaining. */ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object urlMap_ = ""; /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The urlMap. 
*/ public java.lang.String getUrlMap() { java.lang.Object ref = urlMap_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); urlMap_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for urlMap. */ public com.google.protobuf.ByteString getUrlMapBytes() { java.lang.Object ref = urlMap_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); urlMap_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The urlMap to set. * @return This builder for chaining. */ public Builder setUrlMap(java.lang.String value) { if (value == null) { throw new NullPointerException(); } urlMap_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearUrlMap() { urlMap_ = getDefaultInstance().getUrlMap(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Name of the UrlMap resource to be validated as. * </pre> * * <code>string url_map = 367020684 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for urlMap to set. * @return This builder for chaining. 
*/ public Builder setUrlMapBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); urlMap_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.compute.v1.UrlMapsValidateRequest urlMapsValidateRequestResource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.UrlMapsValidateRequest, com.google.cloud.compute.v1.UrlMapsValidateRequest.Builder, com.google.cloud.compute.v1.UrlMapsValidateRequestOrBuilder> urlMapsValidateRequestResourceBuilder_; /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the urlMapsValidateRequestResource field is set. */ public boolean hasUrlMapsValidateRequestResource() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The urlMapsValidateRequestResource. */ public com.google.cloud.compute.v1.UrlMapsValidateRequest getUrlMapsValidateRequestResource() { if (urlMapsValidateRequestResourceBuilder_ == null) { return urlMapsValidateRequestResource_ == null ? 
com.google.cloud.compute.v1.UrlMapsValidateRequest.getDefaultInstance() : urlMapsValidateRequestResource_; } else { return urlMapsValidateRequestResourceBuilder_.getMessage(); } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUrlMapsValidateRequestResource( com.google.cloud.compute.v1.UrlMapsValidateRequest value) { if (urlMapsValidateRequestResourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } urlMapsValidateRequestResource_ = value; } else { urlMapsValidateRequestResourceBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUrlMapsValidateRequestResource( com.google.cloud.compute.v1.UrlMapsValidateRequest.Builder builderForValue) { if (urlMapsValidateRequestResourceBuilder_ == null) { urlMapsValidateRequestResource_ = builderForValue.build(); } else { urlMapsValidateRequestResourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUrlMapsValidateRequestResource( com.google.cloud.compute.v1.UrlMapsValidateRequest value) { if (urlMapsValidateRequestResourceBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && urlMapsValidateRequestResource_ != null && urlMapsValidateRequestResource_ != com.google.cloud.compute.v1.UrlMapsValidateRequest.getDefaultInstance()) { 
getUrlMapsValidateRequestResourceBuilder().mergeFrom(value); } else { urlMapsValidateRequestResource_ = value; } } else { urlMapsValidateRequestResourceBuilder_.mergeFrom(value); } if (urlMapsValidateRequestResource_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUrlMapsValidateRequestResource() { bitField0_ = (bitField0_ & ~0x00000004); urlMapsValidateRequestResource_ = null; if (urlMapsValidateRequestResourceBuilder_ != null) { urlMapsValidateRequestResourceBuilder_.dispose(); urlMapsValidateRequestResourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.UrlMapsValidateRequest.Builder getUrlMapsValidateRequestResourceBuilder() { bitField0_ |= 0x00000004; onChanged(); return getUrlMapsValidateRequestResourceFieldBuilder().getBuilder(); } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.compute.v1.UrlMapsValidateRequestOrBuilder getUrlMapsValidateRequestResourceOrBuilder() { if (urlMapsValidateRequestResourceBuilder_ != null) { return urlMapsValidateRequestResourceBuilder_.getMessageOrBuilder(); } else { return urlMapsValidateRequestResource_ == null ? 
com.google.cloud.compute.v1.UrlMapsValidateRequest.getDefaultInstance() : urlMapsValidateRequestResource_; } } /** * * * <pre> * The body resource for this request * </pre> * * <code> * .google.cloud.compute.v1.UrlMapsValidateRequest url_maps_validate_request_resource = 395913455 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.UrlMapsValidateRequest, com.google.cloud.compute.v1.UrlMapsValidateRequest.Builder, com.google.cloud.compute.v1.UrlMapsValidateRequestOrBuilder> getUrlMapsValidateRequestResourceFieldBuilder() { if (urlMapsValidateRequestResourceBuilder_ == null) { urlMapsValidateRequestResourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.compute.v1.UrlMapsValidateRequest, com.google.cloud.compute.v1.UrlMapsValidateRequest.Builder, com.google.cloud.compute.v1.UrlMapsValidateRequestOrBuilder>( getUrlMapsValidateRequestResource(), getParentForChildren(), isClean()); urlMapsValidateRequestResource_ = null; } return urlMapsValidateRequestResourceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.ValidateUrlMapRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.ValidateUrlMapRequest) private static final com.google.cloud.compute.v1.ValidateUrlMapRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.ValidateUrlMapRequest(); } public static com.google.cloud.compute.v1.ValidateUrlMapRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ValidateUrlMapRequest> PARSER = new 
com.google.protobuf.AbstractParser<ValidateUrlMapRequest>() { @java.lang.Override public ValidateUrlMapRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ValidateUrlMapRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ValidateUrlMapRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.ValidateUrlMapRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,644
java-domains/google-cloud-domains/src/main/java/com/google/cloud/domains/v1alpha2/stub/GrpcDomainsStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.domains.v1alpha2.stub; import static com.google.cloud.domains.v1alpha2.DomainsClient.ListRegistrationsPagedResponse; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.domains.v1alpha2.AuthorizationCode; import com.google.cloud.domains.v1alpha2.ConfigureContactSettingsRequest; import com.google.cloud.domains.v1alpha2.ConfigureDnsSettingsRequest; import com.google.cloud.domains.v1alpha2.ConfigureManagementSettingsRequest; import com.google.cloud.domains.v1alpha2.DeleteRegistrationRequest; import com.google.cloud.domains.v1alpha2.ExportRegistrationRequest; import com.google.cloud.domains.v1alpha2.GetRegistrationRequest; import com.google.cloud.domains.v1alpha2.ListRegistrationsRequest; import com.google.cloud.domains.v1alpha2.ListRegistrationsResponse; import com.google.cloud.domains.v1alpha2.OperationMetadata; import com.google.cloud.domains.v1alpha2.RegisterDomainRequest; import com.google.cloud.domains.v1alpha2.Registration; import 
com.google.cloud.domains.v1alpha2.ResetAuthorizationCodeRequest; import com.google.cloud.domains.v1alpha2.RetrieveAuthorizationCodeRequest; import com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersRequest; import com.google.cloud.domains.v1alpha2.RetrieveRegisterParametersResponse; import com.google.cloud.domains.v1alpha2.RetrieveTransferParametersRequest; import com.google.cloud.domains.v1alpha2.RetrieveTransferParametersResponse; import com.google.cloud.domains.v1alpha2.SearchDomainsRequest; import com.google.cloud.domains.v1alpha2.SearchDomainsResponse; import com.google.cloud.domains.v1alpha2.TransferDomainRequest; import com.google.cloud.domains.v1alpha2.UpdateRegistrationRequest; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the Domains service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/ @BetaApi @Generated("by gapic-generator-java") public class GrpcDomainsStub extends DomainsStub { private static final MethodDescriptor<SearchDomainsRequest, SearchDomainsResponse> searchDomainsMethodDescriptor = MethodDescriptor.<SearchDomainsRequest, SearchDomainsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/SearchDomains") .setRequestMarshaller( ProtoUtils.marshaller(SearchDomainsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(SearchDomainsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor< RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse> retrieveRegisterParametersMethodDescriptor = MethodDescriptor .<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/RetrieveRegisterParameters") .setRequestMarshaller( ProtoUtils.marshaller(RetrieveRegisterParametersRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(RetrieveRegisterParametersResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<RegisterDomainRequest, Operation> registerDomainMethodDescriptor = MethodDescriptor.<RegisterDomainRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/RegisterDomain") .setRequestMarshaller( ProtoUtils.marshaller(RegisterDomainRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor< RetrieveTransferParametersRequest, RetrieveTransferParametersResponse> retrieveTransferParametersMethodDescriptor = MethodDescriptor .<RetrieveTransferParametersRequest, 
RetrieveTransferParametersResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/RetrieveTransferParameters") .setRequestMarshaller( ProtoUtils.marshaller(RetrieveTransferParametersRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(RetrieveTransferParametersResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<TransferDomainRequest, Operation> transferDomainMethodDescriptor = MethodDescriptor.<TransferDomainRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/TransferDomain") .setRequestMarshaller( ProtoUtils.marshaller(TransferDomainRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ListRegistrationsRequest, ListRegistrationsResponse> listRegistrationsMethodDescriptor = MethodDescriptor.<ListRegistrationsRequest, ListRegistrationsResponse>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/ListRegistrations") .setRequestMarshaller( ProtoUtils.marshaller(ListRegistrationsRequest.getDefaultInstance())) .setResponseMarshaller( ProtoUtils.marshaller(ListRegistrationsResponse.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<GetRegistrationRequest, Registration> getRegistrationMethodDescriptor = MethodDescriptor.<GetRegistrationRequest, Registration>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/GetRegistration") .setRequestMarshaller( ProtoUtils.marshaller(GetRegistrationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Registration.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); 
private static final MethodDescriptor<UpdateRegistrationRequest, Operation> updateRegistrationMethodDescriptor = MethodDescriptor.<UpdateRegistrationRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/UpdateRegistration") .setRequestMarshaller( ProtoUtils.marshaller(UpdateRegistrationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ConfigureManagementSettingsRequest, Operation> configureManagementSettingsMethodDescriptor = MethodDescriptor.<ConfigureManagementSettingsRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName( "google.cloud.domains.v1alpha2.Domains/ConfigureManagementSettings") .setRequestMarshaller( ProtoUtils.marshaller(ConfigureManagementSettingsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsMethodDescriptor = MethodDescriptor.<ConfigureDnsSettingsRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/ConfigureDnsSettings") .setRequestMarshaller( ProtoUtils.marshaller(ConfigureDnsSettingsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ConfigureContactSettingsRequest, Operation> configureContactSettingsMethodDescriptor = MethodDescriptor.<ConfigureContactSettingsRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/ConfigureContactSettings") .setRequestMarshaller( 
ProtoUtils.marshaller(ConfigureContactSettingsRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ExportRegistrationRequest, Operation> exportRegistrationMethodDescriptor = MethodDescriptor.<ExportRegistrationRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/ExportRegistration") .setRequestMarshaller( ProtoUtils.marshaller(ExportRegistrationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<DeleteRegistrationRequest, Operation> deleteRegistrationMethodDescriptor = MethodDescriptor.<DeleteRegistrationRequest, Operation>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/DeleteRegistration") .setRequestMarshaller( ProtoUtils.marshaller(DeleteRegistrationRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<RetrieveAuthorizationCodeRequest, AuthorizationCode> retrieveAuthorizationCodeMethodDescriptor = MethodDescriptor.<RetrieveAuthorizationCodeRequest, AuthorizationCode>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/RetrieveAuthorizationCode") .setRequestMarshaller( ProtoUtils.marshaller(RetrieveAuthorizationCodeRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AuthorizationCode.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private static final MethodDescriptor<ResetAuthorizationCodeRequest, AuthorizationCode> resetAuthorizationCodeMethodDescriptor = MethodDescriptor.<ResetAuthorizationCodeRequest, 
AuthorizationCode>newBuilder() .setType(MethodDescriptor.MethodType.UNARY) .setFullMethodName("google.cloud.domains.v1alpha2.Domains/ResetAuthorizationCode") .setRequestMarshaller( ProtoUtils.marshaller(ResetAuthorizationCodeRequest.getDefaultInstance())) .setResponseMarshaller(ProtoUtils.marshaller(AuthorizationCode.getDefaultInstance())) .setSampledToLocalTracing(true) .build(); private final UnaryCallable<SearchDomainsRequest, SearchDomainsResponse> searchDomainsCallable; private final UnaryCallable<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse> retrieveRegisterParametersCallable; private final UnaryCallable<RegisterDomainRequest, Operation> registerDomainCallable; private final OperationCallable<RegisterDomainRequest, Registration, OperationMetadata> registerDomainOperationCallable; private final UnaryCallable<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse> retrieveTransferParametersCallable; private final UnaryCallable<TransferDomainRequest, Operation> transferDomainCallable; private final OperationCallable<TransferDomainRequest, Registration, OperationMetadata> transferDomainOperationCallable; private final UnaryCallable<ListRegistrationsRequest, ListRegistrationsResponse> listRegistrationsCallable; private final UnaryCallable<ListRegistrationsRequest, ListRegistrationsPagedResponse> listRegistrationsPagedCallable; private final UnaryCallable<GetRegistrationRequest, Registration> getRegistrationCallable; private final UnaryCallable<UpdateRegistrationRequest, Operation> updateRegistrationCallable; private final OperationCallable<UpdateRegistrationRequest, Registration, OperationMetadata> updateRegistrationOperationCallable; private final UnaryCallable<ConfigureManagementSettingsRequest, Operation> configureManagementSettingsCallable; private final OperationCallable< ConfigureManagementSettingsRequest, Registration, OperationMetadata> configureManagementSettingsOperationCallable; private final 
UnaryCallable<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsCallable; private final OperationCallable<ConfigureDnsSettingsRequest, Registration, OperationMetadata> configureDnsSettingsOperationCallable; private final UnaryCallable<ConfigureContactSettingsRequest, Operation> configureContactSettingsCallable; private final OperationCallable<ConfigureContactSettingsRequest, Registration, OperationMetadata> configureContactSettingsOperationCallable; private final UnaryCallable<ExportRegistrationRequest, Operation> exportRegistrationCallable; private final OperationCallable<ExportRegistrationRequest, Registration, OperationMetadata> exportRegistrationOperationCallable; private final UnaryCallable<DeleteRegistrationRequest, Operation> deleteRegistrationCallable; private final OperationCallable<DeleteRegistrationRequest, Empty, OperationMetadata> deleteRegistrationOperationCallable; private final UnaryCallable<RetrieveAuthorizationCodeRequest, AuthorizationCode> retrieveAuthorizationCodeCallable; private final UnaryCallable<ResetAuthorizationCodeRequest, AuthorizationCode> resetAuthorizationCodeCallable; private final BackgroundResource backgroundResources; private final GrpcOperationsStub operationsStub; private final GrpcStubCallableFactory callableFactory; public static final GrpcDomainsStub create(DomainsStubSettings settings) throws IOException { return new GrpcDomainsStub(settings, ClientContext.create(settings)); } public static final GrpcDomainsStub create(ClientContext clientContext) throws IOException { return new GrpcDomainsStub(DomainsStubSettings.newBuilder().build(), clientContext); } public static final GrpcDomainsStub create( ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { return new GrpcDomainsStub( DomainsStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of GrpcDomainsStub, using the given settings. 
This is protected so that * it is easy to make a subclass, but otherwise, the static factory methods should be preferred. */ protected GrpcDomainsStub(DomainsStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new GrpcDomainsCallableFactory()); } /** * Constructs an instance of GrpcDomainsStub, using the given settings. This is protected so that * it is easy to make a subclass, but otherwise, the static factory methods should be preferred. */ protected GrpcDomainsStub( DomainsStubSettings settings, ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory); GrpcCallSettings<SearchDomainsRequest, SearchDomainsResponse> searchDomainsTransportSettings = GrpcCallSettings.<SearchDomainsRequest, SearchDomainsResponse>newBuilder() .setMethodDescriptor(searchDomainsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("location", String.valueOf(request.getLocation())); return builder.build(); }) .build(); GrpcCallSettings<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse> retrieveRegisterParametersTransportSettings = GrpcCallSettings .<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse>newBuilder() .setMethodDescriptor(retrieveRegisterParametersMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("location", String.valueOf(request.getLocation())); return builder.build(); }) .build(); GrpcCallSettings<RegisterDomainRequest, Operation> registerDomainTransportSettings = GrpcCallSettings.<RegisterDomainRequest, Operation>newBuilder() .setMethodDescriptor(registerDomainMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); 
builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse> retrieveTransferParametersTransportSettings = GrpcCallSettings .<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse>newBuilder() .setMethodDescriptor(retrieveTransferParametersMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("location", String.valueOf(request.getLocation())); return builder.build(); }) .build(); GrpcCallSettings<TransferDomainRequest, Operation> transferDomainTransportSettings = GrpcCallSettings.<TransferDomainRequest, Operation>newBuilder() .setMethodDescriptor(transferDomainMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<ListRegistrationsRequest, ListRegistrationsResponse> listRegistrationsTransportSettings = GrpcCallSettings.<ListRegistrationsRequest, ListRegistrationsResponse>newBuilder() .setMethodDescriptor(listRegistrationsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); GrpcCallSettings<GetRegistrationRequest, Registration> getRegistrationTransportSettings = GrpcCallSettings.<GetRegistrationRequest, Registration>newBuilder() .setMethodDescriptor(getRegistrationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<UpdateRegistrationRequest, Operation> updateRegistrationTransportSettings = GrpcCallSettings.<UpdateRegistrationRequest, Operation>newBuilder() 
.setMethodDescriptor(updateRegistrationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add( "registration.name", String.valueOf(request.getRegistration().getName())); return builder.build(); }) .build(); GrpcCallSettings<ConfigureManagementSettingsRequest, Operation> configureManagementSettingsTransportSettings = GrpcCallSettings.<ConfigureManagementSettingsRequest, Operation>newBuilder() .setMethodDescriptor(configureManagementSettingsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("registration", String.valueOf(request.getRegistration())); return builder.build(); }) .build(); GrpcCallSettings<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsTransportSettings = GrpcCallSettings.<ConfigureDnsSettingsRequest, Operation>newBuilder() .setMethodDescriptor(configureDnsSettingsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("registration", String.valueOf(request.getRegistration())); return builder.build(); }) .build(); GrpcCallSettings<ConfigureContactSettingsRequest, Operation> configureContactSettingsTransportSettings = GrpcCallSettings.<ConfigureContactSettingsRequest, Operation>newBuilder() .setMethodDescriptor(configureContactSettingsMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("registration", String.valueOf(request.getRegistration())); return builder.build(); }) .build(); GrpcCallSettings<ExportRegistrationRequest, Operation> exportRegistrationTransportSettings = GrpcCallSettings.<ExportRegistrationRequest, Operation>newBuilder() .setMethodDescriptor(exportRegistrationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return 
builder.build(); }) .build(); GrpcCallSettings<DeleteRegistrationRequest, Operation> deleteRegistrationTransportSettings = GrpcCallSettings.<DeleteRegistrationRequest, Operation>newBuilder() .setMethodDescriptor(deleteRegistrationMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); GrpcCallSettings<RetrieveAuthorizationCodeRequest, AuthorizationCode> retrieveAuthorizationCodeTransportSettings = GrpcCallSettings.<RetrieveAuthorizationCodeRequest, AuthorizationCode>newBuilder() .setMethodDescriptor(retrieveAuthorizationCodeMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("registration", String.valueOf(request.getRegistration())); return builder.build(); }) .build(); GrpcCallSettings<ResetAuthorizationCodeRequest, AuthorizationCode> resetAuthorizationCodeTransportSettings = GrpcCallSettings.<ResetAuthorizationCodeRequest, AuthorizationCode>newBuilder() .setMethodDescriptor(resetAuthorizationCodeMethodDescriptor) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("registration", String.valueOf(request.getRegistration())); return builder.build(); }) .build(); this.searchDomainsCallable = callableFactory.createUnaryCallable( searchDomainsTransportSettings, settings.searchDomainsSettings(), clientContext); this.retrieveRegisterParametersCallable = callableFactory.createUnaryCallable( retrieveRegisterParametersTransportSettings, settings.retrieveRegisterParametersSettings(), clientContext); this.registerDomainCallable = callableFactory.createUnaryCallable( registerDomainTransportSettings, settings.registerDomainSettings(), clientContext); this.registerDomainOperationCallable = callableFactory.createOperationCallable( registerDomainTransportSettings, 
settings.registerDomainOperationSettings(), clientContext, operationsStub); this.retrieveTransferParametersCallable = callableFactory.createUnaryCallable( retrieveTransferParametersTransportSettings, settings.retrieveTransferParametersSettings(), clientContext); this.transferDomainCallable = callableFactory.createUnaryCallable( transferDomainTransportSettings, settings.transferDomainSettings(), clientContext); this.transferDomainOperationCallable = callableFactory.createOperationCallable( transferDomainTransportSettings, settings.transferDomainOperationSettings(), clientContext, operationsStub); this.listRegistrationsCallable = callableFactory.createUnaryCallable( listRegistrationsTransportSettings, settings.listRegistrationsSettings(), clientContext); this.listRegistrationsPagedCallable = callableFactory.createPagedCallable( listRegistrationsTransportSettings, settings.listRegistrationsSettings(), clientContext); this.getRegistrationCallable = callableFactory.createUnaryCallable( getRegistrationTransportSettings, settings.getRegistrationSettings(), clientContext); this.updateRegistrationCallable = callableFactory.createUnaryCallable( updateRegistrationTransportSettings, settings.updateRegistrationSettings(), clientContext); this.updateRegistrationOperationCallable = callableFactory.createOperationCallable( updateRegistrationTransportSettings, settings.updateRegistrationOperationSettings(), clientContext, operationsStub); this.configureManagementSettingsCallable = callableFactory.createUnaryCallable( configureManagementSettingsTransportSettings, settings.configureManagementSettingsSettings(), clientContext); this.configureManagementSettingsOperationCallable = callableFactory.createOperationCallable( configureManagementSettingsTransportSettings, settings.configureManagementSettingsOperationSettings(), clientContext, operationsStub); this.configureDnsSettingsCallable = callableFactory.createUnaryCallable( configureDnsSettingsTransportSettings, 
settings.configureDnsSettingsSettings(), clientContext); this.configureDnsSettingsOperationCallable = callableFactory.createOperationCallable( configureDnsSettingsTransportSettings, settings.configureDnsSettingsOperationSettings(), clientContext, operationsStub); this.configureContactSettingsCallable = callableFactory.createUnaryCallable( configureContactSettingsTransportSettings, settings.configureContactSettingsSettings(), clientContext); this.configureContactSettingsOperationCallable = callableFactory.createOperationCallable( configureContactSettingsTransportSettings, settings.configureContactSettingsOperationSettings(), clientContext, operationsStub); this.exportRegistrationCallable = callableFactory.createUnaryCallable( exportRegistrationTransportSettings, settings.exportRegistrationSettings(), clientContext); this.exportRegistrationOperationCallable = callableFactory.createOperationCallable( exportRegistrationTransportSettings, settings.exportRegistrationOperationSettings(), clientContext, operationsStub); this.deleteRegistrationCallable = callableFactory.createUnaryCallable( deleteRegistrationTransportSettings, settings.deleteRegistrationSettings(), clientContext); this.deleteRegistrationOperationCallable = callableFactory.createOperationCallable( deleteRegistrationTransportSettings, settings.deleteRegistrationOperationSettings(), clientContext, operationsStub); this.retrieveAuthorizationCodeCallable = callableFactory.createUnaryCallable( retrieveAuthorizationCodeTransportSettings, settings.retrieveAuthorizationCodeSettings(), clientContext); this.resetAuthorizationCodeCallable = callableFactory.createUnaryCallable( resetAuthorizationCodeTransportSettings, settings.resetAuthorizationCodeSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } public GrpcOperationsStub getOperationsStub() { return operationsStub; } @Override public UnaryCallable<SearchDomainsRequest, 
SearchDomainsResponse> searchDomainsCallable() { return searchDomainsCallable; } @Override public UnaryCallable<RetrieveRegisterParametersRequest, RetrieveRegisterParametersResponse> retrieveRegisterParametersCallable() { return retrieveRegisterParametersCallable; } @Override public UnaryCallable<RegisterDomainRequest, Operation> registerDomainCallable() { return registerDomainCallable; } @Override public OperationCallable<RegisterDomainRequest, Registration, OperationMetadata> registerDomainOperationCallable() { return registerDomainOperationCallable; } @Override public UnaryCallable<RetrieveTransferParametersRequest, RetrieveTransferParametersResponse> retrieveTransferParametersCallable() { return retrieveTransferParametersCallable; } @Override public UnaryCallable<TransferDomainRequest, Operation> transferDomainCallable() { return transferDomainCallable; } @Override public OperationCallable<TransferDomainRequest, Registration, OperationMetadata> transferDomainOperationCallable() { return transferDomainOperationCallable; } @Override public UnaryCallable<ListRegistrationsRequest, ListRegistrationsResponse> listRegistrationsCallable() { return listRegistrationsCallable; } @Override public UnaryCallable<ListRegistrationsRequest, ListRegistrationsPagedResponse> listRegistrationsPagedCallable() { return listRegistrationsPagedCallable; } @Override public UnaryCallable<GetRegistrationRequest, Registration> getRegistrationCallable() { return getRegistrationCallable; } @Override public UnaryCallable<UpdateRegistrationRequest, Operation> updateRegistrationCallable() { return updateRegistrationCallable; } @Override public OperationCallable<UpdateRegistrationRequest, Registration, OperationMetadata> updateRegistrationOperationCallable() { return updateRegistrationOperationCallable; } @Override public UnaryCallable<ConfigureManagementSettingsRequest, Operation> configureManagementSettingsCallable() { return configureManagementSettingsCallable; } @Override public 
OperationCallable<ConfigureManagementSettingsRequest, Registration, OperationMetadata> configureManagementSettingsOperationCallable() { return configureManagementSettingsOperationCallable; } @Override public UnaryCallable<ConfigureDnsSettingsRequest, Operation> configureDnsSettingsCallable() { return configureDnsSettingsCallable; } @Override public OperationCallable<ConfigureDnsSettingsRequest, Registration, OperationMetadata> configureDnsSettingsOperationCallable() { return configureDnsSettingsOperationCallable; } @Override public UnaryCallable<ConfigureContactSettingsRequest, Operation> configureContactSettingsCallable() { return configureContactSettingsCallable; } @Override public OperationCallable<ConfigureContactSettingsRequest, Registration, OperationMetadata> configureContactSettingsOperationCallable() { return configureContactSettingsOperationCallable; } @Override public UnaryCallable<ExportRegistrationRequest, Operation> exportRegistrationCallable() { return exportRegistrationCallable; } @Override public OperationCallable<ExportRegistrationRequest, Registration, OperationMetadata> exportRegistrationOperationCallable() { return exportRegistrationOperationCallable; } @Override public UnaryCallable<DeleteRegistrationRequest, Operation> deleteRegistrationCallable() { return deleteRegistrationCallable; } @Override public OperationCallable<DeleteRegistrationRequest, Empty, OperationMetadata> deleteRegistrationOperationCallable() { return deleteRegistrationOperationCallable; } @Override public UnaryCallable<RetrieveAuthorizationCodeRequest, AuthorizationCode> retrieveAuthorizationCodeCallable() { return retrieveAuthorizationCodeCallable; } @Override public UnaryCallable<ResetAuthorizationCodeRequest, AuthorizationCode> resetAuthorizationCodeCallable() { return resetAuthorizationCodeCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new 
IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
googleapis/google-api-java-client-services
37,682
clients/google-api-services-retail/v2beta/1.31.0/com/google/api/services/retail/v2beta/model/GoogleCloudRetailV2betaUserEvent.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.retail.v2beta.model; /** * UserEvent captures all metadata information Retail API needs to know about how end users interact * with customers' website. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Retail API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudRetailV2betaUserEvent extends com.google.api.client.json.GenericJson { /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. 
This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, GoogleCloudRetailV2betaCustomAttribute> attributes; static { // hack to force ProGuard to consider GoogleCloudRetailV2betaCustomAttribute used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(GoogleCloudRetailV2betaCustomAttribute.class); } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String attributionToken; /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String cartId; /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2betaCompletionDetail completionDetail; /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * The value may be {@code null}. */ @com.google.api.client.util.Key private String eventTime; /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. * `detail-page- * view`: Products detail page viewed. * `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String eventType; /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<java.lang.String> experimentIds; /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String filter; /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer offset; /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String orderBy; /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. 
At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> pageCategories; /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String pageViewId; /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GoogleCloudRetailV2betaProductDetail> productDetails; static { // hack to force ProGuard to consider GoogleCloudRetailV2betaProductDetail used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(GoogleCloudRetailV2betaProductDetail.class); } /** * A transaction represents the entire purchase transaction. 
Required for `purchase-complete` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2betaPurchaseTransaction purchaseTransaction; /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String referrerUri; /** * The user's search query. See SearchRequest.query for definition. The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String searchQuery; /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sessionId; /** * Complete URL (window.location.href) of the user's current page. When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String uri; /** * User information. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private GoogleCloudRetailV2betaUserInfo userInfo; /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. This mixes the event history of those users * together, which results in degraded model quality. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String visitorId; /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. 
Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * @return value or {@code null} for none */ public java.util.Map<String, GoogleCloudRetailV2betaCustomAttribute> getAttributes() { return attributes; } /** * Extra user event features to include in the recommendation model. If you provide custom * attributes for ingested user events, also include them in the user events that you associate * with prediction requests. Custom attribute formatting must be consistent between imported * events and events provided with prediction requests. This lets the Retail API use those custom * attributes when training models and serving predictions, which helps improve recommendation * quality. This field needs to pass all below criteria, otherwise an INVALID_ARGUMENT error is * returned: * The key must be a UTF-8 encoded string with a length limit of 5,000 characters. * * For text attributes, at most 400 values are allowed. Empty values are not allowed. Each value * must be a UTF-8 encoded string with a length limit of 256 characters. * For number attributes, * at most 400 values are allowed. For product recommendations, an example of extra user * information is traffic_channel, which is how a user arrives at the site. Users can arrive at * the site by coming to the site directly, coming through Google search, or in other ways. * @param attributes attributes or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setAttributes(java.util.Map<String, GoogleCloudRetailV2betaCustomAttribute> attributes) { this.attributes = attributes; return this; } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. 
The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * @return value or {@code null} for none */ public java.lang.String getAttributionToken() { return attributionToken; } /** * Highly recommended for user events that are the result of PredictionService.Predict. This field * enables accurate attribution of recommendation model performance. The value must be a valid * PredictResponse.attribution_token for user events that are the result of * PredictionService.Predict. The value must be a valid SearchResponse.attribution_token for user * events that are the result of SearchService.Search. This token enables us to accurately * attribute page view or purchase back to the event and the particular predict response * containing this clicked/purchased product. If user clicks on product K in the recommendation * results, pass PredictResponse.attribution_token as a URL parameter to product K's page. When * recording events on product K's page, log the PredictResponse.attribution_token to this field. * @param attributionToken attributionToken or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setAttributionToken(java.lang.String attributionToken) { this.attributionToken = attributionToken; return this; } /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. 
* @return value or {@code null} for none */ public java.lang.String getCartId() { return cartId; } /** * The ID or name of the associated shopping cart. This ID is used to associate multiple items * added or present in the cart before purchase. This can only be set for `add-to-cart`, * `purchase-complete`, or `shopping-cart-page-view` events. * @param cartId cartId or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setCartId(java.lang.String cartId) { this.cartId = cartId; return this; } /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * @return value or {@code null} for none */ public GoogleCloudRetailV2betaCompletionDetail getCompletionDetail() { return completionDetail; } /** * The main auto-completion details related to the event. This field should be set for `search` * event when autocomplete function is enabled and the user clicks a suggestion for search. * @param completionDetail completionDetail or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setCompletionDetail(GoogleCloudRetailV2betaCompletionDetail completionDetail) { this.completionDetail = completionDetail; return this; } /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * @return value or {@code null} for none */ public String getEventTime() { return eventTime; } /** * Only required for UserEventService.ImportUserEvents method. Timestamp of when the user event * happened. * @param eventTime eventTime or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setEventTime(String eventTime) { this.eventTime = eventTime; return this; } /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. 
* `detail-page- * view`: Products detail page viewed. * `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * @return value or {@code null} for none */ public java.lang.String getEventType() { return eventType; } /** * Required. User event type. Allowed values are: * `add-to-cart`: Products being added to cart. * * `category-page-view`: Special pages such as sale or promotion pages viewed. * `detail-page- * view`: Products detail page viewed. * `home-page-view`: Homepage viewed. * `promotion-offered`: * Promotion is offered to a user. * `promotion-not-offered`: Promotion is not offered to a user. * * `purchase-complete`: User finishing a purchase. * `search`: Product search. * `shopping-cart- * page-view`: User viewing a shopping cart. * @param eventType eventType or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setEventType(java.lang.String eventType) { this.eventType = eventType; return this; } /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). * @return value or {@code null} for none */ public java.util.List<java.lang.String> getExperimentIds() { return experimentIds; } /** * A list of identifiers for the independent experiment groups this user event belongs to. This is * used to distinguish between user events associated with different experiment setups (e.g. using * Retail API, using different recommendation models). 
* @param experimentIds experimentIds or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setExperimentIds(java.util.List<java.lang.String> experimentIds) { this.experimentIds = experimentIds; return this; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getFilter() { return filter; } /** * The filter syntax consists of an expression language for constructing a predicate from one or * more fields of the products being filtered. See SearchRequest.filter for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. * @param filter filter or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setFilter(java.lang.String filter) { this.filter = filter; return this; } /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @return value or {@code null} for none */ public java.lang.Integer getOffset() { return offset; } /** * An integer that specifies the current offset for pagination (the 0-indexed starting location, * amongst the products deemed by the API as relevant). See SearchRequest.offset for definition. * If this field is negative, an INVALID_ARGUMENT is returned. This can only be set for `search` * events. 
Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @param offset offset or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setOffset(java.lang.Integer offset) { this.offset = offset; return this; } /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getOrderBy() { return orderBy; } /** * The order in which products are returned. See SearchRequest.order_by for definition and syntax. * The value must be a UTF-8 encoded string with a length limit of 1,000 characters. Otherwise, an * INVALID_ARGUMENT error is returned. This can only be set for `search` events. Other event types * should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param orderBy orderBy or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setOrderBy(java.lang.String orderBy) { this.orderBy = orderBy; return this; } /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. 
* @return value or {@code null} for none */ public java.util.List<java.lang.String> getPageCategories() { return pageCategories; } /** * The categories associated with a category page. To represent full path of category, use '>' * sign to separate different hierarchies. If '>' is part of the category name, please replace it * with other character(s). Category pages include special pages such as sales or promotions. For * instance, a special sale page may have the category hierarchy: "pageCategories" : ["Sales > * 2017 Black Friday Deals"]. Required for `category-page-view` events. At least one of * search_query or page_categories is required for `search` events. Other event types should not * set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param pageCategories pageCategories or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setPageCategories(java.util.List<java.lang.String> pageCategories) { this.pageCategories = pageCategories; return this; } /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @return value or {@code null} for none */ public java.lang.String getPageViewId() { return pageViewId; } /** * A unique ID of a web page view. This should be kept the same for all user events triggered from * the same pageview. For example, an item detail page view could trigger multiple events as the * user is browsing the page. The `pageViewId` property should be kept the same for all these * events so that they can be grouped together properly. 
When using the client side event * reporting with JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @param pageViewId pageViewId or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setPageViewId(java.lang.String pageViewId) { this.pageViewId = pageViewId; return this; } /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * @return value or {@code null} for none */ public java.util.List<GoogleCloudRetailV2betaProductDetail> getProductDetails() { return productDetails; } /** * The main product details related to the event. This field is optional except for the following * event types: * `add-to-cart` * `detail-page-view` * `purchase-complete` In a `search` event, * this field represents the products returned to the end user on the current page (the end user * may have not finished browsing the whole page yet). When a new page is returned to the end * user, after pagination/filtering/ordering even for the same query, a new `search` event with * different product_details is desired. The end user may have not finished browsing the whole * page yet. * @param productDetails productDetails or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setProductDetails(java.util.List<GoogleCloudRetailV2betaProductDetail> productDetails) { this.productDetails = productDetails; return this; } /** * A transaction represents the entire purchase transaction. Required for `purchase-complete` * events. 
Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @return value or {@code null} for none */ public GoogleCloudRetailV2betaPurchaseTransaction getPurchaseTransaction() { return purchaseTransaction; } /** * A transaction represents the entire purchase transaction. Required for `purchase-complete` * events. Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is * returned. * @param purchaseTransaction purchaseTransaction or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setPurchaseTransaction(GoogleCloudRetailV2betaPurchaseTransaction purchaseTransaction) { this.purchaseTransaction = purchaseTransaction; return this; } /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @return value or {@code null} for none */ public java.lang.String getReferrerUri() { return referrerUri; } /** * The referrer URL of the current page. When using the client side event reporting with * JavaScript pixel and Google Tag Manager, this value is filled in automatically. * @param referrerUri referrerUri or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setReferrerUri(java.lang.String referrerUri) { this.referrerUri = referrerUri; return this; } /** * The user's search query. See SearchRequest.query for definition. The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @return value or {@code null} for none */ public java.lang.String getSearchQuery() { return searchQuery; } /** * The user's search query. See SearchRequest.query for definition. 
The value must be a UTF-8 * encoded string with a length limit of 5,000 characters. Otherwise, an INVALID_ARGUMENT error is * returned. At least one of search_query or page_categories is required for `search` events. * Other event types should not set this field. Otherwise, an INVALID_ARGUMENT error is returned. * @param searchQuery searchQuery or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setSearchQuery(java.lang.String searchQuery) { this.searchQuery = searchQuery; return this; } /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * @return value or {@code null} for none */ public java.lang.String getSessionId() { return sessionId; } /** * A unique identifier for tracking a visitor session with a length limit of 128 bytes. A session * is an aggregation of an end user behavior in a time span. A general guideline to populate the * sesion_id: 1. If user has no activity for 30 min, a new session_id should be assigned. 2. The * session_id should be unique across users, suggest use uuid or add visitor_id as prefix. * @param sessionId sessionId or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setSessionId(java.lang.String sessionId) { this.sessionId = sessionId; return this; } /** * Complete URL (window.location.href) of the user's current page. When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * @return value or {@code null} for none */ public java.lang.String getUri() { return uri; } /** * Complete URL (window.location.href) of the user's current page. 
When using the client side * event reporting with JavaScript pixel and Google Tag Manager, this value is filled in * automatically. Maximum length 5,000 characters. * @param uri uri or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setUri(java.lang.String uri) { this.uri = uri; return this; } /** * User information. * @return value or {@code null} for none */ public GoogleCloudRetailV2betaUserInfo getUserInfo() { return userInfo; } /** * User information. * @param userInfo userInfo or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setUserInfo(GoogleCloudRetailV2betaUserInfo userInfo) { this.userInfo = userInfo; return this; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. This mixes the event history of those users * together, which results in degraded model quality. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * @return value or {@code null} for none */ public java.lang.String getVisitorId() { return visitorId; } /** * Required. A unique identifier for tracking visitors. For example, this could be implemented * with an HTTP cookie, which should be able to uniquely identify a visitor on a single device. * This unique identifier should not change if the visitor log in/out of the website. Don't set * the field to the same fixed ID for different users. 
This mixes the event history of those users * together, which results in degraded model quality. The field must be a UTF-8 encoded string * with a length limit of 128 characters. Otherwise, an INVALID_ARGUMENT error is returned. The * field should not contain PII or user-data. We recommend to use Google Analytics [Client * ID](https://developers.google.com/analytics/devguides/collection/analyticsjs/field- * reference#clientId) for this field. * @param visitorId visitorId or {@code null} for none */ public GoogleCloudRetailV2betaUserEvent setVisitorId(java.lang.String visitorId) { this.visitorId = visitorId; return this; } @Override public GoogleCloudRetailV2betaUserEvent set(String fieldName, Object value) { return (GoogleCloudRetailV2betaUserEvent) super.set(fieldName, value); } @Override public GoogleCloudRetailV2betaUserEvent clone() { return (GoogleCloudRetailV2betaUserEvent) super.clone(); } }
apache/metamodel
37,163
csv/src/test/java/org/apache/metamodel/csv/CsvDataContextTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.metamodel.csv; import java.io.File; import java.io.FileInputStream; import java.util.Arrays; import java.util.List; import java.util.Map; import javax.swing.table.TableModel; import org.apache.metamodel.DataContext; import org.apache.metamodel.QueryPostprocessDataContext; import org.apache.metamodel.UpdateCallback; import org.apache.metamodel.UpdateScript; import org.apache.metamodel.convert.Converters; import org.apache.metamodel.convert.StringToBooleanConverter; import org.apache.metamodel.convert.StringToIntegerConverter; import org.apache.metamodel.convert.TypeConverter; import org.apache.metamodel.data.DataSet; import org.apache.metamodel.data.DataSetTableModel; import org.apache.metamodel.data.Row; import org.apache.metamodel.query.FilterItem; import org.apache.metamodel.query.FunctionType; import org.apache.metamodel.query.OperatorType; import org.apache.metamodel.query.Query; import org.apache.metamodel.query.SelectItem; import org.apache.metamodel.schema.Column; import org.apache.metamodel.schema.MutableColumn; import org.apache.metamodel.schema.Schema; import org.apache.metamodel.schema.Table; import org.apache.metamodel.schema.naming.CustomColumnNamingStrategy; 
import org.apache.metamodel.util.FileHelper;
import org.apache.metamodel.util.MutableRef;

import junit.framework.TestCase;

/**
 * Integration tests for {@link CsvDataContext}: schema discovery, querying
 * (filters, GROUP BY, aggregates, offset/max-rows) and write operations
 * (create/drop table, insert, update, delete) against small CSV fixture files
 * under {@code src/test/resources}. Tests that modify data first copy the
 * fixture into {@code target/} so the originals stay untouched.
 */
public class CsvDataContextTest extends TestCase {

    // Shared configuration for fixtures that use ';' as separator and '\'' as quote char.
    private final CsvConfiguration semicolonConfiguration = new CsvConfiguration(
            CsvConfiguration.DEFAULT_COLUMN_NAME_LINE, "UTF-8", ';', '\'', CsvConfiguration.DEFAULT_ESCAPE_CHAR);

    /**
     * Creating a table in an empty file configured WITHOUT a column-name line:
     * the written data must be readable again through a fresh DataContext.
     */
    public void testEmptyFileNoColumnHeaderLine() throws Exception {
        final File file = new File("target/testEmptyFileNoColumnHeaderLine.csv");
        FileHelper.copy(new File("src/test/resources/empty_file.csv"), file);
        CsvConfiguration csvConfiguration = new CsvConfiguration(CsvConfiguration.NO_COLUMN_NAME_LINE,
                FileHelper.DEFAULT_ENCODING, CsvConfiguration.DEFAULT_SEPARATOR_CHAR, CsvConfiguration.NOT_A_CHAR,
                CsvConfiguration.DEFAULT_ESCAPE_CHAR);
        final CsvDataContext dc = new CsvDataContext(file, csvConfiguration);
        // a CSV schema always exposes 2 tables: the data table and the "default_table" alias
        assertEquals(2, dc.getDefaultSchema().getTableCount());

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.createTable(dc.getDefaultSchema(), "new_table").withColumn("COL_1").withColumn("COL_2")
                        .execute();
                callback.insertInto("new_table").value(0, "1").value(1, 2).execute();
            }
        });

        // re-open the file to verify what was physically written
        CsvDataContext dc1 = new CsvDataContext(file, csvConfiguration);
        List<Table> tables = dc1.getDefaultSchema().getTables();
        assertEquals(2, tables.size());
        Table table = tables.get(0);
        // with no header line the table is named after the file, not "new_table"
        assertEquals("testEmptyFileNoColumnHeaderLine.csv", table.getName());
        assertEquals(2, table.getColumnCount());

        DataSet ds = dc1.query().from(table).selectAll().execute();
        assertTrue(ds.next());
        assertEquals("Row[values=[1, 2]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    }

    /**
     * CREATE TABLE on a single-file CSV context replaces the file contents;
     * the table count stays fixed at 2 no matter how many creates are fired.
     */
    public void testEmptyFileTableCreation() throws Exception {
        final File file = new File("target/testEmptyFileNoColumnHeaderLine.csv");
        FileHelper.copy(new File("src/test/resources/empty_file.csv"), file);
        final CsvDataContext dc = new CsvDataContext(file);
        assertEquals(2, dc.getDefaultSchema().getTableCount());
        final Table table1 = dc.getDefaultSchema().getTables().get(0);
        assertEquals("testEmptyFileNoColumnHeaderLine.csv", table1.getName());
        assertEquals(0, table1.getColumnCount());

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.dropTable(dc.getDefaultSchema().getTable(0)).execute();
                callback.createTable(dc.getDefaultSchema(), "newtable1").withColumn("foo").withColumn("bar").execute();
            }
        });
        assertEquals("\"foo\",\"bar\"", FileHelper.readFileAsString(file));

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                // fire additional create table statements
                callback.createTable(dc.getDefaultSchema(), "newtable2").withColumn("foo").withColumn("bar").execute();
                callback.createTable(dc.getDefaultSchema(), "newtable3").withColumn("bar").withColumn("baz").execute();
            }
        });
        // the last create wins: the file now holds newtable3's header
        assertEquals("\"bar\",\"baz\"", FileHelper.readFileAsString(file));
        // still the table count should only be 2
        assertEquals(2, dc.getDefaultSchema().getTableCount());
    }

    /**
     * Appending a row to a file whose last line has no trailing newline must
     * insert a line break before the new record.
     */
    public void testAppendToFileWithoutLineBreak() throws Exception {
        File targetFile = new File("target/csv_no_linebreak");
        FileHelper.copy(new File("src/test/resources/csv_no_linebreak.csv"), targetFile);
        assertTrue(targetFile.exists());
        assertEquals("foo,bar!LINEBREAK!hello,world!LINEBREAK!hi,there", FileHelper.readFileAsString(targetFile)
                .replaceAll("\n", "!LINEBREAK!"));

        final CsvDataContext dc = new CsvDataContext(targetFile);
        final Table table = dc.getDefaultSchema().getTables().get(0);
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.insertInto(table).value(0, "1234").value(1, "5678").execute();
            }
        });

        assertEquals("foo,bar!LINEBREAK!hello,world!LINEBREAK!hi,there!LINEBREAK!\"1234\",\"5678\"", FileHelper
                .readFileAsString(targetFile).replaceAll("\n", "!LINEBREAK!"));
    }

    /** Empty lines in the file must be skipped when multi-line values are enabled. */
    public void testHandlingOfEmptyLinesMultipleLinesSupport() throws Exception {
        // test with multiline values
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_with_empty_lines.csv"),
                new CsvConfiguration(1, false, true));
        testHandlingOfEmptyLines(dc);
    }

    /** Empty lines in the file must be skipped when multi-line values are disabled. */
    public void testHandlingOfEmptyLinesSingleLinesSupport() throws Exception {
        // test with only single line values
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_with_empty_lines.csv"),
                new CsvConfiguration(1, false, false));
        testHandlingOfEmptyLines(dc);
    }

    // Shared assertion helper for the two empty-line tests above (not itself a test fixture).
    public void testHandlingOfEmptyLines(DataContext dc) throws Exception {
        DataSet ds = dc.query().from(dc.getDefaultSchema().getTable(0)).selectAll().execute();
        assertTrue(ds.next());
        assertEquals("Row[values=[hello, world]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[hi, there]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    }

    /** An empty file with no header line yields a table with zero columns. */
    public void testEmptyFileNoHeaderLine() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/empty_file.csv"), new CsvConfiguration(
                CsvConfiguration.NO_COLUMN_NAME_LINE));
        assertEquals(2, dc.getDefaultSchema().getTableCount());
        Table table = dc.getDefaultSchema().getTables().get(0);
        assertEquals("empty_file.csv", table.getName());
        assertEquals(0, table.getColumnCount());
    }

    /** A column-name line number beyond EOF yields a table with zero columns. */
    public void testUnexistingHeaderLine() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_people.csv"), new CsvConfiguration(20));
        assertEquals(2, dc.getDefaultSchema().getTableCount());
        Table table = dc.getDefaultSchema().getTables().get(0);
        assertEquals("csv_people.csv", table.getName());
        assertEquals(0, table.getColumnCount());
    }

    /**
     * With failOnInconsistentRowLength=true, rows whose column count differs
     * from the header must raise InconsistentRowLengthException, exposing both
     * the best-effort "proposed" row and the raw source line.
     */
    public void testInconsistentColumns() throws Exception {
        CsvConfiguration conf = new CsvConfiguration(CsvConfiguration.DEFAULT_COLUMN_NAME_LINE, "UTF8", ',', '"',
                '\\', true);
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_inconsistent_columns.csv"), conf);
        DataSet ds = dc.query().from("csv_inconsistent_columns.csv").select("hello").and("world").execute();
        assertTrue(ds.next());
        assertTrue(ds.next());
        try {
            ds.next();
            fail("Exception expected");
        } catch (InconsistentRowLengthException e) {
            assertEquals("Inconsistent length of row no. 3. Expected 2 columns but found 3.", e.getMessage());
            // extra value is truncated in the proposed row
            Row proposedRow = e.getProposedRow();
            assertEquals("[5, 6]", Arrays.toString(proposedRow.getValues()));
            String[] sourceLine = e.getSourceLine();
            assertEquals("[5, 6, 7]", Arrays.toString(sourceLine));
        }
        assertTrue(ds.next());
        try {
            ds.next();
            fail("Exception expected");
        } catch (InconsistentRowLengthException e) {
            assertEquals("Inconsistent length of row no. 5. Expected 2 columns but found 1.", e.getMessage());
            // missing value is null-padded in the proposed row
            Row proposedRow = e.getProposedRow();
            assertEquals("[10, null]", Arrays.toString(proposedRow.getValues()));
            String[] sourceLine = e.getSourceLine();
            assertEquals("[10]", Arrays.toString(sourceLine));
        }
        assertTrue(ds.next());
        assertFalse(ds.next());
    }

    /**
     * For a small file the approximated COUNT(*) must still equal the exact
     * row count (9 rows in csv_people.csv).
     */
    public void testApproximatedCountSmallFile() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_people.csv"));
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query q = dc.query().from(table).selectCount().toQuery();
        SelectItem selectItem = q.getSelectClause().getItem(0);
        selectItem.setFunctionApproximationAllowed(true);
        DataSet ds = dc.executeQuery(q);
        assertTrue(ds.next());
        Object[] values = ds.getRow().getValues();
        assertEquals(1, values.length);
        assertEquals(9, ((Long) ds.getRow().getValue(selectItem)).intValue());
        assertEquals(9, ((Long) values[0]).intValue());
        assertFalse(ds.next());
    }

    /** A numeric GREATER_THAN filter must compare as numbers, not as strings. */
    public void testFilterOnNumberColumn() throws Exception {
        CsvDataContext dc = new CsvDataContext(new File("src/test/resources/csv_people.csv"));
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query q = dc.query().from(table).select("name").where("age").greaterThan(18).toQuery();
        List<Object[]> result = dc.executeQuery(q).toObjectArrays();
        assertEquals(2, result.size());
        assertEquals("[michael]", Arrays.toString(result.get(0)));
        assertEquals("[hillary]", Arrays.toString(result.get(1)));
    }

    /**
     * An InputStream-based context copies its data to a temp file
     * ("metamodel...csv") that must survive repeated reads.
     */
    public void testGetFromInputStream() throws Exception {
        DataContext dc = null;
        // repeat this step a few times to test temp-file creation, see Ticket
        // #437
        for (int i = 0; i < 5; i++) {
            File file = new File("src/test/resources/tickets.csv");
            FileInputStream inputStream = new FileInputStream(file);
            dc = new CsvDataContext(inputStream, new CsvConfiguration());
        }
        Schema schema = dc.getDefaultSchema();
        String name = schema.getTable(0).getName();
        assertTrue(name.startsWith("metamodel"));
        assertTrue(name.endsWith("csv"));
        // Test two separate reads to ensure that the temp file is working
        // properly and persistent.
        doTicketFileTests(dc);
        doTicketFileTests(dc);
    }

    /** Values that span multiple physical lines (quoted newlines) must be read intact. */
    public void testMultilineExample() throws Exception {
        File file = new File("src/test/resources/tickets.csv");
        DataContext dc = new CsvDataContext(file);
        Schema schema = dc.getDefaultSchema();
        Table table = schema.getTableByName("tickets.csv");
        Column descColumn = table.getColumnByName("_description");
        assertNotNull(table);
        assertNotNull(descColumn);
        doTicketFileTests(dc);
    }

    // Shared assertions over the tickets.csv fixture: 36 rows x 13 columns,
    // including a multi-line description value.
    public void doTicketFileTests(DataContext dc) {
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query q = dc.query().from(table).select(table.getColumns()).toQuery();
        DataSet dataSet = dc.executeQuery(q);
        List<Object[]> objectArrays = dataSet.toObjectArrays();
        assertEquals(13, objectArrays.get(0).length);
        assertEquals(36, objectArrays.size());
        assertEquals("2", objectArrays.get(0)[0].toString());
        Object description = objectArrays.get(0)[11];
        assertTrue(description instanceof String);
        assertEquals(
                "We should have a look at the Value Distribution and Time Analysis profiles. They consume very large amounts of memory because they basicly save all values in maps for analysis.\n"
                        + "\n"
                        + "One way of improving this could be through caching. Another way could be through more appropriate (less verbose) storing of intermediate data (this looks obvious in Time Analysis profile). A third way could be by letting the profiles create queries themselves (related to metadata profiling, #222).",
                (String) description);
    }

    /**
     * With columnNameLineNumber=3, line 3 of the file becomes the header and
     * only the subsequent lines are data rows.
     */
    public void testHighColumnNameLineNumber() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file, new CsvConfiguration(3));
        assertEquals(2, dc.getSchemas().size());
        Schema schema = dc.getDefaultSchema();
        // schema is named after the parent directory of the file
        assertEquals("resources", schema.getName());
        assertEquals(2, schema.getTableCount());
        Table table = schema.getTables().get(0);
        assertEquals("csv_people.csv", table.getName());
        assertEquals(4, table.getColumnCount());
        assertEquals(0, table.getRelationshipCount());
        Column[] columns = table.getColumns().toArray(new Column[0]);
        // the values of data line 3 ("2,michael,male,19") act as column names
        assertEquals("2", columns[0].getName());
        assertEquals("michael", columns[1].getName());
        assertEquals("male", columns[2].getName());
        assertEquals("19", columns[3].getName());
        Query query = dc.query().from(table).select(table.getColumnByName("michael")).toQuery();
        DataSet dataSet = dc.executeQuery(query);
        assertTrue(dataSet.next());
        assertEquals("peter", dataSet.getRow().getValue(0));
        assertTrue(dataSet.next());
        assertEquals("bob", dataSet.getRow().getValue(0));
        assertTrue(dataSet.next());
        assertEquals("barbara, barb", dataSet.getRow().getValue(0));
    }

    /**
     * Without a header line, columns get generated alphabetic names (A, B, C,
     * D) and the first file line is treated as data.
     */
    public void testNoColumnNames() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file, new CsvConfiguration(
                CsvConfiguration.NO_COLUMN_NAME_LINE));
        assertEquals(2, dc.getSchemas().size());
        Schema schema = dc.getDefaultSchema();
        assertEquals("resources", schema.getName());
        assertEquals(2, schema.getTableCount());
        Table table = schema.getTables().get(0);
        assertEquals("csv_people.csv", table.getName());
        assertEquals(4, table.getColumnCount());
        assertEquals(0, table.getRelationshipCount());
        Column[] columns = table.getColumns().toArray(new Column[0]);
        assertEquals("A", columns[0].getName());
        assertEquals("B", columns[1].getName());
        assertEquals("C", columns[2].getName());
        assertEquals("D", columns[3].getName());
        Query query = dc.query().from(table).select(table.getColumnByName("B")).toQuery();
        DataSet dataSet = dc.executeQuery(query);
        assertTrue(dataSet.next());
        // the former header line ("id,name,...") is now the first data row
        assertEquals("name", dataSet.getRow().getValue(0));
        assertTrue(dataSet.next());
        assertEquals("mike", dataSet.getRow().getValue(0));
        assertTrue(dataSet.next());
        assertEquals("michael", dataSet.getRow().getValue(0));
    }

    /** Default configuration: first line supplies the column names. */
    public void testGetSchemas() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file);
        assertEquals(2, dc.getSchemas().size());
        Schema schema = dc.getDefaultSchema();
        assertEquals("resources", schema.getName());
        assertEquals(2, schema.getTableCount());
        Table table = schema.getTables().get(0);
        assertEquals("csv_people.csv", table.getName());
        assertEquals(4, table.getColumnCount());
        assertEquals(0, table.getRelationshipCount());
        Column[] columns = table.getColumns().toArray(new Column[0]);
        assertEquals("id", columns[0].getName());
        assertEquals("name", columns[1].getName());
        assertEquals("gender", columns[2].getName());
        assertEquals("age", columns[3].getName());
    }

    /** A WHERE column does not have to appear in the SELECT clause. */
    public void testWhereItemNotInSelectClause() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file);
        Table table = dc.getDefaultSchema().getTableByName("csv_people.csv");
        Query q = new Query();
        q.from(table);
        q.where(new FilterItem(new SelectItem(table.getColumnByName("id")), OperatorType.EQUALS_TO, 1));
        q.select(table.getColumnByName("name"));
        DataSet data = dc.executeQuery(q);
        assertTrue(data.next());
        assertEquals("Row[values=[mike]]", data.getRow().toString());
        assertFalse(data.next());
    }

    /** IN (...) operator over a string column, including SQL rendering. */
    public void testWhereColumnInValues() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file, new CsvConfiguration(1, true, true));
        Table table = dc.getDefaultSchema().getTableByName("csv_people.csv");
        Query q = dc.query().from(table).as("t").select("name").and("age").where("age").in("18", "20").toQuery();
        assertEquals("SELECT t.name, t.age FROM resources.csv_people.csv t WHERE t.age IN ('18' , '20')", q.toSql());
        DataSet ds = dc.executeQuery(q);
        assertTrue(ds.next());
        assertEquals("Row[values=[mike, 18]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[peter, 18]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[barbara, barb, 18]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[charlotte, 18]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[hillary, 20]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();
    }

    /** GROUP BY with MAX/MIN/COUNT aggregates and aliases. */
    public void testGroupByQuery() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_people.csv"));
        Table table = dc.getDefaultSchema().getTableByName("csv_people.csv");
        Query q = new Query();
        q.from(table);
        q.groupBy(table.getColumnByName("gender"));
        q.select(new SelectItem(table.getColumnByName("gender")),
                new SelectItem(FunctionType.MAX, table.getColumnByName("age")),
                new SelectItem(FunctionType.MIN, table.getColumnByName("age")),
                new SelectItem(FunctionType.COUNT, "*", "total"),
                new SelectItem(FunctionType.MIN, table.getColumnByName("id")).setAlias("firstId"));
        DataSet data = dc.executeQuery(q);
        assertEquals(
                "[csv_people.csv.gender, MAX(csv_people.csv.age), MIN(csv_people.csv.age), COUNT(*) AS total, MIN(csv_people.csv.id) AS firstId]",
                Arrays.toString(data.getSelectItems().toArray()));
        // group order is not guaranteed, so accept either ordering of the two rows
        String[] expectations = new String[] { "Row[values=[female, 20, 17, 5, 5]]", "Row[values=[male, 19, 17, 4, 1]]" };
        assertTrue(data.next());
        assertTrue(Arrays.asList(expectations).contains(data.getRow().toString()));
        assertTrue(data.next());
        assertTrue(Arrays.asList(expectations).contains(data.getRow().toString()));
        assertFalse(data.next());
    }

    /**
     * Direct table materialization: -1 means "all rows"; a positive maxRows
     * limits the DataSet. Note row 5 contains an embedded comma in a value.
     */
    public void testMaterializeTable() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        CsvDataContext dc = new CsvDataContext(file, new CsvConfiguration(1, false, false));
        Table table = dc.getSchemas().get(0).getTables().get(0);
        DataSet dataSet = dc.materializeMainSchemaTable(table, table.getColumns(), -1);
        // getRow() is null before the first next()
        assertNull(dataSet.getRow());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[1, mike, male, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[2, michael, male, 19]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[3, peter, male, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[5, barbara, barb, female, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[9, carrie, female, 17]]", dataSet.getRow().toString());
        assertFalse(dataSet.next());

        dataSet = dc.materializeMainSchemaTable(table, table.getColumns(), 1);
        assertTrue(dataSet.next());
        assertEquals("Row[values=[1, mike, male, 18]]", dataSet.getRow().toString());
        assertFalse(dataSet.next());
    }

    /** Semicolon separator with single-quote quoting must parse the same people data. */
    public void testAlternativeDelimitors() throws Exception {
        File file = new File("src/test/resources/csv_semicolon_singlequote.csv");
        CsvDataContext dc = new CsvDataContext(file, semicolonConfiguration);
        Table table = dc.getSchemas().get(0).getTables().get(0);
        DataSet dataSet = dc.materializeMainSchemaTable(table, table.getColumns(), -1);
        assertTrue(dataSet.next());
        assertEquals("Row[values=[1, mike, male, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[2, michael, male, 19]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[3, peter, male, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        // quoted value containing the ';' separator survives parsing
        assertEquals("Row[values=[5, barbara; barb, female, 18]]", dataSet.getRow().toString());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertTrue(dataSet.next());
        assertEquals("Row[values=[9, carrie, female, 17]]", dataSet.getRow().toString());
        assertFalse(dataSet.next());
        // getRow() returns null once the DataSet is exhausted
        assertNull(dataSet.getRow());
    }

    /** setMaxRows(5) must cap the materialized result at 5 rows. */
    public void testMaxRows() throws Exception {
        File file = new File("src/test/resources/csv_semicolon_singlequote.csv");
        CsvDataContext dc = new CsvDataContext(file, semicolonConfiguration);
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query query = new Query().from(table).select(table.getColumns()).setMaxRows(5);
        DataSet dataSet = dc.executeQuery(query);
        TableModel tableModel = new DataSetTableModel(dataSet);
        assertEquals(5, tableModel.getRowCount());
    }

    /**
     * COUNT(*) alone returns a single row; mixing COUNT(*) with a plain column
     * repeats the aggregate value on every data row.
     */
    public void testQueryOnlyAggregate() throws Exception {
        File file = new File("src/test/resources/csv_people.csv");
        QueryPostprocessDataContext dc = new CsvDataContext(file);
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query q = new Query().selectCount().from(table);
        assertEquals("SELECT COUNT(*) FROM resources.csv_people.csv", q.toString());
        List<Object[]> data = dc.executeQuery(q).toObjectArrays();
        assertEquals(1, data.size());
        Object[] row = data.get(0);
        assertEquals(1, row.length);
        assertEquals("[9]", Arrays.toString(row));

        q.select(table.getColumns().get(0));
        assertEquals("SELECT COUNT(*), csv_people.csv.id FROM resources.csv_people.csv", q.toString());
        data = dc.executeQuery(q).toObjectArrays();
        assertEquals(9, data.size());
        row = data.get(0);
        assertEquals(2, row.length);
        assertEquals("[9, 1]", Arrays.toString(row));
        row = data.get(1);
        assertEquals(2, row.length);
        assertEquals("[9, 2]", Arrays.toString(row));
        row = data.get(2);
        assertEquals(2, row.length);
        assertEquals("[9, 3]", Arrays.toString(row));
        row = data.get(8);
        assertEquals(2, row.length);
        assertEquals("[9, 9]", Arrays.toString(row));
    }

    /** setFirstRow + setMaxRows paging, consumed both via next() and via iterator. */
    public void testOffsetAndMaxrows() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_people.csv"));
        Table table = dc.getDefaultSchema().getTables().get(0);
        Query q = dc.query().from(table).select(table.getColumnByName("name")).toQuery();
        q.setFirstRow(3);
        q.setMaxRows(2);
        DataSet ds;
        ds = dc.executeQuery(q);
        assertEquals(1, ds.getSelectItems().size());
        assertTrue(ds.next());
        assertEquals("peter", ds.getRow().getValue(0).toString());
        assertTrue(ds.next());
        assertEquals("bob", ds.getRow().getValue(0).toString());
        assertFalse(ds.next());
        ds.close();

        // try with iterator
        ds = dc.executeQuery(q);
        int i = 0;
        for (Row row : ds) {
            assertNotNull(row);
            i++;
        }
        assertEquals(2, i);
    }

    /**
     * DELETE without WHERE truncates all data rows (even in a file with
     * inconsistent row lengths) but keeps the header line.
     */
    public void testTruncateDeleteAllRecordsFromInconsistentFile() throws Exception {
        File file = new File("target/csv_delete_all_records.txt");
        FileHelper.copy(new File("src/test/resources/csv_to_be_truncated.csv"), file);
        CsvDataContext dc = new CsvDataContext(file, new CsvConfiguration(1, "UTF8", ',', '"', '\\', true));
        assertEquals("[id, name, gender, age]",
                Arrays.toString(dc.getDefaultSchema().getTable(0).getColumnNames().toArray()));

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.deleteFrom("csv_delete_all_records.txt").execute();
            }
        });

        DataSet ds = dc.query().from("csv_delete_all_records.txt").selectCount().execute();
        assertTrue(ds.next());
        assertEquals(0, ((Number) ds.getRow().getValue(0)).intValue());
        assertFalse(ds.next());

        String fileAsString = FileHelper.readFileAsString(file);
        assertEquals("\"id\",\"name\",\"gender\",\"age\"", fileAsString);
    }

    /**
     * End-to-end write lifecycle on a brand-new file: create table, insert
     * (by index, by Column and by name), re-open and insert, delete with
     * WHERE, update, and finally drop table (which deletes the file).
     */
    public void testWriteSimpleTableInNewFile() throws Exception {
        final File file = new File("target/csv_write_ex1.txt");
        file.delete();
        assertFalse(file.exists());
        CsvDataContext dc = new CsvDataContext(file);
        final Schema schema = dc.getDefaultSchema();
        assertEquals(0, schema.getTableCount());

        final MutableRef<Table> tableRef = new MutableRef<Table>();
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                Table table = cb.createTable(schema, "foobar").withColumn("foo").withColumn("bar").execute();
                tableRef.set(table);
                assertEquals(schema.getTables().get(0), table);
                assertTrue(file.exists());
                assertEquals("[foo, bar]", Arrays.toString(table.getColumnNames().toArray()));
                // three equivalent ways to address the target column
                cb.insertInto(table).value(0, "f").value(1, "b").execute();
                cb.insertInto(table).value(0, "o").value(table.getColumnByName("bar"), "a").execute();
                cb.insertInto(table).value(0, "o").value("bar", "r").execute();
            }
        });

        // query the file to check results
        final Table readTable = schema.getTables().get(0);
        assertEquals(tableRef.get(), readTable);
        assertEquals("[foo, bar]", Arrays.toString(readTable.getColumnNames().toArray()));
        final Query query = dc.query().from(readTable).select("bar").and("foo").toQuery();
        DataSet ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[b, f]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[a, o]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[r, o]]", ds.getRow().toString());
        assertFalse(ds.next());

        // do the same trick on an existing file
        dc = new CsvDataContext(file);
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                cb.insertInto(tableRef.get()).value("foo", "hello").value("bar", "world").execute();
            }
        });
        ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[b, f]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[a, o]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[r, o]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[world, hello]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.deleteFrom(readTable).where("bar").eq("a").execute();
                callback.deleteFrom(readTable).where("bar").eq("r").execute();
            }
        });
        ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[b, f]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[world, hello]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();

        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                // update without WHERE hits every row; with WHERE only matching rows
                callback.update(readTable).value("foo", "universe").execute();
                callback.update(readTable).value("bar", "c").where("bar").isEquals("b").execute();
            }
        });
        ds = dc.executeQuery(query);
        assertTrue(ds.next());
        assertEquals("Row[values=[world, universe]]", ds.getRow().toString());
        assertTrue(ds.next());
        assertEquals("Row[values=[c, universe]]", ds.getRow().toString());
        assertFalse(ds.next());
        ds.close();

        // drop table
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback callback) {
                callback.dropTable(readTable).execute();
            }
        });
        assertFalse(file.exists());
    }

    /**
     * Converter auto-detection: a column holding only "1" values is detected
     * as boolean; an explicit StringToIntegerConverter overrides it to ints.
     */
    public void testOnlyNumberOneSymbol() throws Exception {
        DataContext dc = new CsvDataContext(new File("src/test/resources/csv_only_number_one.csv"));

        Map<Column, TypeConverter<?, ?>> converters = Converters.autoDetectConverters(dc, dc.getDefaultSchema()
                .getTables().get(0), 1000);
        assertEquals(1, converters.size());
        assertEquals(StringToBooleanConverter.class, converters.values().iterator().next().getClass());

        dc = Converters.addTypeConverters(dc, converters);

        Table table = dc.getDefaultSchema().getTables().get(0);
        MutableColumn col = (MutableColumn) table.getColumns().get(0);
        Query q = dc.query().from(table).select(col).toQuery();
        assertEquals("SELECT csv_only_number_one.csv.number FROM resources.csv_only_number_one.csv", q.toSql());

        DataSet ds = dc.executeQuery(q);
        while (ds.next()) {
            assertEquals(true, ds.getRow().getValue(0));
        }
        ds.close();

        dc = Converters.addTypeConverter(dc, col, new StringToIntegerConverter());
        ds = dc.executeQuery(q);
        while (ds.next()) {
            assertEquals(1, ds.getRow().getValue(0));
        }
        ds.close();
    }

    /**
     * Unusual separator/quote/escape characters ('|', '?', '!') must be
     * applied on write, including escaping a quote char inside a value.
     */
    public void testWriteOddConfiguration() throws Exception {
        final File file = new File("target/csv_write_ex2.txt");
        file.delete();
        assertFalse(file.exists());
        final CsvDataContext dc = new CsvDataContext(file, new CsvConfiguration(
                CsvConfiguration.DEFAULT_COLUMN_NAME_LINE, "UTF8", '|', '?', '!'));
        dc.executeUpdate(new UpdateScript() {
            @Override
            public void run(UpdateCallback cb) {
                Table table = cb.createTable(dc.getDefaultSchema(), "table").withColumn("id").withColumn("name")
                        .execute();
                cb.insertInto(table).value("id", 1).value("name", "Kasper").execute();
                cb.insertInto(table).value("id", 2).value("name", "Kas|per?").execute();
            }
        });

        String[] lines = FileHelper.readFileAsString(file).split("\n");
        assertEquals(3, lines.length);
        assertEquals("?id?|?name?", lines[0]);
        assertEquals("?1?|?Kasper?", lines[1]);
        // the '?' inside the value is escaped with '!'
        assertEquals("?2?|?Kas|per!??", lines[2]);
    }

    /** A stream-backed (read-only) context must reject update scripts. */
    public void testCannotWriteToReadOnly() throws Exception {
        final CsvDataContext dc = new CsvDataContext(new FileInputStream("src/test/resources/empty_file.csv"),
                new CsvConfiguration());

        try {
            dc.executeUpdate(new UpdateScript() {
                @Override
                public void run(UpdateCallback cb) {
                    cb.createTable(dc.getDefaultSchema(), "foo");
                }
            });
            fail("Exception expected");
        } catch (IllegalStateException e) {
            assertEquals("This CSV DataContext is not writable, as it based on a read-only resource.", e.getMessage());
        }

        // try {
        // dc.executeUpdate(new Update() {
        // @Override
        // public void run(UpdateCallback cb) {
        // cb.insertInto(dc.getDefaultSchema().getTables()[0]);
        // }
        // });
        // fail("Exception expected");
        // } catch (IllegalStateException e) {
        // assertEquals(
        // "This CSV DataContext is not writable, as it based on a read-only resource.",
        // e.getMessage());
        // }
    }

    // public void testOnlyWriteToOwnSchemasAndTables() throws Exception {
    // CsvDataContext dc = new CsvDataContext(new File(
    // "src/test/resources/empty_file.csv"), new CsvConfiguration());
    // try {
    // dc.executeUpdate(new Update()
    // {
    // @Override
    // public void run(UpdateCallback cb) {
    // cb.createTable(new MutableSchema("bar"), "foo");
    // }
    // );
    // fail("Exception expected");
    // } catch (IllegalArgumentException e) {
    // assertEquals("Not a valid CSV schema: Schema[name=bar]",
    // e.getMessage());
    // }
    //
    // try {
    // dc.insertInto(new MutableTable("bla"));
    // fail("Exception expected");
    // } catch (IllegalArgumentException e) {
    // assertEquals(
    // "Not a valid CSV table: Table[name=bla,type=null,remarks=null]",
    // e.getMessage());
    // }
    // }

    /** A CustomColumnNamingStrategy must override the file's own header names. */
    public void testCustomColumnNames() throws Exception {
        final String firstColumnName = "first";
        final String secondColumnName = "second";
        final String thirdColumnName = "third";
        final String fourthColumnName = "fourth";
        final CsvConfiguration configuration = new CsvConfiguration(CsvConfiguration.DEFAULT_COLUMN_NAME_LINE,
                new CustomColumnNamingStrategy(firstColumnName, secondColumnName, thirdColumnName, fourthColumnName),
                FileHelper.DEFAULT_ENCODING, CsvConfiguration.DEFAULT_SEPARATOR_CHAR,
                CsvConfiguration.DEFAULT_QUOTE_CHAR, CsvConfiguration.DEFAULT_ESCAPE_CHAR, false, true);

        final DataContext dataContext = new CsvDataContext(new File("src/test/resources/csv_people.csv"),
                configuration);
        final Table table = dataContext.getDefaultSchema().getTable(0);
        assertNotNull(table.getColumnByName(firstColumnName));
        assertNotNull(table.getColumnByName(secondColumnName));
        assertNotNull(table.getColumnByName(thirdColumnName));
        assertNotNull(table.getColumnByName(fourthColumnName));
    }
}
googleapis/google-cloud-java
37,269
java-service-usage/proto-google-cloud-service-usage-v1beta1/src/main/java/com/google/api/serviceusage/v1beta1/ListConsumerOverridesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/serviceusage/v1beta1/serviceusage.proto // Protobuf Java Version: 3.25.8 package com.google.api.serviceusage.v1beta1; /** * * * <pre> * Response message for ListConsumerOverrides. * </pre> * * Protobuf type {@code google.api.serviceusage.v1beta1.ListConsumerOverridesResponse} */ public final class ListConsumerOverridesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) ListConsumerOverridesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListConsumerOverridesResponse.newBuilder() to construct. 
private ListConsumerOverridesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListConsumerOverridesResponse() { overrides_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListConsumerOverridesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.serviceusage.v1beta1.ServiceUsageProto .internal_static_google_api_serviceusage_v1beta1_ListConsumerOverridesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.serviceusage.v1beta1.ServiceUsageProto .internal_static_google_api_serviceusage_v1beta1_ListConsumerOverridesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.class, com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.Builder.class); } public static final int OVERRIDES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_; /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ @java.lang.Override public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() { return overrides_; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder> getOverridesOrBuilderList() { return overrides_; } /** * * * <pre> * Consumer overrides on this limit. 
* </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ @java.lang.Override public int getOverridesCount() { return overrides_.size(); } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ @java.lang.Override public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) { return overrides_.get(index); } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ @java.lang.Override public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder( int index) { return overrides_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < overrides_.size(); i++) { output.writeMessage(1, overrides_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < overrides_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, overrides_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse)) { return super.equals(obj); } com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse other = (com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) obj; if (!getOverridesList().equals(other.getOverridesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getOverridesCount() > 0) { hash = (37 * hash) + OVERRIDES_FIELD_NUMBER; hash = (53 * hash) + getOverridesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { 
return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for ListConsumerOverrides. * </pre> * * Protobuf type {@code google.api.serviceusage.v1beta1.ListConsumerOverridesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.api.serviceusage.v1beta1.ServiceUsageProto .internal_static_google_api_serviceusage_v1beta1_ListConsumerOverridesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.api.serviceusage.v1beta1.ServiceUsageProto .internal_static_google_api_serviceusage_v1beta1_ListConsumerOverridesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.class, com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.Builder.class); } // Construct using // com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if 
(overridesBuilder_ == null) { overrides_ = java.util.Collections.emptyList(); } else { overrides_ = null; overridesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.api.serviceusage.v1beta1.ServiceUsageProto .internal_static_google_api_serviceusage_v1beta1_ListConsumerOverridesResponse_descriptor; } @java.lang.Override public com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse getDefaultInstanceForType() { return com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.getDefaultInstance(); } @java.lang.Override public com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse build() { com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse buildPartial() { com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse result = new com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse result) { if (overridesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { overrides_ = java.util.Collections.unmodifiableList(overrides_); bitField0_ = (bitField0_ & ~0x00000001); } result.overrides_ = overrides_; } else { result.overrides_ = overridesBuilder_.build(); } } private void buildPartial0( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder 
clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) { return mergeFrom((com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse other) { if (other == com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse.getDefaultInstance()) return this; if (overridesBuilder_ == null) { if (!other.overrides_.isEmpty()) { if (overrides_.isEmpty()) { overrides_ = other.overrides_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureOverridesIsMutable(); overrides_.addAll(other.overrides_); } onChanged(); } } else { if (!other.overrides_.isEmpty()) { if (overridesBuilder_.isEmpty()) { overridesBuilder_.dispose(); overridesBuilder_ = null; overrides_ = other.overrides_; bitField0_ = (bitField0_ & ~0x00000001); overridesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOverridesFieldBuilder() : null; } else { overridesBuilder_.addAllMessages(other.overrides_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.api.serviceusage.v1beta1.QuotaOverride m = input.readMessage( com.google.api.serviceusage.v1beta1.QuotaOverride.parser(), extensionRegistry); if (overridesBuilder_ == null) { ensureOverridesIsMutable(); overrides_.add(m); } else { overridesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> overrides_ = java.util.Collections.emptyList(); private void ensureOverridesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { overrides_ = new java.util.ArrayList<com.google.api.serviceusage.v1beta1.QuotaOverride>(overrides_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1beta1.QuotaOverride, 
com.google.api.serviceusage.v1beta1.QuotaOverride.Builder, com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder> overridesBuilder_; /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride> getOverridesList() { if (overridesBuilder_ == null) { return java.util.Collections.unmodifiableList(overrides_); } else { return overridesBuilder_.getMessageList(); } } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public int getOverridesCount() { if (overridesBuilder_ == null) { return overrides_.size(); } else { return overridesBuilder_.getCount(); } } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public com.google.api.serviceusage.v1beta1.QuotaOverride getOverrides(int index) { if (overridesBuilder_ == null) { return overrides_.get(index); } else { return overridesBuilder_.getMessage(index); } } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder setOverrides( int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) { if (overridesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOverridesIsMutable(); overrides_.set(index, value); onChanged(); } else { overridesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Consumer overrides on this limit. 
* </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder setOverrides( int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) { if (overridesBuilder_ == null) { ensureOverridesIsMutable(); overrides_.set(index, builderForValue.build()); onChanged(); } else { overridesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder addOverrides(com.google.api.serviceusage.v1beta1.QuotaOverride value) { if (overridesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOverridesIsMutable(); overrides_.add(value); onChanged(); } else { overridesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder addOverrides( int index, com.google.api.serviceusage.v1beta1.QuotaOverride value) { if (overridesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOverridesIsMutable(); overrides_.add(index, value); onChanged(); } else { overridesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder addOverrides( com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) { if (overridesBuilder_ == null) { ensureOverridesIsMutable(); overrides_.add(builderForValue.build()); onChanged(); } else { overridesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Consumer overrides on this limit. 
* </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder addOverrides( int index, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder builderForValue) { if (overridesBuilder_ == null) { ensureOverridesIsMutable(); overrides_.add(index, builderForValue.build()); onChanged(); } else { overridesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder addAllOverrides( java.lang.Iterable<? extends com.google.api.serviceusage.v1beta1.QuotaOverride> values) { if (overridesBuilder_ == null) { ensureOverridesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, overrides_); onChanged(); } else { overridesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder clearOverrides() { if (overridesBuilder_ == null) { overrides_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { overridesBuilder_.clear(); } return this; } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public Builder removeOverrides(int index) { if (overridesBuilder_ == null) { ensureOverridesIsMutable(); overrides_.remove(index); onChanged(); } else { overridesBuilder_.remove(index); } return this; } /** * * * <pre> * Consumer overrides on this limit. 
* </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder getOverridesBuilder( int index) { return getOverridesFieldBuilder().getBuilder(index); } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder getOverridesOrBuilder( int index) { if (overridesBuilder_ == null) { return overrides_.get(index); } else { return overridesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public java.util.List<? extends com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder> getOverridesOrBuilderList() { if (overridesBuilder_ != null) { return overridesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(overrides_); } } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder() { return getOverridesFieldBuilder() .addBuilder(com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance()); } /** * * * <pre> * Consumer overrides on this limit. * </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public com.google.api.serviceusage.v1beta1.QuotaOverride.Builder addOverridesBuilder( int index) { return getOverridesFieldBuilder() .addBuilder( index, com.google.api.serviceusage.v1beta1.QuotaOverride.getDefaultInstance()); } /** * * * <pre> * Consumer overrides on this limit. 
* </pre> * * <code>repeated .google.api.serviceusage.v1beta1.QuotaOverride overrides = 1;</code> */ public java.util.List<com.google.api.serviceusage.v1beta1.QuotaOverride.Builder> getOverridesBuilderList() { return getOverridesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1beta1.QuotaOverride, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder, com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder> getOverridesFieldBuilder() { if (overridesBuilder_ == null) { overridesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.api.serviceusage.v1beta1.QuotaOverride, com.google.api.serviceusage.v1beta1.QuotaOverride.Builder, com.google.api.serviceusage.v1beta1.QuotaOverrideOrBuilder>( overrides_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); overrides_ = null; } return overridesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token identifying which result to start with; returned by a previous list * call. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) } // @@protoc_insertion_point(class_scope:google.api.serviceusage.v1beta1.ListConsumerOverridesResponse) private static final com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse(); } public static com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListConsumerOverridesResponse> PARSER = new com.google.protobuf.AbstractParser<ListConsumerOverridesResponse>() { @java.lang.Override public ListConsumerOverridesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListConsumerOverridesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListConsumerOverridesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.api.serviceusage.v1beta1.ListConsumerOverridesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop-common
37,228
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2.hs; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic .NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.concurrent.atomic.AtomicInteger; import org.junit.Assert; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.TaskID; import org.apache.hadoop.mapreduce.TypeConverter; import 
org.apache.hadoop.mapreduce.jobhistory.EventReader; import org.apache.hadoop.mapreduce.jobhistory.HistoryEvent; import org.apache.hadoop.mapreduce.jobhistory.HistoryViewer; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo; import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo; import org.apache.hadoop.mapreduce.jobhistory.JobUnsuccessfulCompletionEvent; import org.apache.hadoop.mapreduce.jobhistory.TaskFailedEvent; import org.apache.hadoop.mapreduce.jobhistory.TaskFinishedEvent; import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent; import org.apache.hadoop.mapreduce.v2.api.records.JobId; import org.apache.hadoop.mapreduce.v2.api.records.JobState; import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl; import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl; import org.apache.hadoop.mapreduce.v2.app.MRApp; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.app.job.Task; import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo; import 
org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; import org.apache.hadoop.net.DNSToSwitchMapping; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.service.Service; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.util.RackResolver; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; public class TestJobHistoryParsing { private static final Log LOG = LogFactory.getLog(TestJobHistoryParsing.class); private static final String RACK_NAME = "/MyRackName"; private ByteArrayOutputStream outContent = new ByteArrayOutputStream(); public static class MyResolver implements DNSToSwitchMapping { @Override public List<String> resolve(List<String> names) { return Arrays.asList(new String[] { RACK_NAME }); } @Override public void reloadCachedMappings() { } @Override public void reloadCachedMappings(List<String> names) { } } @Test(timeout = 50000) public void testJobInfo() throws Exception { JobInfo info = new JobInfo(); Assert.assertEquals("NORMAL", info.getPriority()); info.printAll(); } @Test(timeout = 300000) public void testHistoryParsing() throws Exception { LOG.info("STARTING testHistoryParsing()"); try { checkHistoryParsing(2, 1, 2); } finally { LOG.info("FINISHED testHistoryParsing()"); } } @Test(timeout = 50000) public void testHistoryParsingWithParseErrors() throws Exception { LOG.info("STARTING testHistoryParsingWithParseErrors()"); try { checkHistoryParsing(3, 0, 2); } finally { LOG.info("FINISHED testHistoryParsingWithParseErrors()"); } } private static String getJobSummary(FileContext fc, Path path) throws IOException { Path qPath = fc.makeQualified(path); FSDataInputStream in = fc.open(qPath); String 
jobSummaryString = in.readUTF(); in.close(); return jobSummaryString; } private void checkHistoryParsing(final int numMaps, final int numReduces, final int numSuccessfulMaps) throws Exception { Configuration conf = new Configuration(); conf.set(MRJobConfig.USER_NAME, System.getProperty("user.name")); long amStartTimeEst = System.currentTimeMillis(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistory(numMaps, numReduces, true, this.getClass() .getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString()); app.waitForState(job, JobState.SUCCEEDED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); String jobhistoryDir = JobHistoryUtils .getHistoryIntermediateDoneDirForUser(conf); FileContext fc = null; try { fc = FileContext.getFileContext(conf); } catch (IOException ioe) { LOG.info("Can not get FileContext", ioe); throw (new Exception("Can not get File Context")); } if (numMaps == numSuccessfulMaps) { String summaryFileName = JobHistoryUtils .getIntermediateSummaryFileName(jobId); Path summaryFile = new Path(jobhistoryDir, summaryFileName); String jobSummaryString = getJobSummary(fc, summaryFile); Assert.assertNotNull(jobSummaryString); Assert.assertTrue(jobSummaryString.contains("resourcesPerMap=100")); Assert.assertTrue(jobSummaryString.contains("resourcesPerReduce=100")); Map<String, String> jobSummaryElements = new HashMap<String, String>(); StringTokenizer strToken = new StringTokenizer(jobSummaryString, ","); while (strToken.hasMoreTokens()) { String keypair = strToken.nextToken(); jobSummaryElements.put(keypair.split("=")[0], keypair.split("=")[1]); } Assert.assertEquals("JobId does not match", jobId.toString(), jobSummaryElements.get("jobId")); Assert.assertEquals("JobName does not match", 
"test", jobSummaryElements.get("jobName")); Assert.assertTrue("submitTime should not be 0", Long.parseLong(jobSummaryElements.get("submitTime")) != 0); Assert.assertTrue("launchTime should not be 0", Long.parseLong(jobSummaryElements.get("launchTime")) != 0); Assert .assertTrue( "firstMapTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements.get("firstMapTaskLaunchTime")) != 0); Assert .assertTrue("firstReduceTaskLaunchTime should not be 0", Long.parseLong(jobSummaryElements .get("firstReduceTaskLaunchTime")) != 0); Assert.assertTrue("finishTime should not be 0", Long.parseLong(jobSummaryElements.get("finishTime")) != 0); Assert.assertEquals("Mismatch in num map slots", numSuccessfulMaps, Integer.parseInt(jobSummaryElements.get("numMaps"))); Assert.assertEquals("Mismatch in num reduce slots", numReduces, Integer.parseInt(jobSummaryElements.get("numReduces"))); Assert.assertEquals("User does not match", System.getProperty("user.name"), jobSummaryElements.get("user")); Assert.assertEquals("Queue does not match", "default", jobSummaryElements.get("queue")); Assert.assertEquals("Status does not match", "SUCCEEDED", jobSummaryElements.get("status")); } JobHistory jobHistory = new JobHistory(); jobHistory.init(conf); HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId); JobInfo jobInfo; long numFinishedMaps; synchronized (fileInfo) { Path historyFilePath = fileInfo.getHistoryFile(); FSDataInputStream in = null; LOG.info("JobHistoryFile is: " + historyFilePath); try { in = fc.open(fc.makeQualified(historyFilePath)); } catch (IOException ioe) { LOG.info("Can not open history file: " + historyFilePath, ioe); throw (new Exception("Can not open History File")); } JobHistoryParser parser = new JobHistoryParser(in); final EventReader realReader = new EventReader(in); EventReader reader = Mockito.mock(EventReader.class); if (numMaps == numSuccessfulMaps) { reader = realReader; } else { final AtomicInteger numFinishedEvents = new AtomicInteger(0); // Hack! 
Mockito.when(reader.getNextEvent()).thenAnswer( new Answer<HistoryEvent>() { public HistoryEvent answer(InvocationOnMock invocation) throws IOException { HistoryEvent event = realReader.getNextEvent(); if (event instanceof TaskFinishedEvent) { numFinishedEvents.incrementAndGet(); } if (numFinishedEvents.get() <= numSuccessfulMaps) { return event; } else { throw new IOException("test"); } } }); } jobInfo = parser.parse(reader); numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps); if (numFinishedMaps != numMaps) { Exception parseException = parser.getParseException(); Assert.assertNotNull("Didn't get expected parse exception", parseException); } } Assert.assertEquals("Incorrect username ", System.getProperty("user.name"), jobInfo.getUsername()); Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname()); Assert.assertEquals("Incorrect queuename ", "default", jobInfo.getJobQueueName()); Assert .assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath()); Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps, numFinishedMaps); Assert.assertEquals("incorrect finishedReduces ", numReduces, jobInfo.getFinishedReduces()); Assert.assertEquals("incorrect uberized ", job.isUber(), jobInfo.getUberized()); Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks(); int totalTasks = allTasks.size(); Assert.assertEquals("total number of tasks is incorrect ", (numMaps + numReduces), totalTasks); // Verify aminfo Assert.assertEquals(1, jobInfo.getAMInfos().size()); Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0) .getNodeManagerHost()); AMInfo amInfo = jobInfo.getAMInfos().get(0); Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort()); Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort()); Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId()); Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId() .getApplicationAttemptId()); Assert.assertTrue(amInfo.getStartTime() 
<= System.currentTimeMillis() && amInfo.getStartTime() >= amStartTimeEst); ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1); // Assert at taskAttempt level for (TaskInfo taskInfo : allTasks.values()) { int taskAttemptCount = taskInfo.getAllTaskAttempts().size(); Assert .assertEquals("total number of task attempts ", 1, taskAttemptCount); TaskAttemptInfo taInfo = taskInfo.getAllTaskAttempts().values() .iterator().next(); Assert.assertNotNull(taInfo.getContainerId()); // Verify the wrong ctor is not being used. Remove after mrv1 is removed. Assert.assertFalse(taInfo.getContainerId().equals(fakeCid)); } // Deep compare Job and JobInfo for (Task task : job.getTasks().values()) { TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID())); Assert.assertNotNull("TaskInfo not found", taskInfo); for (TaskAttempt taskAttempt : task.getAttempts().values()) { TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get( TypeConverter.fromYarn((taskAttempt.getID()))); Assert.assertNotNull("TaskAttemptInfo not found", taskAttemptInfo); Assert.assertEquals("Incorrect shuffle port for task attempt", taskAttempt.getShufflePort(), taskAttemptInfo.getShufflePort()); if (numMaps == numSuccessfulMaps) { Assert.assertEquals(MRApp.NM_HOST, taskAttemptInfo.getHostname()); Assert.assertEquals(MRApp.NM_PORT, taskAttemptInfo.getPort()); // Verify rack-name Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME); } } } // test output for HistoryViewer PrintStream stdps = System.out; try { System.setOut(new PrintStream(outContent)); HistoryViewer viewer; synchronized (fileInfo) { viewer = new HistoryViewer(fc.makeQualified( fileInfo.getHistoryFile()).toString(), conf, true); } viewer.print(); for (TaskInfo taskInfo : allTasks.values()) { String test = (taskInfo.getTaskStatus() == null ? 
"" : taskInfo .getTaskStatus()) + " " + taskInfo.getTaskType() + " task list for " + taskInfo.getTaskId().getJobID(); Assert.assertTrue(outContent.toString().indexOf(test) > 0); Assert.assertTrue(outContent.toString().indexOf( taskInfo.getTaskId().toString()) > 0); } } finally { System.setOut(stdps); } } // Computes finished maps similar to RecoveryService... private long computeFinishedMaps(JobInfo jobInfo, int numMaps, int numSuccessfulMaps) { if (numMaps == numSuccessfulMaps) { return jobInfo.getFinishedMaps(); } long numFinishedMaps = 0; Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo .getAllTasks(); for (TaskInfo taskInfo : taskInfos.values()) { if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) { ++numFinishedMaps; } } return numFinishedMaps; } @Test(timeout = 30000) public void testHistoryParsingForFailedAttempts() throws Exception { LOG.info("STARTING testHistoryParsingForFailedAttempts"); try { Configuration conf = new Configuration(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistoryWithFailedAttempt(2, 1, true, this .getClass().getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); app.waitForState(job, JobState.SUCCEEDED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); JobHistory jobHistory = new JobHistory(); jobHistory.init(conf); HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId); JobHistoryParser parser; JobInfo jobInfo; synchronized (fileInfo) { Path historyFilePath = fileInfo.getHistoryFile(); FSDataInputStream in = null; FileContext fc = null; try { fc = FileContext.getFileContext(conf); in = fc.open(fc.makeQualified(historyFilePath)); } catch (IOException ioe) { LOG.info("Can not open history file: " + historyFilePath, ioe); throw (new Exception("Can not open History File")); } parser 
= new JobHistoryParser(in); jobInfo = parser.parse(); } Exception parseException = parser.getParseException(); Assert.assertNull("Caught an expected exception " + parseException, parseException); int noOffailedAttempts = 0; Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks(); for (Task task : job.getTasks().values()) { TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID())); for (TaskAttempt taskAttempt : task.getAttempts().values()) { TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get( TypeConverter.fromYarn((taskAttempt.getID()))); // Verify rack-name for all task attempts Assert.assertEquals("rack-name is incorrect", taskAttemptInfo.getRackname(), RACK_NAME); if (taskAttemptInfo.getTaskStatus().equals("FAILED")) { noOffailedAttempts++; } } } Assert.assertEquals("No of Failed tasks doesn't match.", 2, noOffailedAttempts); } finally { LOG.info("FINISHED testHistoryParsingForFailedAttempts"); } } @Test(timeout = 60000) public void testCountersForFailedTask() throws Exception { LOG.info("STARTING testCountersForFailedTask"); try { Configuration conf = new Configuration(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistoryWithFailedTask(2, 1, true, this .getClass().getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); app.waitForState(job, JobState.FAILED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); JobHistory jobHistory = new JobHistory(); jobHistory.init(conf); HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId); JobHistoryParser parser; JobInfo jobInfo; synchronized (fileInfo) { Path historyFilePath = fileInfo.getHistoryFile(); FSDataInputStream in = null; FileContext fc = null; try { fc = FileContext.getFileContext(conf); in = fc.open(fc.makeQualified(historyFilePath)); } catch (IOException ioe) { 
LOG.info("Can not open history file: " + historyFilePath, ioe); throw (new Exception("Can not open History File")); } parser = new JobHistoryParser(in); jobInfo = parser.parse(); } Exception parseException = parser.getParseException(); Assert.assertNull("Caught an expected exception " + parseException, parseException); for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) { TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey()); CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue()); Assert.assertNotNull("completed task report has null counters", ct .getReport().getCounters()); } final List<String> originalDiagnostics = job.getDiagnostics(); final String historyError = jobInfo.getErrorInfo(); assertTrue("No original diagnostics for a failed job", originalDiagnostics != null && !originalDiagnostics.isEmpty()); assertNotNull("No history error info for a failed job ", historyError); for (String diagString : originalDiagnostics) { assertTrue(historyError.contains(diagString)); } } finally { LOG.info("FINISHED testCountersForFailedTask"); } } @Test(timeout = 60000) public void testDiagnosticsForKilledJob() throws Exception { LOG.info("STARTING testDiagnosticsForKilledJob"); try { final Configuration conf = new Configuration(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistoryWithJobKilled(2, 1, true, this .getClass().getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); app.waitForState(job, JobState.KILLED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); JobHistory jobHistory = new JobHistory(); jobHistory.init(conf); HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId); JobHistoryParser parser; JobInfo jobInfo; synchronized (fileInfo) { Path historyFilePath = fileInfo.getHistoryFile(); FSDataInputStream in = 
null; FileContext fc = null; try { fc = FileContext.getFileContext(conf); in = fc.open(fc.makeQualified(historyFilePath)); } catch (IOException ioe) { LOG.info("Can not open history file: " + historyFilePath, ioe); throw (new Exception("Can not open History File")); } parser = new JobHistoryParser(in); jobInfo = parser.parse(); } Exception parseException = parser.getParseException(); assertNull("Caught an expected exception " + parseException, parseException); final List<String> originalDiagnostics = job.getDiagnostics(); final String historyError = jobInfo.getErrorInfo(); assertTrue("No original diagnostics for a failed job", originalDiagnostics != null && !originalDiagnostics.isEmpty()); assertNotNull("No history error info for a failed job ", historyError); for (String diagString : originalDiagnostics) { assertTrue(historyError.contains(diagString)); } assertTrue("No killed message in diagnostics", historyError.contains(JobImpl.JOB_KILLED_DIAG)); } finally { LOG.info("FINISHED testDiagnosticsForKilledJob"); } } @Test(timeout = 50000) public void testScanningOldDirs() throws Exception { LOG.info("STARTING testScanningOldDirs"); try { Configuration conf = new Configuration(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString()); app.waitForState(job, JobState.SUCCEEDED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); HistoryFileManagerForTest hfm = new HistoryFileManagerForTest(); hfm.init(conf); HistoryFileInfo fileInfo = hfm.getFileInfo(jobId); Assert.assertNotNull("Unable to locate job history", fileInfo); // force the manager to "forget" the job hfm.deleteJobFromJobListCache(fileInfo); final int 
msecPerSleep = 10; int msecToSleep = 10 * 1000; while (fileInfo.isMovePending() && msecToSleep > 0) { Assert.assertTrue(!fileInfo.didMoveFail()); msecToSleep -= msecPerSleep; Thread.sleep(msecPerSleep); } Assert.assertTrue("Timeout waiting for history move", msecToSleep > 0); fileInfo = hfm.getFileInfo(jobId); hfm.stop(); Assert.assertNotNull("Unable to locate old job history", fileInfo); Assert.assertTrue("HistoryFileManager not shutdown properly", hfm.moveToDoneExecutor.isTerminated()); } finally { LOG.info("FINISHED testScanningOldDirs"); } } static class MRAppWithHistoryWithFailedAttempt extends MRAppWithHistory { public MRAppWithHistoryWithFailedAttempt(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart) { super(maps, reduces, autoComplete, testName, cleanOnStart); } @SuppressWarnings("unchecked") @Override protected void attemptLaunched(TaskAttemptId attemptID) { if (attemptID.getTaskId().getId() == 0 && attemptID.getId() == 0) { getContext().getEventHandler().handle( new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_FAILMSG)); } else { getContext().getEventHandler().handle( new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE)); } } } static class MRAppWithHistoryWithFailedTask extends MRAppWithHistory { public MRAppWithHistoryWithFailedTask(int maps, int reduces, boolean autoComplete, String testName, boolean cleanOnStart) { super(maps, reduces, autoComplete, testName, cleanOnStart); } @SuppressWarnings("unchecked") @Override protected void attemptLaunched(TaskAttemptId attemptID) { if (attemptID.getTaskId().getId() == 0) { getContext().getEventHandler().handle( new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_FAILMSG)); } else { getContext().getEventHandler().handle( new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE)); } } } static class MRAppWithHistoryWithJobKilled extends MRAppWithHistory { public MRAppWithHistoryWithJobKilled(int maps, int reduces, boolean autoComplete, String testName, 
boolean cleanOnStart) { super(maps, reduces, autoComplete, testName, cleanOnStart); } @SuppressWarnings("unchecked") @Override protected void attemptLaunched(TaskAttemptId attemptID) { if (attemptID.getTaskId().getId() == 0) { getContext().getEventHandler().handle( new JobEvent(attemptID.getTaskId().getJobId(), JobEventType.JOB_KILL)); } else { getContext().getEventHandler().handle( new TaskAttemptEvent(attemptID, TaskAttemptEventType.TA_DONE)); } } } static class HistoryFileManagerForTest extends HistoryFileManager { void deleteJobFromJobListCache(HistoryFileInfo fileInfo) { jobListCache.delete(fileInfo); } } public static void main(String[] args) throws Exception { TestJobHistoryParsing t = new TestJobHistoryParsing(); t.testHistoryParsing(); t.testHistoryParsingForFailedAttempts(); } /** * Test clean old history files. Files should be deleted after 1 week by * default. */ @Test(timeout = 15000) public void testDeleteFileInfo() throws Exception { LOG.info("STARTING testDeleteFileInfo"); try { Configuration conf = new Configuration(); conf.setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(conf); MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true); app.submit(conf); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); app.waitForState(job, JobState.SUCCEEDED); // make sure all events are flushed app.waitForState(Service.STATE.STOPPED); HistoryFileManager hfm = new HistoryFileManager(); hfm.init(conf); HistoryFileInfo fileInfo = hfm.getFileInfo(jobId); hfm.initExisting(); // wait for move files form the done_intermediate directory to the gone // directory while (fileInfo.isMovePending()) { Thread.sleep(300); } Assert.assertNotNull(hfm.jobListCache.values()); // try to remove fileInfo hfm.clean(); // check that fileInfo does not deleted Assert.assertFalse(fileInfo.isDeleted()); // correct live time hfm.setMaxHistoryAge(-1); 
hfm.clean(); hfm.stop(); Assert.assertTrue("Thread pool shutdown", hfm.moveToDoneExecutor.isTerminated()); // should be deleted ! Assert.assertTrue("file should be deleted ", fileInfo.isDeleted()); } finally { LOG.info("FINISHED testDeleteFileInfo"); } } /** * Simple test some methods of JobHistory */ @Test(timeout = 20000) public void testJobHistoryMethods() throws Exception { LOG.info("STARTING testJobHistoryMethods"); try { Configuration configuration = new Configuration(); configuration .setClass( NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class); RackResolver.init(configuration); MRApp app = new MRAppWithHistory(1, 1, true, this.getClass().getName(), true); app.submit(configuration); Job job = app.getContext().getAllJobs().values().iterator().next(); JobId jobId = job.getID(); LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString()); app.waitForState(job, JobState.SUCCEEDED); JobHistory jobHistory = new JobHistory(); jobHistory.init(configuration); // Method getAllJobs Assert.assertEquals(1, jobHistory.getAllJobs().size()); // and with ApplicationId Assert.assertEquals(1, jobHistory.getAllJobs(app.getAppID()).size()); JobsInfo jobsinfo = jobHistory.getPartialJobs(0L, 10L, null, "default", 0L, System.currentTimeMillis() + 1, 0L, System.currentTimeMillis() + 1, JobState.SUCCEEDED); Assert.assertEquals(1, jobsinfo.getJobs().size()); Assert.assertNotNull(jobHistory.getApplicationAttemptId()); // test Application Id Assert.assertEquals("application_0_0000", jobHistory.getApplicationID() .toString()); Assert .assertEquals("Job History Server", jobHistory.getApplicationName()); // method does not work Assert.assertNull(jobHistory.getEventHandler()); // method does not work Assert.assertNull(jobHistory.getClock()); // method does not work Assert.assertNull(jobHistory.getClusterInfo()); } finally { LOG.info("FINISHED testJobHistoryMethods"); } } /** * Simple test PartialJob */ @Test(timeout = 1000) public void 
testPartialJob() throws Exception { JobId jobId = new JobIdPBImpl(); jobId.setId(0); JobIndexInfo jii = new JobIndexInfo(0L, System.currentTimeMillis(), "user", "jobName", jobId, 3, 2, "JobStatus"); PartialJob test = new PartialJob(jii, jobId); Assert.assertEquals(1.0f, test.getProgress(), 0.001f); assertNull(test.getAllCounters()); assertNull(test.getTasks()); assertNull(test.getTasks(TaskType.MAP)); assertNull(test.getTask(new TaskIdPBImpl())); assertNull(test.getTaskAttemptCompletionEvents(0, 100)); assertNull(test.getMapAttemptCompletionEvents(0, 100)); assertTrue(test.checkAccess(UserGroupInformation.getCurrentUser(), null)); assertNull(test.getAMInfos()); } @Test public void testMultipleFailedTasks() throws Exception { JobHistoryParser parser = new JobHistoryParser(Mockito.mock(FSDataInputStream.class)); EventReader reader = Mockito.mock(EventReader.class); final AtomicInteger numEventsRead = new AtomicInteger(0); // Hack! final org.apache.hadoop.mapreduce.TaskType taskType = org.apache.hadoop.mapreduce.TaskType.MAP; final TaskID[] tids = new TaskID[2]; final JobID jid = new JobID("1", 1); tids[0] = new TaskID(jid, taskType, 0); tids[1] = new TaskID(jid, taskType, 1); Mockito.when(reader.getNextEvent()).thenAnswer( new Answer<HistoryEvent>() { public HistoryEvent answer(InvocationOnMock invocation) throws IOException { // send two task start and two task fail events for tasks 0 and 1 int eventId = numEventsRead.getAndIncrement(); TaskID tid = tids[eventId & 0x1]; if (eventId < 2) { return new TaskStartedEvent(tid, 0, taskType, ""); } if (eventId < 4) { TaskFailedEvent tfe = new TaskFailedEvent(tid, 0, taskType, "failed", "FAILED", null, new Counters()); tfe.setDatum(tfe.getDatum()); return tfe; } if (eventId < 5) { JobUnsuccessfulCompletionEvent juce = new JobUnsuccessfulCompletionEvent(jid, 100L, 2, 0, "JOB_FAILED", Collections.singletonList( "Task failed: " + tids[0].toString())); return juce; } return null; } }); JobInfo info = parser.parse(reader); 
assertTrue("Task 0 not implicated", info.getErrorInfo().contains(tids[0].toString())); } @Test public void testFailedJobHistoryWithoutDiagnostics() throws Exception { final Path histPath = new Path(getClass().getClassLoader().getResource( "job_1393307629410_0001-1393307687476-user-Sleep+job-1393307723835-0-0-FAILED-default-1393307693920.jhist") .getFile()); final FileSystem lfs = FileSystem.getLocal(new Configuration()); final FSDataInputStream fsdis = lfs.open(histPath); try { JobHistoryParser parser = new JobHistoryParser(fsdis); JobInfo info = parser.parse(); assertEquals("History parsed jobId incorrectly", info.getJobId(), JobID.forName("job_1393307629410_0001") ); assertEquals("Default diagnostics incorrect ", "", info.getErrorInfo()); } finally { fsdis.close(); } } /** * Test compatibility of JobHistoryParser with 2.0.3-alpha history files * @throws IOException */ @Test public void testTaskAttemptUnsuccessfulCompletionWithoutCounters203() throws IOException { Path histPath = new Path(getClass().getClassLoader().getResource( "job_2.0.3-alpha-FAILED.jhist").getFile()); JobHistoryParser parser = new JobHistoryParser(FileSystem.getLocal (new Configuration()), histPath); JobInfo jobInfo = parser.parse(); LOG.info(" job info: " + jobInfo.getJobname() + " " + jobInfo.getFinishedMaps() + " " + jobInfo.getTotalMaps() + " " + jobInfo.getJobId() ) ; } /** * Test compatibility of JobHistoryParser with 2.4.0 history files * @throws IOException */ @Test public void testTaskAttemptUnsuccessfulCompletionWithoutCounters240() throws IOException { Path histPath = new Path(getClass().getClassLoader().getResource( "job_2.4.0-FAILED.jhist").getFile()); JobHistoryParser parser = new JobHistoryParser(FileSystem.getLocal (new Configuration()), histPath); JobInfo jobInfo = parser.parse(); LOG.info(" job info: " + jobInfo.getJobname() + " " + jobInfo.getFinishedMaps() + " " + jobInfo.getTotalMaps() + " " + jobInfo.getJobId() ); } /** * Test compatibility of JobHistoryParser with 0.23.9 
history files * @throws IOException */ @Test public void testTaskAttemptUnsuccessfulCompletionWithoutCounters0239() throws IOException { Path histPath = new Path(getClass().getClassLoader().getResource( "job_0.23.9-FAILED.jhist").getFile()); JobHistoryParser parser = new JobHistoryParser(FileSystem.getLocal (new Configuration()), histPath); JobInfo jobInfo = parser.parse(); LOG.info(" job info: " + jobInfo.getJobname() + " " + jobInfo.getFinishedMaps() + " " + jobInfo.getTotalMaps() + " " + jobInfo.getJobId() ) ; } }
googlearchive/abelana
37,760
Android/app/src/main/java/com/examples/abelanav2/grpc/AbelanaGrpc.java
/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.examples.abelanav2.grpc; import static io.grpc.stub.Calls.createMethodDescriptor; import static io.grpc.stub.Calls.asyncUnaryCall; import static io.grpc.stub.Calls.asyncServerStreamingCall; import static io.grpc.stub.Calls.asyncClientStreamingCall; import static io.grpc.stub.Calls.duplexStreamingCall; import static io.grpc.stub.Calls.blockingUnaryCall; import static io.grpc.stub.Calls.blockingServerStreamingCall; import static io.grpc.stub.Calls.unaryFutureCall; import static io.grpc.stub.ServerCalls.createMethodDefinition; import static io.grpc.stub.ServerCalls.asyncUnaryRequestCall; import static io.grpc.stub.ServerCalls.asyncStreamingRequestCall; import java.io.IOException; @javax.annotation.Generated("by gRPC proto compiler") public class AbelanaGrpc { private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.SignInRequest, com.examples.abelanav2.grpc.SignInResponse> METHOD_SIGN_IN = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "SignIn", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.SignInRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.SignInRequest>() { @Override public com.examples.abelanav2.grpc.SignInRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.SignInRequest.parseFrom(input); } }), 
// NOTE(review): generated gRPC bindings for the abelanav2.grpc.Abelana service — one UNARY
// io.grpc.stub.Method descriptor per RPC (nano-proto request/response parsers), stub factories,
// and a server binding. Do not edit by hand; regenerate from the service .proto instead.
// Next: SignIn response marshaller, then the PhotoStream and FlagPhoto descriptors.
io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.SignInResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.SignInResponse>() { @Override public com.examples.abelanav2.grpc.SignInResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.SignInResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> METHOD_PHOTO_STREAM = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "PhotoStream", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListRequest>() { @Override public com.examples.abelanav2.grpc.PhotoListRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListResponse>() { @Override public com.examples.abelanav2.grpc.PhotoListResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.FlagRequest, com.examples.abelanav2.grpc.StatusResponse> METHOD_FLAG_PHOTO = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "FlagPhoto", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.FlagRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.FlagRequest>() { @Override public com.examples.abelanav2.grpc.FlagRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.FlagRequest.parseFrom(input); } }), 
// FlagPhoto response marshaller, then the ListMyPhotos and UploadPhoto descriptors.
io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.StatusResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.StatusResponse>() { @Override public com.examples.abelanav2.grpc.StatusResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.StatusResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> METHOD_LIST_MY_PHOTOS = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "ListMyPhotos", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListRequest>() { @Override public com.examples.abelanav2.grpc.PhotoListRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListResponse>() { @Override public com.examples.abelanav2.grpc.PhotoListResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.NewPhotoRequest, com.examples.abelanav2.grpc.UploadPhotoResponse> METHOD_UPLOAD_PHOTO = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "UploadPhoto", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.NewPhotoRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.NewPhotoRequest>() { @Override public com.examples.abelanav2.grpc.NewPhotoRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return 
// (continues the UploadPhoto request parser) Upload response marshaller, then the
// EditPhoto and DeletePhoto descriptors.
com.examples.abelanav2.grpc.NewPhotoRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.UploadPhotoResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.UploadPhotoResponse>() { @Override public com.examples.abelanav2.grpc.UploadPhotoResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.UploadPhotoResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.EditPhotoRequest, com.examples.abelanav2.grpc.StatusResponse> METHOD_EDIT_PHOTO = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "EditPhoto", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.EditPhotoRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.EditPhotoRequest>() { @Override public com.examples.abelanav2.grpc.EditPhotoRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.EditPhotoRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.StatusResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.StatusResponse>() { @Override public com.examples.abelanav2.grpc.StatusResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.StatusResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.DeletePhotoRequest, com.examples.abelanav2.grpc.StatusResponse> METHOD_DELETE_PHOTO = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "DeletePhoto", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.DeletePhotoRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.DeletePhotoRequest>() { @Override public com.examples.abelanav2.grpc.DeletePhotoRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano 
// (continues the DeletePhoto request parser) Delete response marshaller, then the
// ListMyLikes and RatePhoto descriptors.
input) throws IOException { return com.examples.abelanav2.grpc.DeletePhotoRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.StatusResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.StatusResponse>() { @Override public com.examples.abelanav2.grpc.StatusResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.StatusResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> METHOD_LIST_MY_LIKES = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "ListMyLikes", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListRequest>() { @Override public com.examples.abelanav2.grpc.PhotoListRequest parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.PhotoListResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.PhotoListResponse>() { @Override public com.examples.abelanav2.grpc.PhotoListResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.PhotoListResponse.parseFrom(input); } })); private static final io.grpc.stub.Method<com.examples.abelanav2.grpc.VoteRequest, com.examples.abelanav2.grpc.StatusResponse> METHOD_RATE_PHOTO = io.grpc.stub.Method.create( io.grpc.MethodType.UNARY, "RatePhoto", io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.VoteRequest>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.VoteRequest>() { @Override public com.examples.abelanav2.grpc.VoteRequest 
// (continues the RatePhoto request parser) Rate response marshaller; then the client stub
// factories (newStub / newBlockingStub / newFutureStub), the shared immutable CONFIG, and
// the start of AbelanaServiceDescriptor, which exposes one MethodDescriptor field per RPC.
parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.VoteRequest.parseFrom(input); } }), io.grpc.protobuf.nano.NanoUtils.<com.examples.abelanav2.grpc.StatusResponse>marshaller( new io.grpc.protobuf.nano.Parser<com.examples.abelanav2.grpc.StatusResponse>() { @Override public com.examples.abelanav2.grpc.StatusResponse parse(com.google.protobuf.nano.CodedInputByteBufferNano input) throws IOException { return com.examples.abelanav2.grpc.StatusResponse.parseFrom(input); } })); public static AbelanaStub newStub(io.grpc.Channel channel) { return new AbelanaStub(channel, CONFIG); } public static AbelanaBlockingStub newBlockingStub( io.grpc.Channel channel) { return new AbelanaBlockingStub(channel, CONFIG); } public static AbelanaFutureStub newFutureStub( io.grpc.Channel channel) { return new AbelanaFutureStub(channel, CONFIG); } public static final AbelanaServiceDescriptor CONFIG = new AbelanaServiceDescriptor(); @javax.annotation.concurrent.Immutable public static class AbelanaServiceDescriptor extends io.grpc.stub.AbstractServiceDescriptor<AbelanaServiceDescriptor> { public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.SignInRequest, com.examples.abelanav2.grpc.SignInResponse> signIn; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> photoStream; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.FlagRequest, com.examples.abelanav2.grpc.StatusResponse> flagPhoto; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> listMyPhotos; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.NewPhotoRequest, com.examples.abelanav2.grpc.UploadPhotoResponse> uploadPhoto; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.EditPhotoRequest, com.examples.abelanav2.grpc.StatusResponse> editPhoto; public final 
// Remaining descriptor fields; the no-arg constructor wires each field via
// createMethodDescriptor, and the map-based constructor (used by build()) re-reads each
// descriptor from the map by its method name.
io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.DeletePhotoRequest, com.examples.abelanav2.grpc.StatusResponse> deletePhoto; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse> listMyLikes; public final io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.VoteRequest, com.examples.abelanav2.grpc.StatusResponse> ratePhoto; private AbelanaServiceDescriptor() { signIn = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_SIGN_IN); photoStream = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_PHOTO_STREAM); flagPhoto = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_FLAG_PHOTO); listMyPhotos = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_LIST_MY_PHOTOS); uploadPhoto = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_UPLOAD_PHOTO); editPhoto = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_EDIT_PHOTO); deletePhoto = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_DELETE_PHOTO); listMyLikes = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_LIST_MY_LIKES); ratePhoto = createMethodDescriptor( "abelanav2.grpc.Abelana", METHOD_RATE_PHOTO); } @SuppressWarnings("unchecked") private AbelanaServiceDescriptor( java.util.Map<java.lang.String, io.grpc.MethodDescriptor<?, ?>> methodMap) { signIn = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.SignInRequest, com.examples.abelanav2.grpc.SignInResponse>) methodMap.get( CONFIG.signIn.getName()); photoStream = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>) methodMap.get( CONFIG.photoStream.getName()); flagPhoto = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.FlagRequest, com.examples.abelanav2.grpc.StatusResponse>) methodMap.get( CONFIG.flagPhoto.getName()); listMyPhotos = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>) 
// End of the map-based constructor; build() and methods() complete the descriptor contract.
// The Abelana interface then declares the async service API (one StreamObserver-callback
// method per RPC).
methodMap.get( CONFIG.listMyPhotos.getName()); uploadPhoto = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.NewPhotoRequest, com.examples.abelanav2.grpc.UploadPhotoResponse>) methodMap.get( CONFIG.uploadPhoto.getName()); editPhoto = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.EditPhotoRequest, com.examples.abelanav2.grpc.StatusResponse>) methodMap.get( CONFIG.editPhoto.getName()); deletePhoto = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.DeletePhotoRequest, com.examples.abelanav2.grpc.StatusResponse>) methodMap.get( CONFIG.deletePhoto.getName()); listMyLikes = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>) methodMap.get( CONFIG.listMyLikes.getName()); ratePhoto = (io.grpc.MethodDescriptor<com.examples.abelanav2.grpc.VoteRequest, com.examples.abelanav2.grpc.StatusResponse>) methodMap.get( CONFIG.ratePhoto.getName()); } @java.lang.Override protected AbelanaServiceDescriptor build( java.util.Map<java.lang.String, io.grpc.MethodDescriptor<?, ?>> methodMap) { return new AbelanaServiceDescriptor(methodMap); } @java.lang.Override public com.google.common.collect.ImmutableList<io.grpc.MethodDescriptor<?, ?>> methods() { return com.google.common.collect.ImmutableList.<io.grpc.MethodDescriptor<?, ?>>of( signIn, photoStream, flagPhoto, listMyPhotos, uploadPhoto, editPhoto, deletePhoto, listMyLikes, ratePhoto); } } public static interface Abelana { public void signIn(com.examples.abelanav2.grpc.SignInRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.SignInResponse> responseObserver); public void photoStream(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver); public void flagPhoto(com.examples.abelanav2.grpc.FlagRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver); public void 
// Remaining async methods; AbelanaBlockingClient declares the synchronous variant of each RPC.
listMyPhotos(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver); public void uploadPhoto(com.examples.abelanav2.grpc.NewPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.UploadPhotoResponse> responseObserver); public void editPhoto(com.examples.abelanav2.grpc.EditPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver); public void deletePhoto(com.examples.abelanav2.grpc.DeletePhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver); public void listMyLikes(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver); public void ratePhoto(com.examples.abelanav2.grpc.VoteRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver); } public static interface AbelanaBlockingClient { public com.examples.abelanav2.grpc.SignInResponse signIn(com.examples.abelanav2.grpc.SignInRequest request); public com.examples.abelanav2.grpc.PhotoListResponse photoStream(com.examples.abelanav2.grpc.PhotoListRequest request); public com.examples.abelanav2.grpc.StatusResponse flagPhoto(com.examples.abelanav2.grpc.FlagRequest request); public com.examples.abelanav2.grpc.PhotoListResponse listMyPhotos(com.examples.abelanav2.grpc.PhotoListRequest request); public com.examples.abelanav2.grpc.UploadPhotoResponse uploadPhoto(com.examples.abelanav2.grpc.NewPhotoRequest request); public com.examples.abelanav2.grpc.StatusResponse editPhoto(com.examples.abelanav2.grpc.EditPhotoRequest request); public com.examples.abelanav2.grpc.StatusResponse deletePhoto(com.examples.abelanav2.grpc.DeletePhotoRequest request); public com.examples.abelanav2.grpc.PhotoListResponse listMyLikes(com.examples.abelanav2.grpc.PhotoListRequest request); public 
// End of the blocking-client interface; AbelanaFutureClient returns a ListenableFuture per
// RPC, and AbelanaStub implements the async Abelana interface over an io.grpc.Channel.
com.examples.abelanav2.grpc.StatusResponse ratePhoto(com.examples.abelanav2.grpc.VoteRequest request); } public static interface AbelanaFutureClient { public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.SignInResponse> signIn( com.examples.abelanav2.grpc.SignInRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> photoStream( com.examples.abelanav2.grpc.PhotoListRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> flagPhoto( com.examples.abelanav2.grpc.FlagRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> listMyPhotos( com.examples.abelanav2.grpc.PhotoListRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.UploadPhotoResponse> uploadPhoto( com.examples.abelanav2.grpc.NewPhotoRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> editPhoto( com.examples.abelanav2.grpc.EditPhotoRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> deletePhoto( com.examples.abelanav2.grpc.DeletePhotoRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> listMyLikes( com.examples.abelanav2.grpc.PhotoListRequest request); public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> ratePhoto( com.examples.abelanav2.grpc.VoteRequest request); } public static class AbelanaStub extends io.grpc.stub.AbstractStub<AbelanaStub, AbelanaServiceDescriptor> implements Abelana { private AbelanaStub(io.grpc.Channel channel, AbelanaServiceDescriptor config) { super(channel, config); } @java.lang.Override protected AbelanaStub build(io.grpc.Channel channel, 
// AbelanaStub methods: each RPC is a one-shot asyncUnaryCall on a freshly created
// ClientCall for the matching descriptor in `config`.
AbelanaServiceDescriptor config) { return new AbelanaStub(channel, config); } @java.lang.Override public void signIn(com.examples.abelanav2.grpc.SignInRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.SignInResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.signIn), request, responseObserver); } @java.lang.Override public void photoStream(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.photoStream), request, responseObserver); } @java.lang.Override public void flagPhoto(com.examples.abelanav2.grpc.FlagRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.flagPhoto), request, responseObserver); } @java.lang.Override public void listMyPhotos(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.listMyPhotos), request, responseObserver); } @java.lang.Override public void uploadPhoto(com.examples.abelanav2.grpc.NewPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.UploadPhotoResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.uploadPhoto), request, responseObserver); } @java.lang.Override public void editPhoto(com.examples.abelanav2.grpc.EditPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.editPhoto), request, responseObserver); } @java.lang.Override public void deletePhoto(com.examples.abelanav2.grpc.DeletePhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.deletePhoto), request, responseObserver); } @java.lang.Override 
// End of AbelanaStub; AbelanaBlockingStub maps each RPC onto blockingUnaryCall.
public void listMyLikes(com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.listMyLikes), request, responseObserver); } @java.lang.Override public void ratePhoto(com.examples.abelanav2.grpc.VoteRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { asyncUnaryCall( channel.newCall(config.ratePhoto), request, responseObserver); } } public static class AbelanaBlockingStub extends io.grpc.stub.AbstractStub<AbelanaBlockingStub, AbelanaServiceDescriptor> implements AbelanaBlockingClient { private AbelanaBlockingStub(io.grpc.Channel channel, AbelanaServiceDescriptor config) { super(channel, config); } @java.lang.Override protected AbelanaBlockingStub build(io.grpc.Channel channel, AbelanaServiceDescriptor config) { return new AbelanaBlockingStub(channel, config); } @java.lang.Override public com.examples.abelanav2.grpc.SignInResponse signIn(com.examples.abelanav2.grpc.SignInRequest request) { return blockingUnaryCall( channel.newCall(config.signIn), request); } @java.lang.Override public com.examples.abelanav2.grpc.PhotoListResponse photoStream(com.examples.abelanav2.grpc.PhotoListRequest request) { return blockingUnaryCall( channel.newCall(config.photoStream), request); } @java.lang.Override public com.examples.abelanav2.grpc.StatusResponse flagPhoto(com.examples.abelanav2.grpc.FlagRequest request) { return blockingUnaryCall( channel.newCall(config.flagPhoto), request); } @java.lang.Override public com.examples.abelanav2.grpc.PhotoListResponse listMyPhotos(com.examples.abelanav2.grpc.PhotoListRequest request) { return blockingUnaryCall( channel.newCall(config.listMyPhotos), request); } @java.lang.Override public com.examples.abelanav2.grpc.UploadPhotoResponse uploadPhoto(com.examples.abelanav2.grpc.NewPhotoRequest request) { return blockingUnaryCall( 
// End of AbelanaBlockingStub; AbelanaFutureStub maps each RPC onto unaryFutureCall.
channel.newCall(config.uploadPhoto), request); } @java.lang.Override public com.examples.abelanav2.grpc.StatusResponse editPhoto(com.examples.abelanav2.grpc.EditPhotoRequest request) { return blockingUnaryCall( channel.newCall(config.editPhoto), request); } @java.lang.Override public com.examples.abelanav2.grpc.StatusResponse deletePhoto(com.examples.abelanav2.grpc.DeletePhotoRequest request) { return blockingUnaryCall( channel.newCall(config.deletePhoto), request); } @java.lang.Override public com.examples.abelanav2.grpc.PhotoListResponse listMyLikes(com.examples.abelanav2.grpc.PhotoListRequest request) { return blockingUnaryCall( channel.newCall(config.listMyLikes), request); } @java.lang.Override public com.examples.abelanav2.grpc.StatusResponse ratePhoto(com.examples.abelanav2.grpc.VoteRequest request) { return blockingUnaryCall( channel.newCall(config.ratePhoto), request); } } public static class AbelanaFutureStub extends io.grpc.stub.AbstractStub<AbelanaFutureStub, AbelanaServiceDescriptor> implements AbelanaFutureClient { private AbelanaFutureStub(io.grpc.Channel channel, AbelanaServiceDescriptor config) { super(channel, config); } @java.lang.Override protected AbelanaFutureStub build(io.grpc.Channel channel, AbelanaServiceDescriptor config) { return new AbelanaFutureStub(channel, config); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.SignInResponse> signIn( com.examples.abelanav2.grpc.SignInRequest request) { return unaryFutureCall( channel.newCall(config.signIn), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> photoStream( com.examples.abelanav2.grpc.PhotoListRequest request) { return unaryFutureCall( channel.newCall(config.photoStream), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> flagPhoto( 
// Remaining future-stub methods; bindService() then registers a server-side handler
// (asyncUnaryRequestCall) per method, delegating to the supplied Abelana implementation.
com.examples.abelanav2.grpc.FlagRequest request) { return unaryFutureCall( channel.newCall(config.flagPhoto), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> listMyPhotos( com.examples.abelanav2.grpc.PhotoListRequest request) { return unaryFutureCall( channel.newCall(config.listMyPhotos), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.UploadPhotoResponse> uploadPhoto( com.examples.abelanav2.grpc.NewPhotoRequest request) { return unaryFutureCall( channel.newCall(config.uploadPhoto), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> editPhoto( com.examples.abelanav2.grpc.EditPhotoRequest request) { return unaryFutureCall( channel.newCall(config.editPhoto), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> deletePhoto( com.examples.abelanav2.grpc.DeletePhotoRequest request) { return unaryFutureCall( channel.newCall(config.deletePhoto), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.PhotoListResponse> listMyLikes( com.examples.abelanav2.grpc.PhotoListRequest request) { return unaryFutureCall( channel.newCall(config.listMyLikes), request); } @java.lang.Override public com.google.common.util.concurrent.ListenableFuture<com.examples.abelanav2.grpc.StatusResponse> ratePhoto( com.examples.abelanav2.grpc.VoteRequest request) { return unaryFutureCall( channel.newCall(config.ratePhoto), request); } } public static io.grpc.ServerServiceDefinition bindService( final Abelana serviceImpl) { return io.grpc.ServerServiceDefinition.builder("abelanav2.grpc.Abelana") .addMethod(createMethodDefinition( METHOD_SIGN_IN, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< 
// Server method definitions for SignIn, PhotoStream, FlagPhoto, ListMyPhotos and UploadPhoto.
com.examples.abelanav2.grpc.SignInRequest, com.examples.abelanav2.grpc.SignInResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.SignInRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.SignInResponse> responseObserver) { serviceImpl.signIn(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_PHOTO_STREAM, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { serviceImpl.photoStream(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_FLAG_PHOTO, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.FlagRequest, com.examples.abelanav2.grpc.StatusResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.FlagRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { serviceImpl.flagPhoto(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_LIST_MY_PHOTOS, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { serviceImpl.listMyPhotos(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_UPLOAD_PHOTO, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.NewPhotoRequest, com.examples.abelanav2.grpc.UploadPhotoResponse>() { @java.lang.Override public void invoke( 
// Server method definitions for EditPhoto, DeletePhoto, ListMyLikes and RatePhoto.
com.examples.abelanav2.grpc.NewPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.UploadPhotoResponse> responseObserver) { serviceImpl.uploadPhoto(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_EDIT_PHOTO, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.EditPhotoRequest, com.examples.abelanav2.grpc.StatusResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.EditPhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { serviceImpl.editPhoto(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_DELETE_PHOTO, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.DeletePhotoRequest, com.examples.abelanav2.grpc.StatusResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.DeletePhotoRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> responseObserver) { serviceImpl.deletePhoto(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_LIST_MY_LIKES, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.PhotoListRequest, com.examples.abelanav2.grpc.PhotoListResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.PhotoListRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.PhotoListResponse> responseObserver) { serviceImpl.listMyLikes(request, responseObserver); } }))) .addMethod(createMethodDefinition( METHOD_RATE_PHOTO, asyncUnaryRequestCall( new io.grpc.stub.ServerCalls.UnaryRequestMethod< com.examples.abelanav2.grpc.VoteRequest, com.examples.abelanav2.grpc.StatusResponse>() { @java.lang.Override public void invoke( com.examples.abelanav2.grpc.VoteRequest request, io.grpc.stub.StreamObserver<com.examples.abelanav2.grpc.StatusResponse> 
// RatePhoto handler body; the builder chain terminates with .build() and the class closes.
responseObserver) { serviceImpl.ratePhoto(request, responseObserver); } }))).build(); } }
google/error-prone
37,518
core/src/main/java/com/google/errorprone/refaster/UTemplater.java
/* * Copyright 2013 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.refaster; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.errorprone.util.ASTHelpers.hasAnnotation; import static com.google.errorprone.util.ASTHelpers.isStatic; import static com.google.errorprone.util.AnnotationNames.REPEATED_ANNOTATION; import com.google.common.collect.ImmutableClassToInstanceMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.reflect.TypeToken; import com.google.errorprone.SubContext; import com.google.errorprone.VisitorState; import com.google.errorprone.matchers.Matcher; import com.google.errorprone.refaster.annotation.Matches; import com.google.errorprone.refaster.annotation.NotMatches; import com.google.errorprone.refaster.annotation.OfKind; import com.google.errorprone.refaster.annotation.Repeated; import com.google.errorprone.util.ASTHelpers; import com.sun.source.tree.AnnotatedTypeTree; import com.sun.source.tree.AnnotationTree; import com.sun.source.tree.ArrayAccessTree; import com.sun.source.tree.ArrayTypeTree; import com.sun.source.tree.AssertTree; import com.sun.source.tree.AssignmentTree; import 
com.sun.source.tree.BinaryTree; import com.sun.source.tree.BlockTree; import com.sun.source.tree.BreakTree; import com.sun.source.tree.CatchTree; import com.sun.source.tree.ClassTree; import com.sun.source.tree.CompoundAssignmentTree; import com.sun.source.tree.ConditionalExpressionTree; import com.sun.source.tree.ContinueTree; import com.sun.source.tree.DoWhileLoopTree; import com.sun.source.tree.EmptyStatementTree; import com.sun.source.tree.EnhancedForLoopTree; import com.sun.source.tree.ExpressionStatementTree; import com.sun.source.tree.ExpressionTree; import com.sun.source.tree.ForLoopTree; import com.sun.source.tree.IdentifierTree; import com.sun.source.tree.IfTree; import com.sun.source.tree.InstanceOfTree; import com.sun.source.tree.IntersectionTypeTree; import com.sun.source.tree.LabeledStatementTree; import com.sun.source.tree.LambdaExpressionTree; import com.sun.source.tree.LiteralTree; import com.sun.source.tree.MemberReferenceTree; import com.sun.source.tree.MemberSelectTree; import com.sun.source.tree.MethodInvocationTree; import com.sun.source.tree.MethodTree; import com.sun.source.tree.ModifiersTree; import com.sun.source.tree.NewArrayTree; import com.sun.source.tree.NewClassTree; import com.sun.source.tree.ParameterizedTypeTree; import com.sun.source.tree.ParenthesizedTree; import com.sun.source.tree.PrimitiveTypeTree; import com.sun.source.tree.ReturnTree; import com.sun.source.tree.StatementTree; import com.sun.source.tree.SynchronizedTree; import com.sun.source.tree.ThrowTree; import com.sun.source.tree.Tree; import com.sun.source.tree.Tree.Kind; import com.sun.source.tree.TryTree; import com.sun.source.tree.TypeCastTree; import com.sun.source.tree.TypeParameterTree; import com.sun.source.tree.UnaryTree; import com.sun.source.tree.UnionTypeTree; import com.sun.source.tree.VariableTree; import com.sun.source.tree.WhileLoopTree; import com.sun.source.tree.WildcardTree; import com.sun.source.util.SimpleTreeVisitor; import 
com.sun.tools.javac.code.Attribute.Compound; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.code.Symbol.ClassSymbol; import com.sun.tools.javac.code.Symbol.MethodSymbol; import com.sun.tools.javac.code.Symbol.TypeSymbol; import com.sun.tools.javac.code.Symbol.VarSymbol; import com.sun.tools.javac.code.Type; import com.sun.tools.javac.code.Type.ArrayType; import com.sun.tools.javac.code.Type.ClassType; import com.sun.tools.javac.code.Type.ForAll; import com.sun.tools.javac.code.Type.IntersectionClassType; import com.sun.tools.javac.code.Type.MethodType; import com.sun.tools.javac.code.Type.TypeVar; import com.sun.tools.javac.code.Type.WildcardType; import com.sun.tools.javac.code.Types; import com.sun.tools.javac.model.AnnotationProxyMaker; import com.sun.tools.javac.tree.JCTree; import com.sun.tools.javac.tree.JCTree.JCLambda; import com.sun.tools.javac.tree.JCTree.JCModifiers; import com.sun.tools.javac.tree.JCTree.JCPrimitiveTypeTree; import com.sun.tools.javac.util.Context; import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.lang.model.element.ElementKind; import javax.lang.model.element.TypeElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.MirroredTypeException; import org.jspecify.annotations.Nullable; /** * Converts a type-checked syntax tree to a portable {@code UTree} template. * * @author lowasser@google.com (Louis Wasserman) */ public class UTemplater extends SimpleTreeVisitor<Tree, Void> { /** * Context key to indicate that templates should be treated as BlockTemplates, regardless of their * structure. */ public static final Context.Key<Boolean> REQUIRE_BLOCK_KEY = new Context.Key<>(); /** * Returns a template based on a method. 
One-line methods starting with a {@code return} statement * are guessed to be expression templates, and all other methods are guessed to be block * templates. */ public static Template<?> createTemplate(Context context, MethodTree decl) { MethodSymbol declSym = ASTHelpers.getSymbol(decl); ImmutableClassToInstanceMap<Annotation> annotations = UTemplater.annotationMap(declSym); ImmutableMap<String, VarSymbol> freeExpressionVars = freeExpressionVariables(decl); Context subContext = new SubContext(context); UTemplater templater = new UTemplater(freeExpressionVars, subContext); ImmutableMap<String, UType> expressionVarTypes = ImmutableMap.copyOf( Maps.transformValues( freeExpressionVars, (VarSymbol sym) -> templater.template(sym.type))); UType genericType = templater.template(declSym.type); ImmutableList<UTypeVar> typeParameters; UMethodType methodType; if (genericType instanceof UForAll forAllType) { typeParameters = forAllType.getTypeVars(); methodType = (UMethodType) forAllType.getQuantifiedType(); } else if (genericType instanceof UMethodType uMethodType) { typeParameters = ImmutableList.of(); methodType = uMethodType; } else { throw new IllegalArgumentException( "Expected genericType to be either a ForAll or a UMethodType, but was " + genericType); } List<? 
extends StatementTree> bodyStatements = decl.getBody().getStatements(); if (bodyStatements.size() == 1 && Iterables.getOnlyElement(bodyStatements) instanceof ReturnTree returnTree && context.get(REQUIRE_BLOCK_KEY) == null) { ExpressionTree expression = returnTree.getExpression(); return ExpressionTemplate.create( annotations, typeParameters, expressionVarTypes, templater.template(expression), methodType.getReturnType()); } else { List<UStatement> templateStatements = new ArrayList<>(); for (StatementTree statement : bodyStatements) { templateStatements.add(templater.template(statement)); } return BlockTemplate.create( annotations, typeParameters, expressionVarTypes, templateStatements); } } public static ImmutableMap<String, VarSymbol> freeExpressionVariables( MethodTree templateMethodDecl) { ImmutableMap.Builder<String, VarSymbol> builder = ImmutableMap.builder(); for (VariableTree param : templateMethodDecl.getParameters()) { builder.put(param.getName().toString(), ASTHelpers.getSymbol(param)); } return builder.buildOrThrow(); } private final ImmutableMap<String, VarSymbol> freeVariables; private final Context context; public UTemplater(Map<String, VarSymbol> freeVariables, Context context) { this.freeVariables = ImmutableMap.copyOf(freeVariables); this.context = context; } UTemplater(Context context) { this(ImmutableMap.<String, VarSymbol>of(), context); } public Tree template(Tree tree) { return tree.accept(this, null); } private @Nullable ImmutableList<Tree> templateTrees(@Nullable Iterable<? 
extends Tree> trees) { if (trees == null) { return null; } ImmutableList.Builder<Tree> builder = ImmutableList.builder(); for (Tree tree : trees) { builder.add(template(tree)); } return builder.build(); } private static <T> ImmutableList<T> cast(Iterable<?> elements, Class<T> clazz) { ImmutableList.Builder<T> builder = ImmutableList.builder(); for (Object element : elements) { builder.add(clazz.cast(element)); } return builder.build(); } @Override public UMethodDecl visitMethod(MethodTree decl, Void v) { return UMethodDecl.create( visitModifiers(decl.getModifiers(), null), decl.getName(), templateType(decl.getReturnType()), cast(templateStatements(decl.getParameters()), UVariableDecl.class), templateExpressions(decl.getThrows()), (UBlock) template(decl.getBody())); } @Override public UModifiers visitModifiers(ModifiersTree modifiers, Void v) { return UModifiers.create( ((JCModifiers) modifiers).flags, cast(templateExpressions(modifiers.getAnnotations()), UAnnotation.class)); } public UExpression template(ExpressionTree tree) { return (UExpression) tree.accept(this, null); } private @Nullable ImmutableList<UExpression> templateExpressions( @Nullable Iterable<? extends ExpressionTree> expressions) { if (expressions == null) { return null; } ImmutableList.Builder<UExpression> builder = ImmutableList.builder(); for (ExpressionTree expression : expressions) { builder.add(template(expression)); } return builder.build(); } public UExpression templateType(Tree tree) { checkArgument( tree instanceof ExpressionTree, "Trees representing types are expected to implement ExpressionTree, but %s does not", tree); return template((ExpressionTree) tree); } private @Nullable ImmutableList<UExpression> templateTypeExpressions( @Nullable Iterable<? 
extends Tree> types) { if (types == null) { return null; } ImmutableList.Builder<UExpression> builder = ImmutableList.builder(); for (Tree type : types) { builder.add(templateType(type)); } return builder.build(); } @Override public UInstanceOf visitInstanceOf(InstanceOfTree tree, Void v) { return UInstanceOf.create(template(tree.getExpression()), templateType(tree.getType())); } @Override public UPrimitiveTypeTree visitPrimitiveType(PrimitiveTypeTree tree, Void v) { return UPrimitiveTypeTree.create(((JCPrimitiveTypeTree) tree).typetag); } @Override public ULiteral visitLiteral(LiteralTree tree, Void v) { return ULiteral.create(tree.getKind(), tree.getValue()); } @Override public UParens visitParenthesized(ParenthesizedTree tree, Void v) { return UParens.create(template(tree.getExpression())); } @Override public UAssign visitAssignment(AssignmentTree tree, Void v) { return UAssign.create(template(tree.getVariable()), template(tree.getExpression())); } @Override public UArrayAccess visitArrayAccess(ArrayAccessTree tree, Void v) { return UArrayAccess.create(template(tree.getExpression()), template(tree.getIndex())); } @Override public UAnnotation visitAnnotation(AnnotationTree tree, Void v) { return UAnnotation.create( templateType(tree.getAnnotationType()), templateExpressions(tree.getArguments())); } @Override public UAnnotatedType visitAnnotatedType(AnnotatedTypeTree tree, Void v) { return UAnnotatedType.create( cast(templateExpressions(tree.getAnnotations()), UAnnotation.class), template(tree.getUnderlyingType())); } @Override public UExpression visitMemberSelect(MemberSelectTree tree, Void v) { Symbol sym = ASTHelpers.getSymbol(tree); if (sym instanceof ClassSymbol classSymbol) { return UClassIdent.create(classSymbol); } else if (isStatic(sym)) { ExpressionTree selected = tree.getExpression(); checkState( ASTHelpers.getSymbol(selected) instanceof ClassSymbol, "Refaster cannot match static methods used on instances"); return staticMember(sym); } return 
UMemberSelect.create( template(tree.getExpression()), tree.getIdentifier(), template(sym.type)); } private UStaticIdent staticMember(Symbol symbol) { return UStaticIdent.create( (ClassSymbol) symbol.getEnclosingElement(), symbol.getSimpleName(), template(symbol.asType())); } private UMethodIdent method(Symbol symbol) { return UMethodIdent.create( (ClassSymbol) symbol.getEnclosingElement(), symbol.getSimpleName(), template(symbol.asType())); } private static final UStaticIdent ANY_OF; private static final UStaticIdent IS_INSTANCE; private static final UStaticIdent CLAZZ; private static final UStaticIdent NEW_ARRAY; private static final UStaticIdent ENUM_VALUE_OF; private static final UStaticIdent AS_VARARGS; static { UTypeVar tVar = UTypeVar.create("T"); ANY_OF = UStaticIdent.create( Refaster.class.getCanonicalName(), "anyOf", UForAll.create( ImmutableList.of(tVar), UMethodType.create(tVar, UArrayType.create(tVar)))); IS_INSTANCE = UStaticIdent.create( Refaster.class.getCanonicalName(), "isInstance", UForAll.create( ImmutableList.of(tVar), UMethodType.create( UPrimitiveType.BOOLEAN, UClassType.create(Object.class.getCanonicalName())))); CLAZZ = UStaticIdent.create( Refaster.class.getCanonicalName(), "clazz", UForAll.create( ImmutableList.of(tVar), UMethodType.create(UClassType.create(Class.class.getCanonicalName(), tVar)))); NEW_ARRAY = UStaticIdent.create( Refaster.class.getCanonicalName(), "newArray", UForAll.create( ImmutableList.of(tVar), UMethodType.create(UArrayType.create(tVar), UPrimitiveType.INT))); UTypeVar eVar = UTypeVar.create( "E", UClassType.create(Enum.class.getCanonicalName(), UTypeVar.create("E"))); ENUM_VALUE_OF = UStaticIdent.create( Refaster.class.getCanonicalName(), "enumValueOf", UForAll.create( ImmutableList.of(eVar), UMethodType.create(eVar, UClassType.create(String.class.getCanonicalName())))); AS_VARARGS = UStaticIdent.create( Refaster.class.getCanonicalName(), "asVarargs", UForAll.create( ImmutableList.of(tVar), 
UMethodType.create(UArrayType.create(tVar), tVar))); } private static Tree getSingleExplicitTypeArgument(MethodInvocationTree tree) { if (tree.getTypeArguments().isEmpty()) { throw new IllegalArgumentException( "Methods in the Refaster class must be invoked with " + "an explicit type parameter; for example, 'Refaster.<T>isInstance(o)'."); } return Iterables.getOnlyElement(tree.getTypeArguments()); } static <T, U extends Unifiable<? super T>> boolean anyMatch( U toUnify, T target, Unifier unifier) { return toUnify.unify(target, unifier).findFirst().isPresent(); } @Override public UExpression visitMethodInvocation(MethodInvocationTree tree, Void v) { if (anyMatch(ANY_OF, tree.getMethodSelect(), new Unifier(context))) { return UAnyOf.create(templateExpressions(tree.getArguments())); } else if (anyMatch(IS_INSTANCE, tree.getMethodSelect(), new Unifier(context))) { return UInstanceOf.create( template(Iterables.getOnlyElement(tree.getArguments())), templateType(getSingleExplicitTypeArgument(tree))); } else if (anyMatch(CLAZZ, tree.getMethodSelect(), new Unifier(context))) { Tree typeArg = getSingleExplicitTypeArgument(tree); return UMemberSelect.create( templateType(typeArg), "class", UClassType.create("java.lang.Class", template(((JCTree) typeArg).type))); } else if (anyMatch(NEW_ARRAY, tree.getMethodSelect(), new Unifier(context))) { Tree typeArg = getSingleExplicitTypeArgument(tree); ExpressionTree lengthArg = Iterables.getOnlyElement(tree.getArguments()); return UNewArray.create(templateType(typeArg), ImmutableList.of(template(lengthArg)), null); } else if (anyMatch(ENUM_VALUE_OF, tree.getMethodSelect(), new Unifier(context))) { Tree typeArg = getSingleExplicitTypeArgument(tree); ExpressionTree strArg = Iterables.getOnlyElement(tree.getArguments()); return UMethodInvocation.create( UMemberSelect.create( templateType(typeArg), "valueOf", UMethodType.create( template(((JCTree) typeArg).type), UClassType.create("java.lang.String"))), template(strArg)); } else if 
(anyMatch(AS_VARARGS, tree.getMethodSelect(), new Unifier(context))) { ExpressionTree arg = Iterables.getOnlyElement(tree.getArguments()); checkArgument( hasAnnotation(ASTHelpers.getSymbol(arg), REPEATED_ANNOTATION, new VisitorState(context))); return template(arg); } Map<MethodSymbol, PlaceholderMethod> placeholderMethods = context.get(RefasterRuleBuilderScanner.PLACEHOLDER_METHODS_KEY); if (placeholderMethods != null && placeholderMethods.containsKey(ASTHelpers.getSymbol(tree))) { return UPlaceholderExpression.create( placeholderMethods.get(ASTHelpers.getSymbol(tree)), templateExpressions(tree.getArguments())); } else { return UMethodInvocation.create( templateTypeExpressions(tree.getTypeArguments()), template(tree.getMethodSelect()), templateExpressions(tree.getArguments())); } } @Override public UBinary visitBinary(BinaryTree tree, Void v) { return UBinary.create( tree.getKind(), template(tree.getLeftOperand()), template(tree.getRightOperand())); } @Override public UAssignOp visitCompoundAssignment(CompoundAssignmentTree tree, Void v) { return UAssignOp.create( template(tree.getVariable()), tree.getKind(), template(tree.getExpression())); } @Override public UUnary visitUnary(UnaryTree tree, Void v) { return UUnary.create(tree.getKind(), template(tree.getExpression())); } @Override public UExpression visitConditionalExpression(ConditionalExpressionTree tree, Void v) { return UConditional.create( template(tree.getCondition()), template(tree.getTrueExpression()), template(tree.getFalseExpression())); } @Override public UNewArray visitNewArray(NewArrayTree tree, Void v) { return UNewArray.create( (UExpression) template(tree.getType()), templateExpressions(tree.getDimensions()), templateExpressions(tree.getInitializers())); } @Override public UNewClass visitNewClass(NewClassTree tree, Void v) { return UNewClass.create( tree.getEnclosingExpression() == null ? 
null : template(tree.getEnclosingExpression()), templateTypeExpressions(tree.getTypeArguments()), template(tree.getIdentifier()), templateExpressions(tree.getArguments()), (tree.getClassBody() == null) ? null : visitClass(tree.getClassBody(), null)); } @Override public UClassDecl visitClass(ClassTree tree, Void v) { ImmutableList.Builder<UMethodDecl> decls = ImmutableList.builder(); for (MethodTree decl : Iterables.filter(tree.getMembers(), MethodTree.class)) { if (ASTHelpers.isGeneratedConstructor(decl)) { // skip synthetic constructors continue; } decls.add(visitMethod(decl, null)); } return UClassDecl.create(decls.build()); } @Override public UArrayTypeTree visitArrayType(ArrayTypeTree tree, Void v) { return UArrayTypeTree.create(templateType(tree.getType())); } @Override public UTypeApply visitParameterizedType(ParameterizedTypeTree tree, Void v) { return UTypeApply.create( templateType(tree.getType()), templateTypeExpressions(tree.getTypeArguments())); } @Override public UUnionType visitUnionType(UnionTypeTree tree, Void v) { return UUnionType.create(templateTypeExpressions(tree.getTypeAlternatives())); } @Override public UWildcard visitWildcard(WildcardTree tree, Void v) { return UWildcard.create( tree.getKind(), (tree.getBound() == null) ? 
null : templateType(tree.getBound())); } @Override public UIntersectionType visitIntersectionType(IntersectionTypeTree tree, Void v) { return UIntersectionType.create(templateTypeExpressions(tree.getBounds())); } @Override public UTypeParameter visitTypeParameter(TypeParameterTree tree, Void v) { return UTypeParameter.create( tree.getName(), templateTypeExpressions(tree.getBounds()), cast(templateExpressions(tree.getAnnotations()), UAnnotation.class)); } @Override public UTypeCast visitTypeCast(TypeCastTree tree, Void v) { return UTypeCast.create(templateType(tree.getType()), template(tree.getExpression())); } @Override public ULambda visitLambdaExpression(LambdaExpressionTree tree, Void v) { return ULambda.create( ((JCLambda) tree).paramKind, cast(templateStatements(tree.getParameters()), UVariableDecl.class), (UTree<?>) template(tree.getBody())); } @Override public UMemberReference visitMemberReference(MemberReferenceTree tree, Void v) { return UMemberReference.create( tree.getMode(), template(tree.getQualifierExpression()), tree.getName(), (tree.getTypeArguments() == null) ? 
null : templateExpressions(tree.getTypeArguments())); } @Override public UExpression visitIdentifier(IdentifierTree tree, Void v) { Symbol sym = ASTHelpers.getSymbol(tree); if (sym instanceof ClassSymbol classSymbol) { return UClassIdent.create(classSymbol); } else if (sym != null && isStatic(sym)) { return staticMember(sym); } else if (freeVariables.containsKey(tree.getName().toString())) { VarSymbol symbol = freeVariables.get(tree.getName().toString()); checkState(symbol == sym); UExpression ident = UFreeIdent.create(tree.getName()); Matches matches = ASTHelpers.getAnnotation(symbol, Matches.class); if (matches != null) { ident = UMatches.create(getValue(matches), /* positive= */ true, ident); } NotMatches notMatches = ASTHelpers.getAnnotation(symbol, NotMatches.class); if (notMatches != null) { ident = UMatches.create(getValue(notMatches), /* positive= */ false, ident); } OfKind hasKind = ASTHelpers.getAnnotation(symbol, OfKind.class); if (hasKind != null) { EnumSet<Kind> allowed = EnumSet.copyOf(Arrays.asList(hasKind.value())); ident = UOfKind.create(ident, ImmutableSet.copyOf(allowed)); } // @Repeated annotations need to be checked last. Repeated repeated = ASTHelpers.getAnnotation(symbol, Repeated.class); if (repeated != null) { ident = URepeated.create(tree.getName(), ident); } return ident; } if (sym == null) { return UTypeVarIdent.create(tree.getName()); } return switch (sym.getKind()) { case TYPE_PARAMETER -> UTypeVarIdent.create(tree.getName()); case METHOD -> method(sym); default -> ULocalVarIdent.create(tree.getName()); }; } /** * Returns the {@link Class} instance for the {@link Matcher} associated with the provided {@link * Matches} annotation. This roundabout solution is recommended and explained by {@link * Element#getAnnotation(Class)}. */ static Class<? extends Matcher<? 
super ExpressionTree>> getValue(Matches matches) { String name; try { var unused = matches.value(); throw new RuntimeException("unreachable"); } catch (MirroredTypeException e) { DeclaredType type = (DeclaredType) e.getTypeMirror(); name = ((TypeElement) type.asElement()).getQualifiedName().toString(); } try { return asSubclass(Class.forName(name), new TypeToken<Matcher<? super ExpressionTree>>() {}); } catch (ClassNotFoundException | ClassCastException e) { throw new RuntimeException(e); } } /** * Returns the {@link Class} instance for the {@link Matcher} associated with the provided {@link * NotMatches} annotation. This roundabout solution is recommended and explained by {@link * Element#getAnnotation(Class)}. */ static Class<? extends Matcher<? super ExpressionTree>> getValue(NotMatches matches) { String name; try { var unused = matches.value(); throw new RuntimeException("unreachable"); } catch (MirroredTypeException e) { DeclaredType type = (DeclaredType) e.getTypeMirror(); name = ((TypeElement) type.asElement()).getQualifiedName().toString(); } try { return asSubclass(Class.forName(name), new TypeToken<Matcher<? super ExpressionTree>>() {}); } catch (ClassNotFoundException | ClassCastException e) { throw new RuntimeException(e); } } /** * Similar to {@link Class#asSubclass(Class)}, but it accepts a {@link TypeToken} so it handles * generics better. */ @SuppressWarnings("unchecked") private static <T> Class<? extends T> asSubclass(Class<?> klass, TypeToken<T> token) { if (!token.isSupertypeOf(klass)) { throw new ClassCastException(klass + " is not assignable to " + token); } return (Class<? extends T>) klass; } public UStatement template(StatementTree tree) { return (UStatement) tree.accept(this, null); } private @Nullable ImmutableList<UStatement> templateStatements( @Nullable List<? 
extends StatementTree> statements) { if (statements == null) { return null; } ImmutableList.Builder<UStatement> builder = ImmutableList.builder(); for (StatementTree statement : statements) { builder.add(template(statement)); } return builder.build(); } @Override public UTry visitTry(TryTree tree, Void v) { @SuppressWarnings({"unchecked", "rawtypes"}) ImmutableList<UTree<?>> resources = cast(templateTrees(tree.getResources()), (Class<UTree<?>>) (Class) UTree.class); UBlock block = visitBlock(tree.getBlock(), null); ImmutableList.Builder<UCatch> catchesBuilder = ImmutableList.builder(); for (CatchTree catchTree : tree.getCatches()) { catchesBuilder.add(visitCatch(catchTree, null)); } UBlock finallyBlock = (tree.getFinallyBlock() == null) ? null : visitBlock(tree.getFinallyBlock(), null); return UTry.create(resources, block, catchesBuilder.build(), finallyBlock); } @Override public UCatch visitCatch(CatchTree tree, Void v) { return UCatch.create( visitVariable(tree.getParameter(), null), visitBlock(tree.getBlock(), null)); } private @Nullable PlaceholderMethod placeholder(@Nullable ExpressionTree expr) { Map<MethodSymbol, PlaceholderMethod> placeholderMethods = context.get(RefasterRuleBuilderScanner.PLACEHOLDER_METHODS_KEY); return (placeholderMethods != null && expr != null) ? 
placeholderMethods.get(ASTHelpers.getSymbol(expr)) : null; } @Override public UStatement visitExpressionStatement(ExpressionStatementTree tree, Void v) { PlaceholderMethod placeholderMethod = placeholder(tree.getExpression()); if (placeholderMethod != null && placeholderMethod.returnType().equals(UPrimitiveType.VOID)) { MethodInvocationTree invocation = (MethodInvocationTree) tree.getExpression(); return UPlaceholderStatement.create( placeholderMethod, templateExpressions(invocation.getArguments()), ControlFlowVisitor.Result.NEVER_EXITS); } return UExpressionStatement.create(template(tree.getExpression())); } @Override public UStatement visitReturn(ReturnTree tree, Void v) { PlaceholderMethod placeholderMethod = placeholder(tree.getExpression()); if (placeholderMethod != null) { MethodInvocationTree invocation = (MethodInvocationTree) tree.getExpression(); return UPlaceholderStatement.create( placeholderMethod, templateExpressions(invocation.getArguments()), ControlFlowVisitor.Result.ALWAYS_RETURNS); } return UReturn.create((tree.getExpression() == null) ? null : template(tree.getExpression())); } @Override public UWhileLoop visitWhileLoop(WhileLoopTree tree, Void v) { return UWhileLoop.create(template(tree.getCondition()), template(tree.getStatement())); } @Override public UVariableDecl visitVariable(VariableTree tree, Void v) { return UVariableDecl.create( tree.getName(), templateType(tree.getType()), (tree.getInitializer() == null) ? null : template(tree.getInitializer())); } @Override public USkip visitEmptyStatement(EmptyStatementTree tree, Void v) { return USkip.INSTANCE; } @Override public UForLoop visitForLoop(ForLoopTree tree, Void v) { return UForLoop.create( templateStatements(tree.getInitializer()), (tree.getCondition() == null) ? 
null : template(tree.getCondition()), cast(templateStatements(tree.getUpdate()), UExpressionStatement.class), template(tree.getStatement())); } @Override public ULabeledStatement visitLabeledStatement(LabeledStatementTree tree, Void v) { return ULabeledStatement.create(tree.getLabel(), template(tree.getStatement())); } @Override public UBreak visitBreak(BreakTree tree, Void v) { return UBreak.create(tree.getLabel()); } @Override public UContinue visitContinue(ContinueTree tree, Void v) { return UContinue.create(tree.getLabel()); } @Override public UBlock visitBlock(BlockTree tree, Void v) { return UBlock.create(templateStatements(tree.getStatements())); } @Override public UThrow visitThrow(ThrowTree tree, Void v) { return UThrow.create(template(tree.getExpression())); } @Override public UDoWhileLoop visitDoWhileLoop(DoWhileLoopTree tree, Void v) { return UDoWhileLoop.create(template(tree.getStatement()), template(tree.getCondition())); } @Override public UEnhancedForLoop visitEnhancedForLoop(EnhancedForLoopTree tree, Void v) { return UEnhancedForLoop.create( visitVariable(tree.getVariable(), null), template(tree.getExpression()), template(tree.getStatement())); } @Override public USynchronized visitSynchronized(SynchronizedTree tree, Void v) { return USynchronized.create(template(tree.getExpression()), visitBlock(tree.getBlock(), null)); } @Override public UIf visitIf(IfTree tree, Void v) { return UIf.create( template(tree.getCondition()), template(tree.getThenStatement()), (tree.getElseStatement() == null) ? null : template(tree.getElseStatement())); } @Override public UAssert visitAssert(AssertTree tree, Void v) { return UAssert.create( template(tree.getCondition()), (tree.getDetail() == null) ? 
null : template(tree.getDetail())); } @Override protected UTree<?> defaultAction(Tree tree, Void v) { throw new IllegalArgumentException( "Refaster does not currently support syntax " + tree.getClass()); } public UType template(Type type) { return type.accept(typeTemplater, null); } List<UType> templateTypes(Iterable<? extends Type> types) { ImmutableList.Builder<UType> builder = ImmutableList.builder(); for (Type ty : types) { builder.add(template(ty)); } return builder.build(); } private final Type.Visitor<UType, Void> typeTemplater = new Types.SimpleVisitor<UType, Void>() { private final Map<TypeSymbol, UTypeVar> typeVariables = new HashMap<>(); @Override public UType visitType(Type type, Void v) { if (UPrimitiveType.isDeFactoPrimitive(type.getKind())) { return UPrimitiveType.create(type.getKind()); } else { throw new IllegalArgumentException( "Refaster does not currently support syntax " + type.getKind()); } } @Override public UArrayType visitArrayType(ArrayType type, Void v) { return UArrayType.create(type.getComponentType().accept(this, null)); } @Override public UMethodType visitMethodType(MethodType type, Void v) { return UMethodType.create( type.getReturnType().accept(this, null), templateTypes(type.getParameterTypes())); } @Override public UType visitClassType(ClassType type, Void v) { if (type instanceof IntersectionClassType intersectionClassType) { return UIntersectionClassType.create( templateTypes(intersectionClassType.getComponents())); } return UClassType.create( type.tsym.getQualifiedName().toString(), templateTypes(type.getTypeArguments())); } @Override public UWildcardType visitWildcardType(WildcardType type, Void v) { return UWildcardType.create(type.kind, type.type.accept(this, null)); } @Override public UTypeVar visitTypeVar(TypeVar type, Void v) { /* * In order to handle recursively bounded type variables without a stack overflow, we first * cache a type var with no bounds, then we template the bounds. 
*/ TypeSymbol tsym = type.asElement(); if (typeVariables.containsKey(tsym)) { return typeVariables.get(tsym); } UTypeVar var = UTypeVar.create(tsym.getSimpleName().toString()); typeVariables.put( tsym, var); // so the type variable can be used recursively in the bounds var.setLowerBound(type.getLowerBound().accept(this, null)); var.setUpperBound(type.getUpperBound().accept(this, null)); return var; } @Override public UForAll visitForAll(ForAll type, Void v) { ImmutableList<UTypeVar> vars = cast(templateTypes(type.getTypeVariables()), UTypeVar.class); return UForAll.create(vars, type.qtype.accept(this, null)); } }; @SuppressWarnings("unchecked") public static ImmutableClassToInstanceMap<Annotation> annotationMap(Symbol symbol) { ImmutableClassToInstanceMap.Builder<Annotation> builder = ImmutableClassToInstanceMap.builder(); for (Compound compound : symbol.getAnnotationMirrors()) { String annotationClassName = classNameFrom((TypeElement) compound.getAnnotationType().asElement()); try { Class<? extends Annotation> annotationClazz = Class.forName(annotationClassName).asSubclass(Annotation.class); builder.put( (Class) annotationClazz, AnnotationProxyMaker.generateAnnotation(compound, annotationClazz)); } catch (ClassNotFoundException e) { String friendlyMessage = "Tried to instantiate an instance of the annotation " + annotationClassName + " while processing " + symbol.getSimpleName() + ", but the annotation class file was not present on the classpath."; throw new LinkageError(friendlyMessage, e); } } return builder.build(); } // Class.forName() needs nested classes as "foo.Bar$Baz$Quux", not "foo.Bar.Baz.Quux" // (which is what getQualifiedName() returns). private static String classNameFrom(TypeElement type) { // Get the full type name (e.g. "foo.Bar.Baz.Quux") before walking up the hierarchy. String typeName = type.getQualifiedName().toString(); // Find outermost enclosing type (e.g. "foo.Bar" in our example), possibly several levels up. 
// Packages enclose types, so we cannot just wait until we hit null. while (type.getEnclosingElement().getKind() == ElementKind.CLASS) { type = (TypeElement) type.getEnclosingElement(); } // Start with outermost class name and append remainder of full type name with '.' -> '$' String className = type.getQualifiedName().toString(); return className + typeName.substring(className.length()).replace('.', '$'); } }
apache/hadoop
37,271
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.service.webapp; import com.google.inject.Inject; import com.google.inject.Singleton; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Sets; import org.apache.hadoop.util.VersionInfo; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.api.records.Component; import org.apache.hadoop.yarn.service.api.records.ComponentContainers; import org.apache.hadoop.yarn.service.api.records.ComponentState; import org.apache.hadoop.yarn.service.api.records.Container; import org.apache.hadoop.yarn.service.api.records.ContainerState; import org.apache.hadoop.yarn.service.api.records.Service; import org.apache.hadoop.yarn.service.api.records.ServiceState; import org.apache.hadoop.yarn.service.api.records.ServiceStatus; import org.apache.hadoop.yarn.service.client.ServiceClient; import org.apache.hadoop.yarn.service.conf.RestApiConstants; import 
org.apache.hadoop.yarn.service.utils.ServiceApiUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.thirdparty.com.google.common.base.Joiner; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.*; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import java.io.FileNotFoundException; import java.io.IOException; import java.lang.reflect.UndeclaredThrowableException; import java.security.PrivilegedExceptionAction; import java.util.*; import java.util.stream.Collectors; import static org.apache.hadoop.yarn.service.api.records.ServiceState.ACCEPTED; import static org.apache.hadoop.yarn.service.api.records.ServiceState.CANCEL_UPGRADING; import static org.apache.hadoop.yarn.service.conf.RestApiConstants.*; import static org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes.*; /** * The rest API endpoints for users to manage services on YARN. */ @Singleton @Path(CONTEXT_ROOT) public class ApiServer { public ApiServer() { super(); } @Inject public ApiServer(Configuration conf) { super(); } private static final Logger LOG = LoggerFactory.getLogger(ApiServer.class); private static Configuration YARN_CONFIG = new YarnConfiguration(); private ServiceClient serviceClientUnitTest; private boolean unitTest = false; static { init(); } // initialize all the common resources - order is important private static void init() { } @GET @Path(VERSION) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" }) public Response getVersion() { String version = VersionInfo.getBuildVersion(); LOG.info(version); return Response.ok("{ \"hadoop_version\": \"" + version + "\"}").build(); } @POST @Path(SERVICE_ROOT_PATH) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" }) public Response createService(@Context HttpServletRequest request, Service service) { 
ServiceStatus serviceStatus = new ServiceStatus(); try { UserGroupInformation ugi = getProxyUser(request); LOG.info("POST: createService = {} user = {}", service, ugi); if(service.getState()==ServiceState.STOPPED) { ugi.doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws YarnException, IOException { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); sc.actionBuild(service); } finally { sc.close(); } return null; } }); serviceStatus.setDiagnostics("Service " + service.getName() + " version " + service.getVersion() + " saved."); } else { ApplicationId applicationId = ugi .doAs(new PrivilegedExceptionAction<ApplicationId>() { @Override public ApplicationId run() throws IOException, YarnException { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); ApplicationId applicationId = sc.actionCreate(service); return applicationId; } finally { sc.close(); } } }); serviceStatus.setDiagnostics("Application ID: " + applicationId); } serviceStatus.setState(ACCEPTED); serviceStatus.setUri( CONTEXT_ROOT + SERVICE_ROOT_PATH + "/" + service .getName()); return formatResponse(Status.ACCEPTED, serviceStatus); } catch (AccessControlException e) { serviceStatus.setDiagnostics(e.getMessage()); return formatResponse(Status.FORBIDDEN, e.getCause().getMessage()); } catch (IllegalArgumentException e) { return formatResponse(Status.BAD_REQUEST, e.getMessage()); } catch (IOException | InterruptedException e) { String message = "Failed to create service " + service.getName() + ": {}"; LOG.error(message, e); return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { String message = "Failed to create service " + service.getName() + ": {}"; LOG.error(message, e); if (e.getCause().getMessage().contains("already exists")) { message = "Service name " + service.getName() + " is already taken."; } else { message = e.getCause().getMessage(); } return 
formatResponse(Status.INTERNAL_SERVER_ERROR, message); } } @GET @Path(SERVICE_PATH) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" }) public Response getService(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String appName) { ServiceStatus serviceStatus = new ServiceStatus(); try { if (appName == null) { throw new IllegalArgumentException("Service name cannot be null."); } UserGroupInformation ugi = getProxyUser(request); LOG.info("GET: getService for appName = {} user = {}", appName, ugi); Service app = getServiceFromClient(ugi, appName); return Response.ok(app).build(); } catch (AccessControlException e) { return formatResponse(Status.FORBIDDEN, e.getMessage()); } catch (IllegalArgumentException e) { serviceStatus.setDiagnostics(e.getMessage()); serviceStatus.setCode(ERROR_CODE_APP_NAME_INVALID); return Response.status(Status.NOT_FOUND).entity(serviceStatus) .build(); } catch (FileNotFoundException e) { serviceStatus.setDiagnostics("Service " + appName + " not found"); serviceStatus.setCode(ERROR_CODE_APP_NAME_INVALID); return Response.status(Status.NOT_FOUND).entity(serviceStatus) .build(); } catch (IOException | InterruptedException e) { LOG.error("Get service failed: {}", e); return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { LOG.error("Get service failed: {}", e); return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getCause().getMessage()); } } @DELETE @Path(SERVICE_PATH) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" }) public Response deleteService(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String appName) { try { if (appName == null) { throw new IllegalArgumentException("Service name can not be null."); } UserGroupInformation ugi = getProxyUser(request); LOG.info("DELETE: deleteService for appName = {} user = {}", appName, ugi); return stopService(appName, 
true, ugi); } catch (AccessControlException e) { return formatResponse(Status.FORBIDDEN, e.getMessage()); } catch (IllegalArgumentException e) { return formatResponse(Status.BAD_REQUEST, e.getMessage()); } catch (UndeclaredThrowableException e) { LOG.error("Fail to stop service: {}", e); return formatResponse(Status.BAD_REQUEST, e.getCause().getMessage()); } catch (YarnException | FileNotFoundException e) { return formatResponse(Status.NOT_FOUND, e.getMessage()); } catch (Exception e) { LOG.error("Fail to stop service: {}", e); return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage()); } } private Response stopService(String appName, boolean destroy, final UserGroupInformation ugi) throws Exception { int result = ugi.doAs(new PrivilegedExceptionAction<Integer>() { @Override public Integer run() throws Exception { int result = 0; ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); Exception stopException = null; try { result = sc.actionStop(appName, destroy); if (result == EXIT_SUCCESS) { LOG.info("Successfully stopped service {}", appName); } } catch (Exception e) { LOG.info("Got exception stopping service", e); stopException = e; } if (destroy) { result = sc.actionDestroy(appName); if (result == EXIT_SUCCESS) { LOG.info("Successfully deleted service {}", appName); } } else { if (stopException != null) { throw stopException; } } } finally { sc.close(); } return result; } }); ServiceStatus serviceStatus = new ServiceStatus(); if (destroy) { if (result == EXIT_SUCCESS) { serviceStatus.setDiagnostics("Successfully destroyed service " + appName); } else { if (result == EXIT_NOT_FOUND) { serviceStatus .setDiagnostics("Service " + appName + " doesn't exist"); return formatResponse(Status.BAD_REQUEST, serviceStatus); } else { serviceStatus .setDiagnostics("Service " + appName + " error cleaning up " + "registry"); return formatResponse(Status.INTERNAL_SERVER_ERROR, serviceStatus); } } } else { if (result == EXIT_COMMAND_ARGUMENT_ERROR) 
{ serviceStatus .setDiagnostics("Service " + appName + " is already stopped"); return formatResponse(Status.BAD_REQUEST, serviceStatus); } else { serviceStatus.setDiagnostics("Successfully stopped service " + appName); } } return formatResponse(Status.OK, serviceStatus); } @PUT @Path(COMPONENTS_PATH) @Consumes({MediaType.APPLICATION_JSON}) @Produces({RestApiConstants.MEDIA_TYPE_JSON_UTF8, MediaType.TEXT_PLAIN}) public Response updateComponents(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String serviceName, List<Component> requestComponents) { try { if (requestComponents == null || requestComponents.isEmpty()) { throw new YarnException("No components provided."); } UserGroupInformation ugi = getProxyUser(request); Set<String> compNamesToUpgrade = new HashSet<>(); requestComponents.forEach(reqComp -> { if (reqComp.getState() != null && reqComp.getState().equals(ComponentState.UPGRADING)) { compNamesToUpgrade.add(reqComp.getName()); } }); LOG.info("PUT: upgrade components {} for service {} " + "user = {}", compNamesToUpgrade, serviceName, ugi); return processComponentsUpgrade(ugi, serviceName, compNamesToUpgrade); } catch (AccessControlException e) { return formatResponse(Response.Status.FORBIDDEN, e.getMessage()); } catch (YarnException e) { return formatResponse(Response.Status.BAD_REQUEST, e.getMessage()); } catch (IOException | InterruptedException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getCause().getMessage()); } } @PUT @Path(COMPONENT_PATH) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8", MediaType.TEXT_PLAIN }) public Response updateComponent(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String appName, @PathParam(COMPONENT_NAME) String componentName, Component component) { try { if (component == null) { throw new 
YarnException("No component data provided"); } if (component.getName() != null && !component.getName().equals(componentName)) { String msg = "Component name in the request object (" + component.getName() + ") does not match that in the URI path (" + componentName + ")"; throw new YarnException(msg); } UserGroupInformation ugi = getProxyUser(request); if (component.getState() != null && component.getState().equals(ComponentState.UPGRADING)) { LOG.info("PUT: upgrade component {} for service {} " + "user = {}", component.getName(), appName, ugi); return processComponentsUpgrade(ugi, appName, Sets.newHashSet(componentName)); } if (component.getNumberOfContainers() == null) { throw new YarnException("No container count provided"); } if (component.getNumberOfContainers() < 0) { String message = "Invalid number of containers specified " + component.getNumberOfContainers(); throw new YarnException(message); } Map<String, Long> original = ugi .doAs(new PrivilegedExceptionAction<Map<String, Long>>() { @Override public Map<String, Long> run() throws YarnException, IOException { ServiceClient sc = new ServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); Map<String, Long> original = sc.flexByRestService(appName, Collections.singletonMap(componentName, component.getNumberOfContainers())); return original; } finally { sc.close(); } } }); ServiceStatus status = new ServiceStatus(); status.setDiagnostics( "Updating component (" + componentName + ") size from " + original .get(componentName) + " to " + component.getNumberOfContainers()); return formatResponse(Status.OK, status); } catch (AccessControlException e) { return formatResponse(Status.FORBIDDEN, e.getMessage()); } catch (YarnException e) { return formatResponse(Status.BAD_REQUEST, e.getMessage()); } catch (IOException | InterruptedException e) { return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { return formatResponse(Status.INTERNAL_SERVER_ERROR, 
e.getCause().getMessage()); } } @PUT @Path(SERVICE_PATH) @Consumes({ MediaType.APPLICATION_JSON }) @Produces({ MediaType.APPLICATION_JSON + ";charset=utf-8" }) public Response updateService(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String appName, Service updateServiceData) { try { UserGroupInformation ugi = getProxyUser(request); LOG.info("PUT: updateService for app = {} with data = {} user = {}", appName, updateServiceData, ugi); // Ignore the app name provided in updateServiceData and always use // appName path param updateServiceData.setName(appName); if (updateServiceData.getState() != null && updateServiceData.getState() == ServiceState.FLEX) { return flexService(updateServiceData, ugi); } // For STOP the app should be running. If already stopped then this // operation will be a no-op. For START it should be in stopped state. // If already running then this operation will be a no-op. if (updateServiceData.getState() != null && updateServiceData.getState() == ServiceState.STOPPED) { return stopService(appName, false, ugi); } // If a START is requested if (updateServiceData.getState() != null && updateServiceData.getState() == ServiceState.STARTED) { return startService(appName, ugi); } // If an UPGRADE is requested if (updateServiceData.getState() != null && ( updateServiceData.getState() == ServiceState.UPGRADING || updateServiceData.getState() == ServiceState.UPGRADING_AUTO_FINALIZE) || updateServiceData.getState() == ServiceState.EXPRESS_UPGRADING) { return upgradeService(updateServiceData, ugi); } // If CANCEL_UPGRADING is requested if (updateServiceData.getState() != null && updateServiceData.getState() == CANCEL_UPGRADING) { return cancelUpgradeService(appName, ugi); } // If new lifetime value specified then update it if (updateServiceData.getLifetime() != null && updateServiceData.getLifetime() > 0) { return updateLifetime(appName, updateServiceData, ugi); } for (Component c : updateServiceData.getComponents()) { if 
(c.getDecommissionedInstances().size() > 0) { return decommissionInstances(updateServiceData, ugi); } } } catch (UndeclaredThrowableException e) { return formatResponse(Status.BAD_REQUEST, e.getCause().getMessage()); } catch (AccessControlException e) { return formatResponse(Status.FORBIDDEN, e.getMessage()); } catch (FileNotFoundException e) { String message = "Application is not found app: " + appName; LOG.error(message, e); return formatResponse(Status.NOT_FOUND, e.getMessage()); } catch (YarnException e) { LOG.error(e.getMessage(), e); return formatResponse(Status.NOT_FOUND, e.getMessage()); } catch (Exception e) { String message = "Error while performing operation for app: " + appName; LOG.error(message, e); return formatResponse(Status.INTERNAL_SERVER_ERROR, e.getMessage()); } // If nothing happens consider it a no-op return Response.status(Status.NO_CONTENT).build(); } @PUT @Path(COMP_INSTANCE_LONG_PATH) @Consumes({MediaType.APPLICATION_JSON}) @Produces({RestApiConstants.MEDIA_TYPE_JSON_UTF8, MediaType.TEXT_PLAIN}) public Response updateComponentInstance(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String serviceName, @PathParam(COMPONENT_NAME) String componentName, @PathParam(COMP_INSTANCE_NAME) String compInstanceName, Container reqContainer) { try { UserGroupInformation ugi = getProxyUser(request); LOG.info("PUT: update component instance {} for component = {}" + " service = {} user = {}", compInstanceName, componentName, serviceName, ugi); if (reqContainer == null) { throw new YarnException("No container data provided."); } Service service = getServiceFromClient(ugi, serviceName); Component component = service.getComponent(componentName); if (component == null) { throw new YarnException(String.format( "The component name in the URI path (%s) is invalid.", componentName)); } Container liveContainer = component.getComponentInstance( compInstanceName); if (liveContainer == null) { throw new YarnException(String.format( "The component (%s) 
does not have a component instance (%s).", componentName, compInstanceName)); } if (reqContainer.getState() != null && reqContainer.getState().equals(ContainerState.UPGRADING)) { return processContainersUpgrade(ugi, service, Lists.newArrayList(liveContainer)); } } catch (AccessControlException e) { return formatResponse(Response.Status.FORBIDDEN, e.getMessage()); } catch (YarnException e) { return formatResponse(Response.Status.BAD_REQUEST, e.getMessage()); } catch (IOException | InterruptedException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getCause().getMessage()); } return Response.status(Status.NO_CONTENT).build(); } @PUT @Path(COMP_INSTANCES_PATH) @Consumes({MediaType.APPLICATION_JSON}) @Produces({RestApiConstants.MEDIA_TYPE_JSON_UTF8, MediaType.TEXT_PLAIN}) public Response updateComponentInstances(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String serviceName, List<Container> requestContainers) { try { if (requestContainers == null || requestContainers.isEmpty()) { throw new YarnException("No containers provided."); } UserGroupInformation ugi = getProxyUser(request); List<String> toUpgrade = new ArrayList<>(); for (Container reqContainer : requestContainers) { if (reqContainer.getState() != null && reqContainer.getState().equals(ContainerState.UPGRADING)) { toUpgrade.add(reqContainer.getComponentInstanceName()); } } if (!toUpgrade.isEmpty()) { Service service = getServiceFromClient(ugi, serviceName); LOG.info("PUT: upgrade component instances {} for service = {} " + "user = {}", toUpgrade, serviceName, ugi); List<Container> liveContainers = ServiceApiUtil .getLiveContainers(service, toUpgrade); return processContainersUpgrade(ugi, service, liveContainers); } } catch (AccessControlException e) { return formatResponse(Response.Status.FORBIDDEN, e.getMessage()); } catch (YarnException e) { return 
formatResponse(Response.Status.BAD_REQUEST, e.getMessage()); } catch (IOException | InterruptedException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getCause().getMessage()); } return Response.status(Status.NO_CONTENT).build(); } @GET @Path(COMP_INSTANCES_PATH) @Produces({RestApiConstants.MEDIA_TYPE_JSON_UTF8}) public Response getComponentInstances(@Context HttpServletRequest request, @PathParam(SERVICE_NAME) String serviceName, @QueryParam(PARAM_COMP_NAME) List<String> componentNames, @QueryParam(PARAM_VERSION) String version, @QueryParam(PARAM_CONTAINER_STATE) List<String> containerStates) { try { UserGroupInformation ugi = getProxyUser(request); LOG.info("GET: component instances for service = {}, compNames in {}, " + "version = {}, containerStates in {}, user = {}", serviceName, Objects.toString(componentNames, "[]"), Objects.toString(version, ""), Objects.toString(containerStates, "[]"), ugi); List<ContainerState> containerStatesDe = containerStates.stream().map( ContainerState::valueOf).collect(Collectors.toList()); return Response.ok(getContainers(ugi, serviceName, componentNames, version, containerStatesDe)).build(); } catch (IllegalArgumentException iae) { return formatResponse(Status.BAD_REQUEST, "valid container states are: " + Arrays.toString(ContainerState.values())); } catch (AccessControlException e) { return formatResponse(Response.Status.FORBIDDEN, e.getMessage()); } catch (IOException | InterruptedException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); } catch (UndeclaredThrowableException e) { return formatResponse(Response.Status.INTERNAL_SERVER_ERROR, e.getCause().getMessage()); } } private Response flexService(Service service, UserGroupInformation ugi) throws IOException, InterruptedException { String appName = service.getName(); Response response = 
Response.status(Status.BAD_REQUEST).build(); Map<String, String> componentCountStrings = new HashMap<String, String>(); for (Component c : service.getComponents()) { componentCountStrings.put(c.getName(), c.getNumberOfContainers().toString()); } Integer result = ugi.doAs(new PrivilegedExceptionAction<Integer>() { @Override public Integer run() throws YarnException, IOException { int result = 0; ServiceClient sc = new ServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); result = sc .actionFlex(appName, componentCountStrings); return Integer.valueOf(result); } finally { sc.close(); } } }); if (result == EXIT_SUCCESS) { String message = "Service " + appName + " is successfully flexed."; LOG.info(message); ServiceStatus status = new ServiceStatus(); status.setDiagnostics(message); status.setState(ServiceState.ACCEPTED); response = formatResponse(Status.ACCEPTED, status); } return response; } private Response updateLifetime(String appName, Service updateAppData, final UserGroupInformation ugi) throws IOException, InterruptedException { String newLifeTime = ugi.doAs(new PrivilegedExceptionAction<String>() { @Override public String run() throws YarnException, IOException { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); String newLifeTime = sc.updateLifetime(appName, updateAppData.getLifetime()); return newLifeTime; } finally { sc.close(); } } }); ServiceStatus status = new ServiceStatus(); status.setDiagnostics( "Service (" + appName + ")'s lifeTime is updated to " + newLifeTime + ", " + updateAppData.getLifetime() + " seconds remaining"); return formatResponse(Status.OK, status); } private Response startService(String appName, final UserGroupInformation ugi) throws IOException, InterruptedException { ApplicationId appId = ugi.doAs(new PrivilegedExceptionAction<ApplicationId>() { @Override public ApplicationId run() throws YarnException, IOException { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); 
ApplicationId appId = sc.actionStartAndGetId(appName); return appId; } finally { sc.close(); } } }); LOG.info("Successfully started service " + appName); ServiceStatus status = new ServiceStatus(); status.setDiagnostics( "Service " + appName + " is successfully started with ApplicationId: " + appId); status.setState(ServiceState.ACCEPTED); return formatResponse(Status.OK, status); } private Response upgradeService(Service service, final UserGroupInformation ugi) throws IOException, InterruptedException { ServiceStatus status = new ServiceStatus(); ugi.doAs((PrivilegedExceptionAction<Void>) () -> { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); if (service.getState().equals(ServiceState.EXPRESS_UPGRADING)) { sc.actionUpgradeExpress(service); } else { sc.initiateUpgrade(service); } } finally { sc.close(); } return null; }); LOG.info("Service {} version {} upgrade initialized", service.getName(), service.getVersion()); status.setDiagnostics("Service " + service.getName() + " version " + service.getVersion() + " saved."); status.setState(ServiceState.ACCEPTED); return formatResponse(Status.ACCEPTED, status); } private Response cancelUpgradeService(String serviceName, final UserGroupInformation ugi) throws IOException, InterruptedException { int result = ugi.doAs((PrivilegedExceptionAction<Integer>) () -> { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); int exitCode = sc.actionCancelUpgrade(serviceName); return exitCode; } finally { sc.close(); } }); if (result == EXIT_SUCCESS) { ServiceStatus status = new ServiceStatus(); LOG.info("Service {} cancelling upgrade", serviceName); status.setDiagnostics("Service " + serviceName + " cancelling upgrade."); status.setState(ServiceState.ACCEPTED); return formatResponse(Status.ACCEPTED, status); } return Response.status(Status.BAD_REQUEST).build(); } private Response processComponentsUpgrade(UserGroupInformation ugi, String serviceName, Set<String> compNames) throws 
YarnException, IOException, InterruptedException { Service service = getServiceFromClient(ugi, serviceName); if (!service.getState().equals(ServiceState.UPGRADING) && !service.getState().equals(ServiceState.UPGRADING_AUTO_FINALIZE)) { throw new YarnException( String.format("The upgrade of service %s has not been initiated.", service.getName())); } List<Container> containersToUpgrade = ServiceApiUtil .validateAndResolveCompsUpgrade(service, compNames); Integer result = invokeContainersUpgrade(ugi, service, containersToUpgrade); if (result == EXIT_SUCCESS) { ServiceStatus status = new ServiceStatus(); status.setDiagnostics( "Upgrading components " + Joiner.on(',').join(compNames) + "."); return formatResponse(Response.Status.ACCEPTED, status); } // If result is not a success, consider it a no-op return Response.status(Response.Status.NO_CONTENT).build(); } private Response processContainersUpgrade(UserGroupInformation ugi, Service service, List<Container> containers) throws YarnException, IOException, InterruptedException { if (!service.getState().equals(ServiceState.UPGRADING) && !service.getState().equals(ServiceState.UPGRADING_AUTO_FINALIZE)) { throw new YarnException( String.format("The upgrade of service %s has not been initiated.", service.getName())); } ServiceApiUtil.validateInstancesUpgrade(containers); Integer result = invokeContainersUpgrade(ugi, service, containers); if (result == EXIT_SUCCESS) { ServiceStatus status = new ServiceStatus(); status.setDiagnostics( "Upgrading component instances " + containers.stream() .map(Container::getId).collect(Collectors.joining(",")) + "."); return formatResponse(Response.Status.ACCEPTED, status); } // If result is not a success, consider it a no-op return Response.status(Response.Status.NO_CONTENT).build(); } private int invokeContainersUpgrade(UserGroupInformation ugi, Service service, List<Container> containers) throws IOException, InterruptedException { return ugi.doAs((PrivilegedExceptionAction<Integer>) () -> { 
int result1; ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); result1 = sc.actionUpgrade(service, containers); } finally { sc.close(); } return result1; }); } private Response decommissionInstances(Service service, UserGroupInformation ugi) throws IOException, InterruptedException { String appName = service.getName(); Response response = Response.status(Status.BAD_REQUEST).build(); List<String> instances = new ArrayList<>(); for (Component c : service.getComponents()) { instances.addAll(c.getDecommissionedInstances()); } Integer result = ugi.doAs(new PrivilegedExceptionAction<Integer>() { @Override public Integer run() throws YarnException, IOException { int result = 0; ServiceClient sc = new ServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); result = sc .actionDecommissionInstances(appName, instances); return Integer.valueOf(result); } finally { sc.close(); } } }); if (result == EXIT_SUCCESS) { String message = "Service " + appName + " has successfully " + "decommissioned instances."; LOG.info(message); ServiceStatus status = new ServiceStatus(); status.setDiagnostics(message); status.setState(ServiceState.ACCEPTED); response = formatResponse(Status.ACCEPTED, status); } return response; } private Service getServiceFromClient(UserGroupInformation ugi, String serviceName) throws IOException, InterruptedException { return ugi.doAs((PrivilegedExceptionAction<Service>) () -> { ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); Service app1 = sc.getStatus(serviceName); return app1; } finally { sc.close(); } }); } private ComponentContainers[] getContainers(UserGroupInformation ugi, String serviceName, List<String> componentNames, String version, List<ContainerState> containerStates) throws IOException, InterruptedException { return ugi.doAs((PrivilegedExceptionAction<ComponentContainers[]>) () -> { ComponentContainers[] result; ServiceClient sc = getServiceClient(); try { sc.init(YARN_CONFIG); sc.start(); 
result = sc.getContainers(serviceName, componentNames, version, containerStates); return result; } finally { sc.close(); } }); } /** * Used by negative test case. * * @param mockServerClient - A mocked version of ServiceClient */ public void setServiceClient(ServiceClient mockServerClient) { serviceClientUnitTest = mockServerClient; unitTest = true; } private ServiceClient getServiceClient() { if (unitTest) { return serviceClientUnitTest; } else { return new ServiceClient(); } } /** * Configure impersonation callback. * * @param request - web request * @return - configured UGI class for proxy callback * @throws IOException - if user is not login. */ private UserGroupInformation getProxyUser(HttpServletRequest request) throws AccessControlException { UserGroupInformation proxyUser; UserGroupInformation ugi; String remoteUser = request.getRemoteUser(); try { if (UserGroupInformation.isSecurityEnabled()) { proxyUser = UserGroupInformation.getLoginUser(); ugi = UserGroupInformation.createProxyUser(remoteUser, proxyUser); } else { ugi = UserGroupInformation.createRemoteUser(remoteUser); } return ugi; } catch (IOException e) { throw new AccessControlException(e.getCause()); } } /** * Format HTTP response. * * @param status - HTTP Code * @param message - Diagnostic message * @return - HTTP response */ private Response formatResponse(Status status, String message) { ServiceStatus entity = new ServiceStatus(); entity.setDiagnostics(message); return formatResponse(status, entity); } /** * Format HTTP response. * * @param status - HTTP Code * @param entity - ServiceStatus object * @return - HTTP response */ private Response formatResponse(Status status, ServiceStatus entity) { return Response.status(status).entity(entity).build(); } }
googleapis/google-cloud-java
37,279
java-automl/proto-google-cloud-automl-v1/src/main/java/com/google/cloud/automl/v1/ListDatasetsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/automl/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.automl.v1; /** * * * <pre> * Request message for [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. * </pre> * * Protobuf type {@code google.cloud.automl.v1.ListDatasetsRequest} */ public final class ListDatasetsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.automl.v1.ListDatasetsRequest) ListDatasetsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListDatasetsRequest.newBuilder() to construct. 
private ListDatasetsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDatasetsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDatasetsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.automl.v1.AutoMlProto .internal_static_google_cloud_automl_v1_ListDatasetsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.automl.v1.AutoMlProto .internal_static_google_cloud_automl_v1_ListDatasetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.automl.v1.ListDatasetsRequest.class, com.google.cloud.automl.v1.ListDatasetsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 4; private int pageSize_ = 0; /** * * * <pre> * Requested page size. Server may return fewer results than requested. * If unspecified, server will pick a default size. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 6; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, filter_); } if (pageSize_ != 0) { output.writeInt32(4, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 6, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, filter_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(4, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; 
return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.automl.v1.ListDatasetsRequest)) { return super.equals(obj); } com.google.cloud.automl.v1.ListDatasetsRequest other = (com.google.cloud.automl.v1.ListDatasetsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.automl.v1.ListDatasetsRequest parseFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.automl.v1.ListDatasetsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. * </pre> * * Protobuf type {@code google.cloud.automl.v1.ListDatasetsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.automl.v1.ListDatasetsRequest) com.google.cloud.automl.v1.ListDatasetsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.automl.v1.AutoMlProto .internal_static_google_cloud_automl_v1_ListDatasetsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.automl.v1.AutoMlProto .internal_static_google_cloud_automl_v1_ListDatasetsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.automl.v1.ListDatasetsRequest.class, com.google.cloud.automl.v1.ListDatasetsRequest.Builder.class); } // Construct using com.google.cloud.automl.v1.ListDatasetsRequest.newBuilder() private Builder() 
{} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.automl.v1.AutoMlProto .internal_static_google_cloud_automl_v1_ListDatasetsRequest_descriptor; } @java.lang.Override public com.google.cloud.automl.v1.ListDatasetsRequest getDefaultInstanceForType() { return com.google.cloud.automl.v1.ListDatasetsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.automl.v1.ListDatasetsRequest build() { com.google.cloud.automl.v1.ListDatasetsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.automl.v1.ListDatasetsRequest buildPartial() { com.google.cloud.automl.v1.ListDatasetsRequest result = new com.google.cloud.automl.v1.ListDatasetsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.automl.v1.ListDatasetsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override 
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.automl.v1.ListDatasetsRequest) { return mergeFrom((com.google.cloud.automl.v1.ListDatasetsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.automl.v1.ListDatasetsRequest other) { if (other == com.google.cloud.automl.v1.ListDatasetsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 26: { filter_ = 
input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 26 case 32: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 32 case 50: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 50 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the project from which to list datasets. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @return The filter. 
*/ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. 
* </pre> * * <code>string filter = 3;</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * An expression for filtering the results of the request. * * * `dataset_metadata` - for existence of the case (e.g. * `image_classification_dataset_metadata:*`). Some examples of using the filter are: * * * `translation_dataset_metadata:*` --&gt; The dataset has * `translation_dataset_metadata`. * </pre> * * <code>string filter = 3;</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Requested page size. Server may return fewer results than requested. * If unspecified, server will pick a default size. * </pre> * * <code>int32 page_size = 4;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Requested page size. Server may return fewer results than requested. * If unspecified, server will pick a default size. * </pre> * * <code>int32 page_size = 4;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Requested page size. Server may return fewer results than requested. * If unspecified, server will pick a default size. * </pre> * * <code>int32 page_size = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @param value The pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results for the server to return * Typically obtained via * [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] of the previous * [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. * </pre> * * <code>string page_token = 6;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.automl.v1.ListDatasetsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsRequest) private static final com.google.cloud.automl.v1.ListDatasetsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.automl.v1.ListDatasetsRequest(); } public static com.google.cloud.automl.v1.ListDatasetsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDatasetsRequest> PARSER = new com.google.protobuf.AbstractParser<ListDatasetsRequest>() { @java.lang.Override public ListDatasetsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListDatasetsRequest> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDatasetsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.automl.v1.ListDatasetsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,293
java-cloudbuild/proto-google-cloud-build-v2/src/main/java/com/google/cloudbuild/v2/CreateConnectionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v2/repositories.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v2; /** * * * <pre> * Message for creating a Connection * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.CreateConnectionRequest} */ public final class CreateConnectionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v2.CreateConnectionRequest) CreateConnectionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateConnectionRequest.newBuilder() to construct. 
// NOTE(review): machine-generated protobuf code (see header above); any behavioral change
// belongs in google/devtools/cloudbuild/v2/repositories.proto and a regeneration, not here.
// Fields: parent (string, #1), connection (message, #2, presence tracked via bitField0_ bit 0x1),
// connection_id (string, #3). String fields lazily cache String <-> ByteString conversions.
private CreateConnectionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateConnectionRequest() { parent_ = ""; connectionId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateConnectionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_CreateConnectionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_CreateConnectionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.CreateConnectionRequest.class, com.google.cloudbuild.v2.CreateConnectionRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CONNECTION_FIELD_NUMBER = 2; private com.google.cloudbuild.v2.Connection connection_; /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the connection field is set. */ @java.lang.Override public boolean hasConnection() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The connection. */ @java.lang.Override public com.google.cloudbuild.v2.Connection getConnection() { return connection_ == null ? com.google.cloudbuild.v2.Connection.getDefaultInstance() : connection_; } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloudbuild.v2.ConnectionOrBuilder getConnectionOrBuilder() { return connection_ == null ? com.google.cloudbuild.v2.Connection.getDefaultInstance() : connection_; } public static final int CONNECTION_ID_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object connectionId_ = ""; /** * * * <pre> * Required. 
The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The connectionId. */ @java.lang.Override public java.lang.String getConnectionId() { java.lang.Object ref = connectionId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); connectionId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for connectionId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getConnectionIdBytes() { java.lang.Object ref = connectionId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); connectionId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getConnection()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectionId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, connectionId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getConnection()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(connectionId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, connectionId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v2.CreateConnectionRequest)) { return super.equals(obj); } 
com.google.cloudbuild.v2.CreateConnectionRequest other = (com.google.cloudbuild.v2.CreateConnectionRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasConnection() != other.hasConnection()) return false; if (hasConnection()) { if (!getConnection().equals(other.getConnection())) return false; } if (!getConnectionId().equals(other.getConnectionId())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasConnection()) { hash = (37 * hash) + CONNECTION_FIELD_NUMBER; hash = (53 * hash) + getConnection().hashCode(); } hash = (37 * hash) + CONNECTION_ID_FIELD_NUMBER; hash = (53 * hash) + getConnectionId().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
// Standard generated parseFrom overloads — each delegates to PARSER, either directly or via
// GeneratedMessageV3.parseWithIOException / parseDelimitedWithIOException. Note isInitialized()
// above always records 1 (proto3: no required-field runtime check), so these never reject on
// missing fields despite the REQUIRED annotations in the Javadoc.
public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v2.CreateConnectionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v2.CreateConnectionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for creating a Connection * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v2.CreateConnectionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v2.CreateConnectionRequest) com.google.cloudbuild.v2.CreateConnectionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_CreateConnectionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_CreateConnectionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v2.CreateConnectionRequest.class, com.google.cloudbuild.v2.CreateConnectionRequest.Builder.class); } // Construct using com.google.cloudbuild.v2.CreateConnectionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getConnectionFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; connection_ = null; if (connectionBuilder_ != null) { connectionBuilder_.dispose(); connectionBuilder_ = null; } connectionId_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v2.RepositoryManagerProto .internal_static_google_devtools_cloudbuild_v2_CreateConnectionRequest_descriptor; } @java.lang.Override public com.google.cloudbuild.v2.CreateConnectionRequest getDefaultInstanceForType() { return com.google.cloudbuild.v2.CreateConnectionRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v2.CreateConnectionRequest build() { com.google.cloudbuild.v2.CreateConnectionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v2.CreateConnectionRequest buildPartial() { com.google.cloudbuild.v2.CreateConnectionRequest result = new com.google.cloudbuild.v2.CreateConnectionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v2.CreateConnectionRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.connection_ = connectionBuilder_ == null ? 
connection_ : connectionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.connectionId_ = connectionId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v2.CreateConnectionRequest) { return mergeFrom((com.google.cloudbuild.v2.CreateConnectionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v2.CreateConnectionRequest other) { if (other == com.google.cloudbuild.v2.CreateConnectionRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasConnection()) { mergeConnection(other.getConnection()); } if (!other.getConnectionId().isEmpty()) { connectionId_ = other.connectionId_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
// Wire-format mergeFrom: tags 10/18/26 are fields 1 (parent), 2 (connection), 3 (connection_id),
// all length-delimited (wire type 2). Unknown tags are routed to parseUnknownField; an endgroup
// tag terminates the loop. Builder bitField0_ bits: 0x1 parent, 0x2 connection, 0x4 connectionId.
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getConnectionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { connectionId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Project and location where the connection will be created. * Format: `projects/&#42;&#47;locations/&#42;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloudbuild.v2.Connection connection_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v2.Connection, com.google.cloudbuild.v2.Connection.Builder, com.google.cloudbuild.v2.ConnectionOrBuilder> connectionBuilder_; /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the connection field is set. */ public boolean hasConnection() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The connection. */ public com.google.cloudbuild.v2.Connection getConnection() { if (connectionBuilder_ == null) { return connection_ == null ? com.google.cloudbuild.v2.Connection.getDefaultInstance() : connection_; } else { return connectionBuilder_.getMessage(); } } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConnection(com.google.cloudbuild.v2.Connection value) { if (connectionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } connection_ = value; } else { connectionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The Connection to create. 
* </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConnection(com.google.cloudbuild.v2.Connection.Builder builderForValue) { if (connectionBuilder_ == null) { connection_ = builderForValue.build(); } else { connectionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeConnection(com.google.cloudbuild.v2.Connection value) { if (connectionBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && connection_ != null && connection_ != com.google.cloudbuild.v2.Connection.getDefaultInstance()) { getConnectionBuilder().mergeFrom(value); } else { connection_ = value; } } else { connectionBuilder_.mergeFrom(value); } if (connection_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearConnection() { bitField0_ = (bitField0_ & ~0x00000002); connection_ = null; if (connectionBuilder_ != null) { connectionBuilder_.dispose(); connectionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v2.Connection.Builder getConnectionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getConnectionFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Connection to create. 
* </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v2.ConnectionOrBuilder getConnectionOrBuilder() { if (connectionBuilder_ != null) { return connectionBuilder_.getMessageOrBuilder(); } else { return connection_ == null ? com.google.cloudbuild.v2.Connection.getDefaultInstance() : connection_; } } /** * * * <pre> * Required. The Connection to create. * </pre> * * <code> * .google.devtools.cloudbuild.v2.Connection connection = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v2.Connection, com.google.cloudbuild.v2.Connection.Builder, com.google.cloudbuild.v2.ConnectionOrBuilder> getConnectionFieldBuilder() { if (connectionBuilder_ == null) { connectionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v2.Connection, com.google.cloudbuild.v2.Connection.Builder, com.google.cloudbuild.v2.ConnectionOrBuilder>( getConnection(), getParentForChildren(), isClean()); connection_ = null; } return connectionBuilder_; } private java.lang.Object connectionId_ = ""; /** * * * <pre> * Required. The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The connectionId. */ public java.lang.String getConnectionId() { java.lang.Object ref = connectionId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); connectionId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for connectionId. */ public com.google.protobuf.ByteString getConnectionIdBytes() { java.lang.Object ref = connectionId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); connectionId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The connectionId to set. * @return This builder for chaining. */ public Builder setConnectionId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } connectionId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearConnectionId() { connectionId_ = getDefaultInstance().getConnectionId(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. 
The ID to use for the Connection, which will become the final * component of the Connection's resource name. Names must be unique * per-project per-location. Allows alphanumeric characters and any of * -._~%!$&amp;'()*+,;=&#64;. * </pre> * * <code>string connection_id = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for connectionId to set. * @return This builder for chaining. */ public Builder setConnectionIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); connectionId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v2.CreateConnectionRequest) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v2.CreateConnectionRequest) private static final com.google.cloudbuild.v2.CreateConnectionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v2.CreateConnectionRequest(); } public static com.google.cloudbuild.v2.CreateConnectionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateConnectionRequest> PARSER = new com.google.protobuf.AbstractParser<CreateConnectionRequest>() { @java.lang.Override public CreateConnectionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException 
e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateConnectionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateConnectionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v2.CreateConnectionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,412
java-iot/proto-google-cloud-iot-v1/src/main/java/com/google/cloud/iot/v1/UpdateDeviceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/iot/v1/device_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.iot.v1; /** * * * <pre> * Request for `UpdateDevice`. * </pre> * * Protobuf type {@code google.cloud.iot.v1.UpdateDeviceRequest} */ public final class UpdateDeviceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.iot.v1.UpdateDeviceRequest) UpdateDeviceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateDeviceRequest.newBuilder() to construct. 
private UpdateDeviceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateDeviceRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateDeviceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.iot.v1.DeviceManagerProto .internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.iot.v1.DeviceManagerProto .internal_static_google_cloud_iot_v1_UpdateDeviceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.iot.v1.UpdateDeviceRequest.class, com.google.cloud.iot.v1.UpdateDeviceRequest.Builder.class); } private int bitField0_; public static final int DEVICE_FIELD_NUMBER = 2; private com.google.cloud.iot.v1.Device device_; /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the device field is set. */ @java.lang.Override public boolean hasDevice() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. 
* </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The device. */ @java.lang.Override public com.google.cloud.iot.v1.Device getDevice() { return device_ == null ? com.google.cloud.iot.v1.Device.getDefaultInstance() : device_; } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public com.google.cloud.iot.v1.DeviceOrBuilder getDeviceOrBuilder() { return device_ == null ? com.google.cloud.iot.v1.Device.getDefaultInstance() : device_; } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. 
*/ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getDevice()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDevice()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.iot.v1.UpdateDeviceRequest)) { return super.equals(obj); } 
com.google.cloud.iot.v1.UpdateDeviceRequest other = (com.google.cloud.iot.v1.UpdateDeviceRequest) obj; if (hasDevice() != other.hasDevice()) return false; if (hasDevice()) { if (!getDevice().equals(other.getDevice())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDevice()) { hash = (37 * hash) + DEVICE_FIELD_NUMBER; hash = (53 * hash) + getDevice().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.iot.v1.UpdateDeviceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public 
static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.iot.v1.UpdateDeviceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for `UpdateDevice`. * </pre> * * Protobuf type {@code google.cloud.iot.v1.UpdateDeviceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.iot.v1.UpdateDeviceRequest) com.google.cloud.iot.v1.UpdateDeviceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.iot.v1.DeviceManagerProto .internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.iot.v1.DeviceManagerProto .internal_static_google_cloud_iot_v1_UpdateDeviceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.iot.v1.UpdateDeviceRequest.class, com.google.cloud.iot.v1.UpdateDeviceRequest.Builder.class); } // Construct using com.google.cloud.iot.v1.UpdateDeviceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDeviceFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; device_ = null; if (deviceBuilder_ != null) { deviceBuilder_.dispose(); deviceBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.iot.v1.DeviceManagerProto .internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor; } @java.lang.Override public com.google.cloud.iot.v1.UpdateDeviceRequest getDefaultInstanceForType() { return com.google.cloud.iot.v1.UpdateDeviceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.iot.v1.UpdateDeviceRequest build() { com.google.cloud.iot.v1.UpdateDeviceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.iot.v1.UpdateDeviceRequest buildPartial() { com.google.cloud.iot.v1.UpdateDeviceRequest result = new com.google.cloud.iot.v1.UpdateDeviceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.iot.v1.UpdateDeviceRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.device_ = deviceBuilder_ == null ? device_ : deviceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.iot.v1.UpdateDeviceRequest) { return mergeFrom((com.google.cloud.iot.v1.UpdateDeviceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.iot.v1.UpdateDeviceRequest other) { if (other == com.google.cloud.iot.v1.UpdateDeviceRequest.getDefaultInstance()) return this; if (other.hasDevice()) { mergeDevice(other.getDevice()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean 
done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { input.readMessage(getDeviceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 case 26: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.iot.v1.Device device_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.iot.v1.Device, com.google.cloud.iot.v1.Device.Builder, com.google.cloud.iot.v1.DeviceOrBuilder> deviceBuilder_; /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the device field is set. */ public boolean hasDevice() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The device. 
*/ public com.google.cloud.iot.v1.Device getDevice() { if (deviceBuilder_ == null) { return device_ == null ? com.google.cloud.iot.v1.Device.getDefaultInstance() : device_; } else { return deviceBuilder_.getMessage(); } } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDevice(com.google.cloud.iot.v1.Device value) { if (deviceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } device_ = value; } else { deviceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDevice(com.google.cloud.iot.v1.Device.Builder builderForValue) { if (deviceBuilder_ == null) { device_ = builderForValue.build(); } else { deviceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. 
* </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDevice(com.google.cloud.iot.v1.Device value) { if (deviceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && device_ != null && device_ != com.google.cloud.iot.v1.Device.getDefaultInstance()) { getDeviceBuilder().mergeFrom(value); } else { device_ = value; } } else { deviceBuilder_.mergeFrom(value); } if (device_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDevice() { bitField0_ = (bitField0_ & ~0x00000001); device_ = null; if (deviceBuilder_ != null) { deviceBuilder_.dispose(); deviceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.iot.v1.Device.Builder getDeviceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getDeviceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. 
For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.iot.v1.DeviceOrBuilder getDeviceOrBuilder() { if (deviceBuilder_ != null) { return deviceBuilder_.getMessageOrBuilder(); } else { return device_ == null ? com.google.cloud.iot.v1.Device.getDefaultInstance() : device_; } } /** * * * <pre> * Required. The new values for the device. The `id` and `num_id` fields must * be empty, and the field `name` must specify the name path. For example, * `projects/p0/locations/us-central1/registries/registry0/devices/device0`or * `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. * </pre> * * <code>.google.cloud.iot.v1.Device device = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.iot.v1.Device, com.google.cloud.iot.v1.Device.Builder, com.google.cloud.iot.v1.DeviceOrBuilder> getDeviceFieldBuilder() { if (deviceBuilder_ == null) { deviceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.iot.v1.Device, com.google.cloud.iot.v1.Device.Builder, com.google.cloud.iot.v1.DeviceOrBuilder>( getDevice(), getParentForChildren(), isClean()); device_ = null; } return deviceBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. 
* Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. 
* Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. 
* The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. * Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Only updates the `device` fields indicated by this mask. * The field mask must not be empty, and it must not contain fields that * are immutable or only set by the server. 
* Mutable top-level fields: `credentials`, `blocked`, and `metadata` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.iot.v1.UpdateDeviceRequest) } // @@protoc_insertion_point(class_scope:google.cloud.iot.v1.UpdateDeviceRequest) private static final com.google.cloud.iot.v1.UpdateDeviceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.iot.v1.UpdateDeviceRequest(); } public static com.google.cloud.iot.v1.UpdateDeviceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateDeviceRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateDeviceRequest>() { @java.lang.Override public UpdateDeviceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateDeviceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateDeviceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.iot.v1.UpdateDeviceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,497
java-cloudcommerceconsumerprocurement/google-cloud-cloudcommerceconsumerprocurement/src/main/java/com/google/cloud/commerce/consumer/procurement/v1/LicenseManagementServiceClient.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.commerce.consumer.procurement.v1;

import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.commerce.consumer.procurement.v1.stub.LicenseManagementServiceStub;
import com.google.cloud.commerce.consumer.procurement.v1.stub.LicenseManagementServiceStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.protobuf.FieldMask;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Service Description: Service for managing licenses.
 *
 * <p>Client for the LicenseManagementService API. Every RPC method here is a thin wrapper that
 * delegates to a {@link LicenseManagementServiceStub}; each method is offered in up to three
 * variants: a "flattened" convenience overload, a request-object overload, and a
 * {@code ...Callable()} accessor returning the underlying {@link UnaryCallable} for async use.
 *
 * <p>{@code close()} must be called on the client to release resources such as threads; prefer
 * try-with-resources:
 *
 * <pre>{@code
 * try (LicenseManagementServiceClient client = LicenseManagementServiceClient.create()) {
 *   LicensePool response = client.getLicensePool("name3373707");
 * }
 * }</pre>
 *
 * <p>The client can be customized by passing a {@link LicenseManagementServiceSettings} instance
 * to {@code create(settings)} — e.g. to override credentials, the endpoint, or to select the REST
 * (HTTP1.1/JSON) transport via {@code LicenseManagementServiceSettings.newHttpJsonBuilder()}.
 *
 * <p>NOTE(review): this class is annotated {@code @Generated("by gapic-generator-java")} —
 * regenerate it from the API definition rather than hand-editing.
 */
@Generated("by gapic-generator-java")
public class LicenseManagementServiceClient implements BackgroundResource {
  // Null when the client was constructed directly from a stub (advanced usage).
  private final LicenseManagementServiceSettings settings;
  // All RPCs delegate to this transport-level stub.
  private final LicenseManagementServiceStub stub;

  /** Constructs an instance of LicenseManagementServiceClient with default settings. */
  public static final LicenseManagementServiceClient create() throws IOException {
    return create(LicenseManagementServiceSettings.newBuilder().build());
  }

  /**
   * Constructs an instance of LicenseManagementServiceClient, using the given settings. The
   * channels are created based on the settings passed in, or defaults for any settings that are not
   * set.
   */
  public static final LicenseManagementServiceClient create(
      LicenseManagementServiceSettings settings) throws IOException {
    return new LicenseManagementServiceClient(settings);
  }

  /**
   * Constructs an instance of LicenseManagementServiceClient, using the given stub for making
   * calls. This is for advanced usage - prefer using create(LicenseManagementServiceSettings).
   */
  public static final LicenseManagementServiceClient create(LicenseManagementServiceStub stub) {
    return new LicenseManagementServiceClient(stub);
  }

  /**
   * Constructs an instance of LicenseManagementServiceClient, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected LicenseManagementServiceClient(LicenseManagementServiceSettings settings)
      throws IOException {
    this.settings = settings;
    this.stub = ((LicenseManagementServiceStubSettings) settings.getStubSettings()).createStub();
  }

  /** Constructs a client directly over a pre-built stub; {@code settings} is left null. */
  protected LicenseManagementServiceClient(LicenseManagementServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }

  /** Returns the settings used to build this client, or null if it was created from a stub. */
  public final LicenseManagementServiceSettings getSettings() {
    return settings;
  }

  /** Returns the transport stub backing this client. */
  public LicenseManagementServiceStub getStub() {
    return stub;
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets the license pool.
   *
   * @param name Required. The name of the license pool to get. Format:
   *     `billingAccounts/{billing_account}/orders/{order}/licensePool`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LicensePool getLicensePool(String name) {
    GetLicensePoolRequest request = GetLicensePoolRequest.newBuilder().setName(name).build();
    return getLicensePool(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets the license pool.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LicensePool getLicensePool(GetLicensePoolRequest request) {
    return getLicensePoolCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Gets the license pool. Returns the underlying callable for asynchronous use via
   * {@code futureCall(request)}.
   */
  public final UnaryCallable<GetLicensePoolRequest, LicensePool> getLicensePoolCallable() {
    return stub.getLicensePoolCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the license pool if one exists for this Order.
   *
   * @param licensePool Required. The license pool to update.
   *     <p>The license pool's name field is used to identify the license pool to update. Format:
   *     `billingAccounts/{billing_account}/orders/{order}/licensePool`.
   * @param updateMask Required. The list of fields to update.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LicensePool updateLicensePool(LicensePool licensePool, FieldMask updateMask) {
    UpdateLicensePoolRequest request =
        UpdateLicensePoolRequest.newBuilder()
            .setLicensePool(licensePool)
            .setUpdateMask(updateMask)
            .build();
    return updateLicensePool(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the license pool if one exists for this Order.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final LicensePool updateLicensePool(UpdateLicensePoolRequest request) {
    return updateLicensePoolCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Updates the license pool if one exists for this Order. Returns the underlying callable for
   * asynchronous use via {@code futureCall(request)}.
   */
  public final UnaryCallable<UpdateLicensePoolRequest, LicensePool> updateLicensePoolCallable() {
    return stub.updateLicensePoolCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Assigns a license to a user.
   *
   * @param parent Required. License pool name.
   * @param usernames Required. Username. Format: `name{@literal @}domain.com`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AssignResponse assign(String parent, List<String> usernames) {
    AssignRequest request =
        AssignRequest.newBuilder().setParent(parent).addAllUsernames(usernames).build();
    return assign(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Assigns a license to a user.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final AssignResponse assign(AssignRequest request) {
    return assignCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Assigns a license to a user. Returns the underlying callable for asynchronous use via
   * {@code futureCall(request)}.
   */
  public final UnaryCallable<AssignRequest, AssignResponse> assignCallable() {
    return stub.assignCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Unassigns a license from a user.
   *
   * @param parent Required. License pool name.
   * @param usernames Required. Username. Format: `name{@literal @}domain.com`.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final UnassignResponse unassign(String parent, List<String> usernames) {
    UnassignRequest request =
        UnassignRequest.newBuilder().setParent(parent).addAllUsernames(usernames).build();
    return unassign(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Unassigns a license from a user.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final UnassignResponse unassign(UnassignRequest request) {
    return unassignCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Unassigns a license from a user. Returns the underlying callable for asynchronous use via
   * {@code futureCall(request)}.
   */
  public final UnaryCallable<UnassignRequest, UnassignResponse> unassignCallable() {
    return stub.unassignCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Enumerates all users assigned a license.
   *
   * <p>The returned paged response lazily fetches further pages as {@code iterateAll()} is
   * consumed.
   *
   * @param parent Required. License pool name.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final EnumerateLicensedUsersPagedResponse enumerateLicensedUsers(String parent) {
    EnumerateLicensedUsersRequest request =
        EnumerateLicensedUsersRequest.newBuilder().setParent(parent).build();
    return enumerateLicensedUsers(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Enumerates all users assigned a license.
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final EnumerateLicensedUsersPagedResponse enumerateLicensedUsers(
      EnumerateLicensedUsersRequest request) {
    return enumerateLicensedUsersPagedCallable().call(request);
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Enumerates all users assigned a license. Returns the paged callable, whose response handles
   * page iteration transparently.
   */
  public final UnaryCallable<EnumerateLicensedUsersRequest, EnumerateLicensedUsersPagedResponse>
      enumerateLicensedUsersPagedCallable() {
    return stub.enumerateLicensedUsersPagedCallable();
  }

  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Enumerates all users assigned a license. Returns the raw per-page callable; the caller drives
   * pagination manually via {@code next_page_token}.
   */
  public final UnaryCallable<EnumerateLicensedUsersRequest, EnumerateLicensedUsersResponse>
      enumerateLicensedUsersCallable() {
    return stub.enumerateLicensedUsersCallable();
  }

  /** Closes the client and releases stub resources (idempotent per BackgroundResource contract). */
  @Override
  public final void close() {
    stub.close();
  }

  @Override
  public void shutdown() {
    stub.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }

  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }

  /**
   * Paged response wrapper for EnumerateLicensedUsers; exposes {@code iterateAll()} and page-wise
   * access over {@link LicensedUser} elements.
   */
  public static class EnumerateLicensedUsersPagedResponse
      extends AbstractPagedListResponse<
          EnumerateLicensedUsersRequest,
          EnumerateLicensedUsersResponse,
          LicensedUser,
          EnumerateLicensedUsersPage,
          EnumerateLicensedUsersFixedSizeCollection> {

    /** Builds a paged response future by wrapping the first-page future from the stub. */
    public static ApiFuture<EnumerateLicensedUsersPagedResponse> createAsync(
        PageContext<EnumerateLicensedUsersRequest, EnumerateLicensedUsersResponse, LicensedUser>
            context,
        ApiFuture<EnumerateLicensedUsersResponse> futureResponse) {
      ApiFuture<EnumerateLicensedUsersPage> futurePage =
          EnumerateLicensedUsersPage.createEmptyPage().createPageAsync(context, futureResponse);
      return ApiFutures.transform(
          futurePage,
          input -> new EnumerateLicensedUsersPagedResponse(input),
          MoreExecutors.directExecutor());
    }

    private EnumerateLicensedUsersPagedResponse(EnumerateLicensedUsersPage page) {
      super(page, EnumerateLicensedUsersFixedSizeCollection.createEmptyCollection());
    }
  }

  /** A single page of EnumerateLicensedUsers results. */
  public static class EnumerateLicensedUsersPage
      extends AbstractPage<
          EnumerateLicensedUsersRequest,
          EnumerateLicensedUsersResponse,
          LicensedUser,
          EnumerateLicensedUsersPage> {

    private EnumerateLicensedUsersPage(
        PageContext<EnumerateLicensedUsersRequest, EnumerateLicensedUsersResponse, LicensedUser>
            context,
        EnumerateLicensedUsersResponse response) {
      super(context, response);
    }

    // Sentinel empty page used as the factory seed for createPageAsync.
    private static EnumerateLicensedUsersPage createEmptyPage() {
      return new EnumerateLicensedUsersPage(null, null);
    }

    @Override
    protected EnumerateLicensedUsersPage createPage(
        PageContext<EnumerateLicensedUsersRequest, EnumerateLicensedUsersResponse, LicensedUser>
            context,
        EnumerateLicensedUsersResponse response) {
      return new EnumerateLicensedUsersPage(context, response);
    }

    @Override
    public ApiFuture<EnumerateLicensedUsersPage> createPageAsync(
        PageContext<EnumerateLicensedUsersRequest, EnumerateLicensedUsersResponse, LicensedUser>
            context,
        ApiFuture<EnumerateLicensedUsersResponse> futureResponse) {
      return super.createPageAsync(context, futureResponse);
    }
  }

  /** Fixed-size collection view over EnumerateLicensedUsers pages (for expandToFixedSizeCollection). */
  public static class EnumerateLicensedUsersFixedSizeCollection
      extends AbstractFixedSizeCollection<
          EnumerateLicensedUsersRequest,
          EnumerateLicensedUsersResponse,
          LicensedUser,
          EnumerateLicensedUsersPage,
          EnumerateLicensedUsersFixedSizeCollection> {

    private EnumerateLicensedUsersFixedSizeCollection(
        List<EnumerateLicensedUsersPage> pages, int collectionSize) {
      super(pages, collectionSize);
    }

    private static EnumerateLicensedUsersFixedSizeCollection createEmptyCollection() {
      return new EnumerateLicensedUsersFixedSizeCollection(null, 0);
    }

    @Override
    protected EnumerateLicensedUsersFixedSizeCollection createCollection(
        List<EnumerateLicensedUsersPage> pages, int collectionSize) {
      return new EnumerateLicensedUsersFixedSizeCollection(pages, collectionSize);
    }
  }
}
googleapis/google-cloud-java
37,276
java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/ListBranchRulesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securesourcemanager/v1/secure_source_manager.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securesourcemanager.v1; /** * * * <pre> * ListBranchRulesResponse is the response to listing branchRules. * </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.ListBranchRulesResponse} */ public final class ListBranchRulesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.ListBranchRulesResponse) ListBranchRulesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListBranchRulesResponse.newBuilder() to construct. 
private ListBranchRulesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListBranchRulesResponse() { branchRules_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListBranchRulesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListBranchRulesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListBranchRulesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.class, com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.Builder.class); } public static final int BRANCH_RULES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securesourcemanager.v1.BranchRule> branchRules_; /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.securesourcemanager.v1.BranchRule> getBranchRulesList() { return branchRules_; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder> getBranchRulesOrBuilderList() { return branchRules_; } /** * * * <pre> * The list of branch rules. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ @java.lang.Override public int getBranchRulesCount() { return branchRules_.size(); } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.BranchRule getBranchRules(int index) { return branchRules_.get(index); } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ @java.lang.Override public com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder getBranchRulesOrBuilder( int index) { return branchRules_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < branchRules_.size(); i++) { output.writeMessage(1, branchRules_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < branchRules_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, branchRules_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse)) { return super.equals(obj); } com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse other = (com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse) obj; if (!getBranchRulesList().equals(other.getBranchRulesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBranchRulesCount() > 0) { hash = (37 * hash) + BRANCH_RULES_FIELD_NUMBER; hash = (53 * hash) + getBranchRulesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * ListBranchRulesResponse is the response to listing branchRules. * </pre> * * Protobuf type {@code google.cloud.securesourcemanager.v1.ListBranchRulesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.ListBranchRulesResponse) com.google.cloud.securesourcemanager.v1.ListBranchRulesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListBranchRulesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListBranchRulesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.class, com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.Builder.class); } // Construct using com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if 
(branchRulesBuilder_ == null) { branchRules_ = java.util.Collections.emptyList(); } else { branchRules_ = null; branchRulesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto .internal_static_google_cloud_securesourcemanager_v1_ListBranchRulesResponse_descriptor; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse getDefaultInstanceForType() { return com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse build() { com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse buildPartial() { com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse result = new com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse result) { if (branchRulesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { branchRules_ = java.util.Collections.unmodifiableList(branchRules_); bitField0_ = (bitField0_ & ~0x00000001); } result.branchRules_ = branchRules_; } else { result.branchRules_ = branchRulesBuilder_.build(); } } private void buildPartial0( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override 
public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse) { return mergeFrom((com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse other) { if (other == com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse.getDefaultInstance()) return this; if (branchRulesBuilder_ == null) { if (!other.branchRules_.isEmpty()) { if (branchRules_.isEmpty()) { branchRules_ = other.branchRules_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBranchRulesIsMutable(); branchRules_.addAll(other.branchRules_); } onChanged(); } } else { if (!other.branchRules_.isEmpty()) { if (branchRulesBuilder_.isEmpty()) { branchRulesBuilder_.dispose(); branchRulesBuilder_ = null; branchRules_ = other.branchRules_; bitField0_ = (bitField0_ & ~0x00000001); branchRulesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getBranchRulesFieldBuilder() : null; } else { branchRulesBuilder_.addAllMessages(other.branchRules_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securesourcemanager.v1.BranchRule m = input.readMessage( com.google.cloud.securesourcemanager.v1.BranchRule.parser(), extensionRegistry); if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); branchRules_.add(m); } else { branchRulesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securesourcemanager.v1.BranchRule> branchRules_ = java.util.Collections.emptyList(); private void ensureBranchRulesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { branchRules_ = new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.BranchRule>( branchRules_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.BranchRule, 
com.google.cloud.securesourcemanager.v1.BranchRule.Builder, com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder> branchRulesBuilder_; /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.BranchRule> getBranchRulesList() { if (branchRulesBuilder_ == null) { return java.util.Collections.unmodifiableList(branchRules_); } else { return branchRulesBuilder_.getMessageList(); } } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public int getBranchRulesCount() { if (branchRulesBuilder_ == null) { return branchRules_.size(); } else { return branchRulesBuilder_.getCount(); } } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public com.google.cloud.securesourcemanager.v1.BranchRule getBranchRules(int index) { if (branchRulesBuilder_ == null) { return branchRules_.get(index); } else { return branchRulesBuilder_.getMessage(index); } } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder setBranchRules( int index, com.google.cloud.securesourcemanager.v1.BranchRule value) { if (branchRulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBranchRulesIsMutable(); branchRules_.set(index, value); onChanged(); } else { branchRulesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of branch rules. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder setBranchRules( int index, com.google.cloud.securesourcemanager.v1.BranchRule.Builder builderForValue) { if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); branchRules_.set(index, builderForValue.build()); onChanged(); } else { branchRulesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder addBranchRules(com.google.cloud.securesourcemanager.v1.BranchRule value) { if (branchRulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBranchRulesIsMutable(); branchRules_.add(value); onChanged(); } else { branchRulesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder addBranchRules( int index, com.google.cloud.securesourcemanager.v1.BranchRule value) { if (branchRulesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBranchRulesIsMutable(); branchRules_.add(index, value); onChanged(); } else { branchRulesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder addBranchRules( com.google.cloud.securesourcemanager.v1.BranchRule.Builder builderForValue) { if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); branchRules_.add(builderForValue.build()); onChanged(); } else { branchRulesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of branch rules. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder addBranchRules( int index, com.google.cloud.securesourcemanager.v1.BranchRule.Builder builderForValue) { if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); branchRules_.add(index, builderForValue.build()); onChanged(); } else { branchRulesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder addAllBranchRules( java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.BranchRule> values) { if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, branchRules_); onChanged(); } else { branchRulesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder clearBranchRules() { if (branchRulesBuilder_ == null) { branchRules_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { branchRulesBuilder_.clear(); } return this; } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public Builder removeBranchRules(int index) { if (branchRulesBuilder_ == null) { ensureBranchRulesIsMutable(); branchRules_.remove(index); onChanged(); } else { branchRulesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of branch rules. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public com.google.cloud.securesourcemanager.v1.BranchRule.Builder getBranchRulesBuilder( int index) { return getBranchRulesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder getBranchRulesOrBuilder( int index) { if (branchRulesBuilder_ == null) { return branchRules_.get(index); } else { return branchRulesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public java.util.List<? extends com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder> getBranchRulesOrBuilderList() { if (branchRulesBuilder_ != null) { return branchRulesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(branchRules_); } } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public com.google.cloud.securesourcemanager.v1.BranchRule.Builder addBranchRulesBuilder() { return getBranchRulesFieldBuilder() .addBuilder(com.google.cloud.securesourcemanager.v1.BranchRule.getDefaultInstance()); } /** * * * <pre> * The list of branch rules. * </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public com.google.cloud.securesourcemanager.v1.BranchRule.Builder addBranchRulesBuilder( int index) { return getBranchRulesFieldBuilder() .addBuilder( index, com.google.cloud.securesourcemanager.v1.BranchRule.getDefaultInstance()); } /** * * * <pre> * The list of branch rules. 
* </pre> * * <code>repeated .google.cloud.securesourcemanager.v1.BranchRule branch_rules = 1;</code> */ public java.util.List<com.google.cloud.securesourcemanager.v1.BranchRule.Builder> getBranchRulesBuilderList() { return getBranchRulesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.BranchRule, com.google.cloud.securesourcemanager.v1.BranchRule.Builder, com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder> getBranchRulesFieldBuilder() { if (branchRulesBuilder_ == null) { branchRulesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securesourcemanager.v1.BranchRule, com.google.cloud.securesourcemanager.v1.BranchRule.Builder, com.google.cloud.securesourcemanager.v1.BranchRuleOrBuilder>( branchRules_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); branchRules_ = null; } return branchRulesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying a page of results the server should return. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.ListBranchRulesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.ListBranchRulesResponse) private static final com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse(); } public static com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListBranchRulesResponse> PARSER = new com.google.protobuf.AbstractParser<ListBranchRulesResponse>() { @java.lang.Override public ListBranchRulesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListBranchRulesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListBranchRulesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securesourcemanager.v1.ListBranchRulesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,306
java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/ListCatalogsResponse.java
// NOTE(review): protoc-generated file (protobuf-java 3.25.8) from
// google/cloud/retail/v2beta/catalog_service.proto, marked "DO NOT EDIT!".
// Changes must be made in the .proto and regenerated — never hand-edited here.
// The comments added below only annotate the (whitespace-collapsed) generated
// sections; every code token is byte-identical to the generator's output.
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/retail/v2beta/catalog_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.retail.v2beta; /** * * * <pre> * Response for * [CatalogService.ListCatalogs][google.cloud.retail.v2beta.CatalogService.ListCatalogs] * method. * </pre> * * Protobuf type {@code google.cloud.retail.v2beta.ListCatalogsResponse} */ public final class ListCatalogsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.ListCatalogsResponse) ListCatalogsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCatalogsResponse.newBuilder() to construct. 
// --- construction, descriptor plumbing, and `catalogs` (repeated field 1) read accessors ---
private ListCatalogsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCatalogsResponse() { catalogs_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCatalogsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2beta.CatalogServiceProto .internal_static_google_cloud_retail_v2beta_ListCatalogsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2beta.CatalogServiceProto .internal_static_google_cloud_retail_v2beta_ListCatalogsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2beta.ListCatalogsResponse.class, com.google.cloud.retail.v2beta.ListCatalogsResponse.Builder.class); } public static final int CATALOGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.retail.v2beta.Catalog> catalogs_; /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.retail.v2beta.Catalog> getCatalogsList() { return catalogs_; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.retail.v2beta.CatalogOrBuilder> getCatalogsOrBuilderList() { return catalogs_; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. 
 * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ @java.lang.Override public int getCatalogsCount() { return catalogs_.size(); } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ @java.lang.Override public com.google.cloud.retail.v2beta.Catalog getCatalogs(int index) { return catalogs_.get(index); } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ @java.lang.Override public com.google.cloud.retail.v2beta.CatalogOrBuilder getCatalogsOrBuilder(int index) { return catalogs_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
 */ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < catalogs_.size(); i++) { output.writeMessage(1, catalogs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < catalogs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, catalogs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.retail.v2beta.ListCatalogsResponse)) { return super.equals(obj); } com.google.cloud.retail.v2beta.ListCatalogsResponse other = (com.google.cloud.retail.v2beta.ListCatalogsResponse) obj; if (!getCatalogsList().equals(other.getCatalogsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
/* equals() also compares unknown fields; hashCode below folds field numbers + values */
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCatalogsCount() > 0) { hash = (37 * hash) + CATALOGS_FIELD_NUMBER; hash = (53 * hash) + getCatalogsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
/* stream-based parseFrom/parseDelimitedFrom overloads and builder factory methods */
com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.retail.v2beta.ListCatalogsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == 
DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for * [CatalogService.ListCatalogs][google.cloud.retail.v2beta.CatalogService.ListCatalogs] * method. * </pre> * * Protobuf type {@code google.cloud.retail.v2beta.ListCatalogsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.ListCatalogsResponse) com.google.cloud.retail.v2beta.ListCatalogsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2beta.CatalogServiceProto .internal_static_google_cloud_retail_v2beta_ListCatalogsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2beta.CatalogServiceProto .internal_static_google_cloud_retail_v2beta_ListCatalogsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2beta.ListCatalogsResponse.class, com.google.cloud.retail.v2beta.ListCatalogsResponse.Builder.class); } // Construct using com.google.cloud.retail.v2beta.ListCatalogsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (catalogsBuilder_ == null) { catalogs_ = java.util.Collections.emptyList(); } else { catalogs_ = null; catalogsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.retail.v2beta.CatalogServiceProto .internal_static_google_cloud_retail_v2beta_ListCatalogsResponse_descriptor; } @java.lang.Override public com.google.cloud.retail.v2beta.ListCatalogsResponse getDefaultInstanceForType() { return com.google.cloud.retail.v2beta.ListCatalogsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.retail.v2beta.ListCatalogsResponse build() { com.google.cloud.retail.v2beta.ListCatalogsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.retail.v2beta.ListCatalogsResponse buildPartial() { com.google.cloud.retail.v2beta.ListCatalogsResponse result = new com.google.cloud.retail.v2beta.ListCatalogsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.retail.v2beta.ListCatalogsResponse result) { if (catalogsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { catalogs_ = java.util.Collections.unmodifiableList(catalogs_); bitField0_ = (bitField0_ & ~0x00000001); } result.catalogs_ = catalogs_; } else { result.catalogs_ = catalogsBuilder_.build(); } } private void buildPartial0(com.google.cloud.retail.v2beta.ListCatalogsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.retail.v2beta.ListCatalogsResponse) { return mergeFrom((com.google.cloud.retail.v2beta.ListCatalogsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.retail.v2beta.ListCatalogsResponse other) { if (other == com.google.cloud.retail.v2beta.ListCatalogsResponse.getDefaultInstance()) return this; if (catalogsBuilder_ == null) { if (!other.catalogs_.isEmpty()) { if (catalogs_.isEmpty()) { catalogs_ = other.catalogs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCatalogsIsMutable(); catalogs_.addAll(other.catalogs_); } onChanged(); } } else { if (!other.catalogs_.isEmpty()) { if (catalogsBuilder_.isEmpty()) { catalogsBuilder_.dispose(); catalogsBuilder_ = null; catalogs_ = other.catalogs_; bitField0_ = (bitField0_ & ~0x00000001); catalogsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCatalogsFieldBuilder() : null; } else { catalogsBuilder_.addAllMessages(other.catalogs_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.retail.v2beta.Catalog m = input.readMessage( com.google.cloud.retail.v2beta.Catalog.parser(), extensionRegistry); if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); catalogs_.add(m); } else { catalogsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.retail.v2beta.Catalog> catalogs_ = java.util.Collections.emptyList(); private void ensureCatalogsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { catalogs_ = new java.util.ArrayList<com.google.cloud.retail.v2beta.Catalog>(catalogs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2beta.Catalog, com.google.cloud.retail.v2beta.Catalog.Builder, com.google.cloud.retail.v2beta.CatalogOrBuilder> catalogsBuilder_; /** * 
 * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public java.util.List<com.google.cloud.retail.v2beta.Catalog> getCatalogsList() { if (catalogsBuilder_ == null) { return java.util.Collections.unmodifiableList(catalogs_); } else { return catalogsBuilder_.getMessageList(); } } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public int getCatalogsCount() { if (catalogsBuilder_ == null) { return catalogs_.size(); } else { return catalogsBuilder_.getCount(); } } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public com.google.cloud.retail.v2beta.Catalog getCatalogs(int index) { if (catalogsBuilder_ == null) { return catalogs_.get(index); } else { return catalogsBuilder_.getMessage(index); } } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder setCatalogs(int index, com.google.cloud.retail.v2beta.Catalog value) { if (catalogsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCatalogsIsMutable(); catalogs_.set(index, value); onChanged(); } else { catalogsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. 
 * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder setCatalogs( int index, com.google.cloud.retail.v2beta.Catalog.Builder builderForValue) { if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); catalogs_.set(index, builderForValue.build()); onChanged(); } else { catalogsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder addCatalogs(com.google.cloud.retail.v2beta.Catalog value) { if (catalogsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCatalogsIsMutable(); catalogs_.add(value); onChanged(); } else { catalogsBuilder_.addMessage(value); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder addCatalogs(int index, com.google.cloud.retail.v2beta.Catalog value) { if (catalogsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCatalogsIsMutable(); catalogs_.add(index, value); onChanged(); } else { catalogsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder addCatalogs(com.google.cloud.retail.v2beta.Catalog.Builder builderForValue) { if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); catalogs_.add(builderForValue.build()); onChanged(); } else { catalogsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. 
 * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder addCatalogs( int index, com.google.cloud.retail.v2beta.Catalog.Builder builderForValue) { if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); catalogs_.add(index, builderForValue.build()); onChanged(); } else { catalogsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder addAllCatalogs( java.lang.Iterable<? extends com.google.cloud.retail.v2beta.Catalog> values) { if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, catalogs_); onChanged(); } else { catalogsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder clearCatalogs() { if (catalogsBuilder_ == null) { catalogs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { catalogsBuilder_.clear(); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public Builder removeCatalogs(int index) { if (catalogsBuilder_ == null) { ensureCatalogsIsMutable(); catalogs_.remove(index); onChanged(); } else { catalogsBuilder_.remove(index); } return this; } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. 
 * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public com.google.cloud.retail.v2beta.Catalog.Builder getCatalogsBuilder(int index) { return getCatalogsFieldBuilder().getBuilder(index); } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public com.google.cloud.retail.v2beta.CatalogOrBuilder getCatalogsOrBuilder(int index) { if (catalogsBuilder_ == null) { return catalogs_.get(index); } else { return catalogsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public java.util.List<? extends com.google.cloud.retail.v2beta.CatalogOrBuilder> getCatalogsOrBuilderList() { if (catalogsBuilder_ != null) { return catalogsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(catalogs_); } } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public com.google.cloud.retail.v2beta.Catalog.Builder addCatalogsBuilder() { return getCatalogsFieldBuilder() .addBuilder(com.google.cloud.retail.v2beta.Catalog.getDefaultInstance()); } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public com.google.cloud.retail.v2beta.Catalog.Builder addCatalogsBuilder(int index) { return getCatalogsFieldBuilder() .addBuilder(index, com.google.cloud.retail.v2beta.Catalog.getDefaultInstance()); } /** * * * <pre> * All the customer's [Catalog][google.cloud.retail.v2beta.Catalog]s. 
 * </pre> * * <code>repeated .google.cloud.retail.v2beta.Catalog catalogs = 1;</code> */ public java.util.List<com.google.cloud.retail.v2beta.Catalog.Builder> getCatalogsBuilderList() { return getCatalogsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2beta.Catalog, com.google.cloud.retail.v2beta.Catalog.Builder, com.google.cloud.retail.v2beta.CatalogOrBuilder> getCatalogsFieldBuilder() { if (catalogsBuilder_ == null) { catalogsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2beta.Catalog, com.google.cloud.retail.v2beta.Catalog.Builder, com.google.cloud.retail.v2beta.CatalogOrBuilder>( catalogs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); catalogs_ = null; } return catalogsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
 */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token that can be sent as * [ListCatalogsRequest.page_token][google.cloud.retail.v2beta.ListCatalogsRequest.page_token] * to retrieve the next page. If this field is omitted, there are no * subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
 */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.ListCatalogsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.ListCatalogsResponse) private static final com.google.cloud.retail.v2beta.ListCatalogsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.ListCatalogsResponse(); } public static com.google.cloud.retail.v2beta.ListCatalogsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCatalogsResponse> PARSER = new com.google.protobuf.AbstractParser<ListCatalogsResponse>() { @java.lang.Override public ListCatalogsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
/* static PARSER accessor and default-instance accessor */
com.google.protobuf.Parser<ListCatalogsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCatalogsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.retail.v2beta.ListCatalogsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/olingo-odata4
36,964
lib/server-core/src/test/java/org/apache/olingo/server/core/uri/parser/UriTokenizerTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.olingo.server.core.uri.parser;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.Locale;

import org.apache.olingo.server.core.uri.parser.UriTokenizer.TokenKind;
import org.junit.Test;

/**
 * Unit tests for {@link UriTokenizer}: each test feeds a URI fragment to a fresh
 * tokenizer and checks, token by token, that {@code next(TokenKind)} accepts or
 * rejects the expected token kinds and that {@code getText()} returns the matched text.
 * The {@link #wrongToken(TokenKind, String, char)} helper additionally verifies that
 * inserting a disturbing character at any position breaks recognition.
 */
public class UriTokenizerTest {

  /** A null input string must yield no token other than EOF. */
  @Test
  public void nullOK() {
    assertFalse(new UriTokenizer(null).next(null));
    assertTrue(new UriTokenizer(null).next(TokenKind.EOF));
  }

  /** Fixed constant tokens like $ref, $value, $count; EOF can be matched repeatedly. */
  @Test
  public void constants() {
    final UriTokenizer tokenizer = new UriTokenizer("$ref");
    assertTrue(tokenizer.next(TokenKind.REF));
    assertEquals("$ref", tokenizer.getText());
    assertTrue(tokenizer.next(TokenKind.EOF));
    assertTrue(tokenizer.next(TokenKind.EOF));

    assertTrue(new UriTokenizer("$value").next(TokenKind.VALUE));
    assertTrue(new UriTokenizer("$count").next(TokenKind.COUNT));
    assertTrue(new UriTokenizer("$crossjoin").next(TokenKind.CROSSJOIN));
    assertTrue(new UriTokenizer("$root").next(TokenKind.ROOT));
    assertTrue(new UriTokenizer("$it").next(TokenKind.IT));
    assertTrue(new UriTokenizer("null").next(TokenKind.NULL));

    wrongToken(TokenKind.REF, "$ref", 'x');
  }

  /**
   * A sequence of punctuation, identifiers, and values; a failed next() call
   * (e.g. the second OPEN, or EOF before CLOSE) must not consume input.
   */
  @Test
  public void sequence() {
    UriTokenizer tokenizer = new UriTokenizer("(A=1,B=2);.*/+-");
    assertTrue(tokenizer.next(TokenKind.OPEN));
    assertFalse(tokenizer.next(TokenKind.OPEN));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertEquals("A", tokenizer.getText());
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertEquals("1", tokenizer.getText());
    assertTrue(tokenizer.next(TokenKind.COMMA));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertEquals("B", tokenizer.getText());
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertEquals("2", tokenizer.getText());
    assertFalse(tokenizer.next(TokenKind.EOF));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.DOT));
    assertTrue(tokenizer.next(TokenKind.STAR));
    assertTrue(tokenizer.next(TokenKind.SLASH));
    assertTrue(tokenizer.next(TokenKind.PLUS));
    assertTrue(tokenizer.next(TokenKind.MinusOperator));
    assertTrue(tokenizer.next(TokenKind.EOF));

    // Lambda expressions: any(...) and all(...) with their lambda-variable colon.
    tokenizer = new UriTokenizer("any(a:true) or all(b:false)");
    assertTrue(tokenizer.next(TokenKind.ANY));
    assertTrue(tokenizer.next(TokenKind.OPEN));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.COLON));
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.OrOperator));
    assertTrue(tokenizer.next(TokenKind.ALL));
    assertTrue(tokenizer.next(TokenKind.OPEN));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.COLON));
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.EOF));
  }

  /** saveState()/returnToSavedState() must allow re-reading the same tokens. */
  @Test
  public void saveState() {
    UriTokenizer tokenizer = new UriTokenizer("a*");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    tokenizer.saveState();
    assertTrue(tokenizer.next(TokenKind.STAR));
    assertTrue(tokenizer.next(TokenKind.EOF));
    tokenizer.returnToSavedState();
    assertTrue(tokenizer.next(TokenKind.STAR));
    assertTrue(tokenizer.next(TokenKind.EOF));
  }

  /** System query option names ($expand, $filter, ..., $top) used inside expand options. */
  @Test
  public void systemQueryOptions() {
    UriTokenizer tokenizer = new UriTokenizer("$expand=*;$filter=true;$levels=max;$orderby=false");
    assertTrue(tokenizer.next(TokenKind.EXPAND));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.STAR));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.FILTER));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.LEVELS));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.MAX));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.ORDERBY));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.EOF));

    tokenizer = new UriTokenizer("$search=A;$select=*;$skip=1;$top=2");
    assertTrue(tokenizer.next(TokenKind.SEARCH));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.SELECT));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.STAR));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.SKIP));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.SEMI));
    assertTrue(tokenizer.next(TokenKind.TOP));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EOF));
  }

  /**
   * OData identifiers: must start with a letter or '_', may contain Unicode letters
   * (including characters outside the Basic Multilingual Plane and combining marks),
   * and are cut off after 128 characters.
   */
  @Test
  public void identifier() {
    assertTrue(new UriTokenizer("name").next(TokenKind.ODataIdentifier));
    assertTrue(new UriTokenizer("_name").next(TokenKind.ODataIdentifier));
    assertFalse(new UriTokenizer("1name").next(TokenKind.ODataIdentifier));
    assertFalse(new UriTokenizer("").next(TokenKind.ODataIdentifier));

    // U+10330 (GOTHIC LETTER AHSA) is a letter outside the BMP, encoded as a surrogate pair.
    final String outsideBmpLetter = String.valueOf(Character.toChars(0x10330));
    UriTokenizer tokenizer = new UriTokenizer(
        outsideBmpLetter + "name1\u0300a\u0600b\uFE4F" + outsideBmpLetter + "end\b");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    // The backspace character '\b' must terminate the identifier.
    assertEquals(outsideBmpLetter + "name1\u0300a\u0600b\uFE4F" + outsideBmpLetter + "end",
        tokenizer.getText());

    // Identifiers consist of up to 128 characters.  Check that the identifier does not have more characters.
    final String name = "Llanfairpwllgwyngyllgogerychwyrndrobwllllantysiliogogogoch";  // Do you know this village?
    tokenizer = new UriTokenizer(name + '_' + name + "_0123456789X");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertEquals(name + '_' + name + "_0123456789", tokenizer.getText());
    // NOTE(review): the return value of this next() call is not asserted — presumably intentional; confirm.
    tokenizer.next(TokenKind.ODataIdentifier);
    assertEquals("X", tokenizer.getText());

    wrongToken(TokenKind.ODataIdentifier, "_", '.');
    wrongToken(TokenKind.ODataIdentifier, "_", ',');
  }

  /** Namespace-qualified names; a trailing ".1" is not part of the qualified name. */
  @Test
  public void qualifiedName() {
    assertTrue(new UriTokenizer("namespace.name").next(TokenKind.QualifiedName));

    final UriTokenizer tokenizer = new UriTokenizer("multi.part.namespace.name.1");
    assertTrue(tokenizer.next(TokenKind.QualifiedName));
    assertTrue(tokenizer.next(TokenKind.DOT));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EOF));

    assertFalse(new UriTokenizer("name").next(TokenKind.QualifiedName));
    assertFalse(new UriTokenizer("namespace..name").next(TokenKind.QualifiedName));
    assertFalse(new UriTokenizer("").next(TokenKind.QualifiedName));
    wrongToken(TokenKind.QualifiedName, "namespace._", ',');
  }

  /** Parameter aliases start with '@' followed by an identifier. */
  @Test
  public void alias() {
    assertTrue(new UriTokenizer("@name").next(TokenKind.ParameterAliasName));
    assertTrue(new UriTokenizer("@_name").next(TokenKind.ParameterAliasName));
    assertFalse(new UriTokenizer("name").next(TokenKind.ParameterAliasName));
    assertFalse(new UriTokenizer("@").next(TokenKind.ParameterAliasName));
    assertFalse(new UriTokenizer("@1").next(TokenKind.ParameterAliasName));
  }

  /** Boolean literals are recognized case-insensitively. */
  @Test
  public void booleanValue() {
    assertTrue(new UriTokenizer("true").next(TokenKind.BooleanValue));
    assertTrue(new UriTokenizer("tRuE").next(TokenKind.BooleanValue));
    assertTrue(new UriTokenizer("false").next(TokenKind.BooleanValue));
    assertTrue(new UriTokenizer("False").next(TokenKind.BooleanValue));

    wrongToken(TokenKind.BooleanValue, "true", 'x');
  }

  /** Single-quoted strings; '' inside a string is an escaped single quote. */
  @Test
  public void string() {
    assertTrue(new UriTokenizer("'ABC'").next(TokenKind.StringValue));
    assertTrue(new UriTokenizer("'€\uFDFC'").next(TokenKind.StringValue));
    assertTrue(new UriTokenizer('\'' + String.valueOf(Character.toChars(0x1F603)) + '\'')
        .next(TokenKind.StringValue));

    final UriTokenizer tokenizer = new UriTokenizer("'AB''''C'''D");
    assertTrue(tokenizer.next(TokenKind.StringValue));
    assertEquals("'AB''''C'''", tokenizer.getText());
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertEquals("D", tokenizer.getText());

    assertFalse(new UriTokenizer("A").next(TokenKind.StringValue));
    assertFalse(new UriTokenizer("'A").next(TokenKind.StringValue));
  }

  /** Integer literals with optional leading minus; "1." still starts with an integer. */
  @Test
  public void integer() {
    assertTrue(new UriTokenizer("1").next(TokenKind.IntegerValue));
    assertTrue(new UriTokenizer("1.").next(TokenKind.IntegerValue));
    assertFalse(new UriTokenizer(".1").next(TokenKind.IntegerValue));
    assertTrue(new UriTokenizer("-1").next(TokenKind.IntegerValue));
    assertTrue(new UriTokenizer("1234567890").next(TokenKind.IntegerValue));
  }

  /** GUID literals in 8-4-4-4-12 hex format, case-insensitive. */
  @Test
  public void guid() {
    assertTrue(new UriTokenizer("12345678-abcd-ef12-1234-567890ABCDEF").next(TokenKind.GuidValue));
    wrongToken(TokenKind.GuidValue, "12345678-1234-1234-1234-123456789ABC", 'G');
  }

  /** Date literals: at least four year digits, optional negative years, valid month/day ranges. */
  @Test
  public void date() {
    assertTrue(new UriTokenizer("12345-12-25").next(TokenKind.DateValue));
    assertTrue(new UriTokenizer("-0001-12-24").next(TokenKind.DateValue));
    assertFalse(new UriTokenizer("1234-13-01").next(TokenKind.DateValue));
    assertFalse(new UriTokenizer("1234-12-32").next(TokenKind.DateValue));
    assertFalse(new UriTokenizer("123-01-01").next(TokenKind.DateValue));
    assertFalse(new UriTokenizer("1234-00-01").next(TokenKind.DateValue));
    assertFalse(new UriTokenizer("1234-01-00").next(TokenKind.DateValue));
    wrongToken(TokenKind.DateValue, "2000-12-29", 'A');
    wrongToken(TokenKind.DateValue, "0001-01-01", 'A');
    wrongToken(TokenKind.DateValue, "-12345-01-31", 'A');
  }

  /** Date-time-offset literals: optional fractional seconds, 'Z' or numeric offsets; 'T'/'z' case-insensitive. */
  @Test
  public void dateTimeOffset() {
    assertTrue(new UriTokenizer("1234-12-25T11:12:13.456Z").next(TokenKind.DateTimeOffsetValue));
    assertTrue(new UriTokenizer("-1234-12-25t01:12z").next(TokenKind.DateTimeOffsetValue));
    assertTrue(new UriTokenizer("-1234-12-25T21:22:23+01:00").next(TokenKind.DateTimeOffsetValue));
    assertTrue(new UriTokenizer("1234-12-25T11:12:13-00:30").next(TokenKind.DateTimeOffsetValue));
    assertFalse(new UriTokenizer("1234-10-01").next(TokenKind.DateTimeOffsetValue));
    wrongToken(TokenKind.DateTimeOffsetValue, "-1234-12-25T11:12:13.456+01:00", 'P');
  }

  /** Time-of-day literals: two-digit fields, valid ranges, optional non-empty fraction. */
  @Test
  public void timeOfDay() {
    assertTrue(new UriTokenizer("11:12:13").next(TokenKind.TimeOfDayValue));
    assertTrue(new UriTokenizer("11:12:13.456").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("24:00:00").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("01:60:00").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("01:00:60").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("01:00:00.").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("0:02:03").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("01:0:03").next(TokenKind.TimeOfDayValue));
    assertFalse(new UriTokenizer("01:02:0").next(TokenKind.TimeOfDayValue));
    wrongToken(TokenKind.TimeOfDayValue, "11:12", '-');
  }

  /** Decimal literals with a mandatory dot and digits on both sides; ',' is not a decimal separator. */
  @Test
  public void decimal() {
    assertTrue(new UriTokenizer("1.2").next(TokenKind.DecimalValue));
    assertFalse(new UriTokenizer(".1").next(TokenKind.DecimalValue));
    assertTrue(new UriTokenizer("-12.34").next(TokenKind.DecimalValue));
    assertTrue(new UriTokenizer("1234567890.0123456789").next(TokenKind.DecimalValue));
    assertFalse(new UriTokenizer("0,1").next(TokenKind.DecimalValue));
    assertFalse(new UriTokenizer("0..1").next(TokenKind.DecimalValue));
  }

  /** Double literals: the special values NaN/INF/-INF (case-sensitive) and exponent notation. */
  @Test
  public void doubleValue() {
    assertTrue(new UriTokenizer("NaN").next(TokenKind.DoubleValue));
    assertTrue(new UriTokenizer("-INF").next(TokenKind.DoubleValue));
    assertTrue(new UriTokenizer("INF").next(TokenKind.DoubleValue));
    assertFalse(new UriTokenizer("inf").next(TokenKind.DoubleValue));
    assertTrue(new UriTokenizer("1.2E3").next(TokenKind.DoubleValue));
    assertTrue(new UriTokenizer("-12.34e-05").next(TokenKind.DoubleValue));
    assertTrue(new UriTokenizer("1E2").next(TokenKind.DoubleValue));
    assertFalse(new UriTokenizer("1.E2").next(TokenKind.DoubleValue));
    wrongToken(TokenKind.DoubleValue, "-12.34E+5", 'i');
  }

  /** Duration literals: case-insensitive prefix and designators, optional sign and fractional seconds. */
  @Test
  public void duration() {
    assertTrue(new UriTokenizer("duration'P'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("DURATION'P1D'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'PT'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'PT1H'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'pt1M'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'PT1S'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'PT1.2s'").next(TokenKind.DurationValue));
    assertTrue(new UriTokenizer("duration'-p1dt2h3m4.5s'").next(TokenKind.DurationValue));
    assertFalse(new UriTokenizer("-p1dt2h3m4.5s").next(TokenKind.DurationValue));
    assertFalse(new UriTokenizer("duration'-p1dt2h3m4.5s").next(TokenKind.DurationValue));
    assertFalse(new UriTokenizer("duration'2h3m4s'").next(TokenKind.DurationValue));
    wrongToken(TokenKind.DurationValue, "duration'P1DT2H3M4.5S'", ':');
  }

  /** Binary literals in base64url; covers every final-character alternative and the optional padding. */
  @Test
  public void binary() {
    assertTrue(new UriTokenizer("binary''").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("Binary'bm93'").next(TokenKind.BinaryValue));

    // all cases with three base64 characters (and one fill character) at the end
    assertTrue(new UriTokenizer("binary'QUA='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUE='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUI='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUM='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUQ='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUU='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUY='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUc='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUg='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUk='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUo='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUs='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QUw='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QU0='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QU4='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'QU8='").next(TokenKind.BinaryValue));
    assertFalse(new UriTokenizer("binary'QUB='").next(TokenKind.BinaryValue));

    // all cases with two base64 characters (and two fill characters) at the end
    assertTrue(new UriTokenizer("BINARY'VGVzdA=='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'U-RnZQ=='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'Yg=='").next(TokenKind.BinaryValue));
    assertTrue(new UriTokenizer("binary'Yw=='").next(TokenKind.BinaryValue));

    // without optional fill character
    assertTrue(new UriTokenizer("binary'T0RhdGE'").next(TokenKind.BinaryValue));

    // special character '_' (the other, '-', already has been used above)
    assertTrue(new UriTokenizer("binary'V_ZydGVy'").next(TokenKind.BinaryValue));

    wrongToken(TokenKind.BinaryValue, "binary'VGVzdA=='", '+');
  }

  /** Enumeration literals: qualified type name followed by a quoted, comma-separated member list. */
  @Test
  public void enumValue() {
    assertTrue(new UriTokenizer("namespace.name'value'").next(TokenKind.EnumValue));
    assertTrue(new UriTokenizer("namespace.name'flag1,flag2,-3'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("namespace.name'1flag'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("namespace.name'flag1,,flag2'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("namespace.name',value'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("namespace.name'value,'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("namespace.name''").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("'1'").next(TokenKind.EnumValue));
    assertFalse(new UriTokenizer("1").next(TokenKind.EnumValue));
    wrongToken(TokenKind.EnumValue, "namespace.name'_1,_2,3'", ';');
  }

  /** JSON arrays and objects as single tokens, including nesting, string escapes, and malformed input. */
  @Test
  public void json() {
    // An empty string or a bare primitive JSON value is not allowed.
    assertFalse(new UriTokenizer("").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("1").next(TokenKind.jsonArrayOrObject));

    // object with values
    assertTrue(new UriTokenizer("{}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":0}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":true}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":false}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":null}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":\"value\"}").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":\"value\",\"name2\":null}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\"}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{0}").next(TokenKind.jsonArrayOrObject));

    // array with values
    assertTrue(new UriTokenizer("[]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[1]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[true]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[false]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[null]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[\"value\"]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[\"\"]").next(TokenKind.jsonArrayOrObject));
    // All JSON string escape sequences: \b \t \f \r \n \/ \\ \uXXXX \".
    assertTrue(new UriTokenizer("[\"\\b\\t\\f\\r\\nn\\/\\\\x\\uFE4Fu\\\"\"]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[1,2.0,3.4E5]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("[\"value\",null]").next(TokenKind.jsonArrayOrObject));

    // nesting
    assertTrue(new UriTokenizer("[{\"name\":\"value\"},{\"name\":\"value2\"}]").next(TokenKind.jsonArrayOrObject));
    assertTrue(new UriTokenizer("{\"name\":{\"name2\":\"value\"}}").next(TokenKind.jsonArrayOrObject));

    // unbalanced opening and closing
    assertFalse(new UriTokenizer("{").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{]").next(TokenKind.jsonArrayOrObject));

    // missing values
    assertFalse(new UriTokenizer("[1,]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[,1]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[1,,2]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[1,x]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[+\"x\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":1,}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{,\"name\":1}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":1,,\"name2\":2}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":1,x}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":1,\"name2\"}").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("{\"name\":1,\"name2\":}").next(TokenKind.jsonArrayOrObject));

    // wrong JSON strings
    assertFalse(new UriTokenizer("[\"a").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"a]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"a\"\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"\\x\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"\\ux\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"\\u1\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"\\u12x\"]").next(TokenKind.jsonArrayOrObject));
    assertFalse(new UriTokenizer("[\"\\u123x\"]").next(TokenKind.jsonArrayOrObject));

    wrongToken(TokenKind.jsonArrayOrObject, "[{\"name\":+123.456},null]", '\\');
  }

  /** Binary and unary operators; word operators need surrounding whitespace to be recognized. */
  @Test
  public void operators() {
    UriTokenizer tokenizer = new UriTokenizer("1 ne 2");
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertFalse(tokenizer.next(TokenKind.EqualsOperator));
    assertTrue(tokenizer.next(TokenKind.NotEqualsOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EOF));

    // No whitespace before the operator: not recognized.
    tokenizer = new UriTokenizer("-1ne 2");
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertFalse(tokenizer.next(TokenKind.NotEqualsOperator));

    // No whitespace after the operator: not recognized.
    tokenizer = new UriTokenizer("1 ne-2");
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertFalse(tokenizer.next(TokenKind.NotEqualsOperator));

    // Tabs count as whitespace, too.
    tokenizer = new UriTokenizer("1 \tle\t\t\t2");
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.LessThanOrEqualsOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EOF));

    // Unary minus is only an operator when not part of a numeric literal.
    assertTrue(new UriTokenizer("-x").next(TokenKind.MinusOperator));
    assertFalse(new UriTokenizer("-1").next(TokenKind.MinusOperator));
    assertFalse(new UriTokenizer("-INF").next(TokenKind.MinusOperator));
    assertFalse(new UriTokenizer("+").next(TokenKind.MinusOperator));

    assertFalse(new UriTokenizer("nottrue").next(TokenKind.NotOperator));
    assertFalse(new UriTokenizer("no true").next(TokenKind.NotOperator));

    tokenizer = new UriTokenizer("true or not false and 1 eq 2 add 3 sub 4 mul 5 div 6 mod 7");
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.OrOperator));
    assertTrue(tokenizer.next(TokenKind.NotOperator));
    assertTrue(tokenizer.next(TokenKind.BooleanValue));
    assertTrue(tokenizer.next(TokenKind.AndOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EqualsOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.AddOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.SubOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.MulOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.DivOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.ModOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.EOF));

    tokenizer = new UriTokenizer("1 gt 2 or 3 ge 4 or 5 lt 6 or 7 has namespace.name'flag1,flag2'");
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.GreaterThanOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.OrOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.GreaterThanOrEqualsOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.OrOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.LessThanOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.OrOperator));
    assertTrue(tokenizer.next(TokenKind.IntegerValue));
    assertTrue(tokenizer.next(TokenKind.HasOperator));
    assertTrue(tokenizer.next(TokenKind.EnumValue));
    assertTrue(tokenizer.next(TokenKind.EOF));
  }

  /**
   * Built-in method tokens.  The loop derives each method's URI spelling from the
   * TokenKind name itself (lower-cased, with "geo" turned into the "geo." prefix)
   * so that every *Method token kind is covered automatically.
   */
  @Test
  public void methods() {
    UriTokenizer tokenizer = new UriTokenizer("now()");
    assertTrue(tokenizer.next(TokenKind.NowMethod));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.EOF));

    assertFalse(new UriTokenizer("no w()").next(TokenKind.NowMethod));
    assertFalse(new UriTokenizer("now ()").next(TokenKind.NowMethod));

    assertTrue(new UriTokenizer("maxdatetime()").next(TokenKind.MaxdatetimeMethod));
    assertTrue(new UriTokenizer("mindatetime()").next(TokenKind.MindatetimeMethod));

    for (final TokenKind tokenKind : TokenKind.values()) {
      if (tokenKind.name().endsWith("Method")) {
        assertTrue(tokenKind.name(),
            new UriTokenizer(
                tokenKind.name().substring(0, tokenKind.name().indexOf("Method"))
                    .toLowerCase(Locale.ROOT).replace("geo", "geo.") + '(')
                .next(tokenKind));
      }
    }
  }

  /** The asc/desc suffixes of $orderby items. */
  @Test
  public void suffixes() {
    UriTokenizer tokenizer = new UriTokenizer("p1 asc,p2 desc");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.AscSuffix));
    assertTrue(tokenizer.next(TokenKind.COMMA));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.DescSuffix));
    assertTrue(tokenizer.next(TokenKind.EOF));

    wrongToken(TokenKind.DescSuffix, " desc", 'D');
  }

  /** $search expressions: words, quoted phrases, and the uppercase AND/OR/NOT search operators. */
  @Test
  public void search() {
    UriTokenizer tokenizer = new UriTokenizer("a AND b OR NOT \"c\" d");
    assertTrue(tokenizer.next(TokenKind.Word));
    assertTrue(tokenizer.next(TokenKind.AndOperatorSearch));
    assertTrue(tokenizer.next(TokenKind.Word));
    assertFalse(tokenizer.next(TokenKind.AndOperatorSearch));
    assertTrue(tokenizer.next(TokenKind.OrOperatorSearch));
    assertTrue(tokenizer.next(TokenKind.NotOperatorSearch));
    assertTrue(tokenizer.next(TokenKind.Phrase));
    // An implicit AND is assumed between the phrase and the following word.
    assertTrue(tokenizer.next(TokenKind.AndOperatorSearch));
    assertTrue(tokenizer.next(TokenKind.Word));
    assertFalse(tokenizer.next(TokenKind.AndOperatorSearch));
    assertFalse(tokenizer.next(TokenKind.Word));
    assertFalse(tokenizer.next(TokenKind.Phrase));
    assertTrue(tokenizer.next(TokenKind.EOF));

    // Phrases support \\ and \" escapes; other backslash escapes are invalid.
    assertTrue(new UriTokenizer("\"a\\\\x\\\"\"").next(TokenKind.Phrase));
    assertFalse(new UriTokenizer("\"a\\\"").next(TokenKind.Phrase));
    assertFalse(new UriTokenizer("\"a\\x\"").next(TokenKind.Phrase));
    wrongToken(TokenKind.Phrase, "\"a\"", '\\');

    final String outsideBmpLetter = String.valueOf(Character.toChars(0x10330));
    assertTrue(new UriTokenizer("\"" + outsideBmpLetter + "\"").next(TokenKind.Phrase));
    assertTrue(new UriTokenizer(outsideBmpLetter).next(TokenKind.Word));
    assertFalse(new UriTokenizer("1").next(TokenKind.Word));
    // The search operator keywords themselves are not words.
    assertFalse(new UriTokenizer("AND").next(TokenKind.Word));
    assertFalse(new UriTokenizer("OR").next(TokenKind.Word));
    assertFalse(new UriTokenizer("NOT").next(TokenKind.Word));
  }

  /** Geo point literals; geometry is restricted to SRID 0 while geography uses 4326. */
  @Test
  public void geoPoint() {
    assertTrue(new UriTokenizer("geography'SRID=4326;Point(1.23 4.56)'").next(TokenKind.GeographyPoint));
    assertTrue(new UriTokenizer("GeOgRaPhY'SrId=4326;pOiNt(1 2)'").next(TokenKind.GeographyPoint));
    assertTrue(new UriTokenizer("geography'srid=4326;point(1.2E3 4.5E-6)'").next(TokenKind.GeographyPoint));
    wrongToken(TokenKind.GeographyPoint, "geography'SRID=4326;Point(1.23 4.56)'", 'x');

    assertTrue(new UriTokenizer("geometry'SRID=0;Point(1 2)'").next(TokenKind.GeometryPoint));
    assertFalse(new UriTokenizer("geometry'SRID=123456;Point(1 2)'").next(TokenKind.GeometryPoint));
    assertFalse(new UriTokenizer("geometry'SRID=123456;Point(1)'").next(TokenKind.GeometryPoint));
    wrongToken(TokenKind.GeometryPoint, "geometry'SRID=0;Point(1.23 4.56)'", ',');
  }

  /** Geo line-string literals with two or more positions. */
  @Test
  public void geoLineString() {
    assertTrue(new UriTokenizer("geography'SRID=4326;LineString(1.23 4.56,7 8)'")
        .next(TokenKind.GeographyLineString));
    wrongToken(TokenKind.GeographyLineString, "geography'SRID=4326;LineString(1.23 4.56,7 8)'", '{');

    assertTrue(new UriTokenizer("geometry'SRID=0;LineString(1 2,3 4,5 6,7 8)'")
        .next(TokenKind.GeometryLineString));
    wrongToken(TokenKind.GeometryLineString, "geometry'SRID=0;LineString(1 2,3 4,5 6,7 8)'", '.');
  }

  /** Geo polygon literals: closed rings, optionally with interior rings. */
  @Test
  public void geoPolygon() {
    assertTrue(new UriTokenizer("geography'SRID=4326;Polygon((0 0,1 0,0 1,0 0))'").next(TokenKind.GeographyPolygon));
    assertTrue(new UriTokenizer("geometry'SRID=0;Polygon((0 0,1 0,0 1,0 0))'").next(TokenKind.GeometryPolygon));
    assertTrue(new UriTokenizer("geometry'SRID=0;Polygon((1 1,2 1,2 2,1 2,1 1),(0 0,4 0,4 4,0 4,0 0))'")
        .next(TokenKind.GeometryPolygon));
    assertTrue(new UriTokenizer(
        "geometry'SRID=0;Polygon((0 0,1 1,2 2,0 0),(1 1,2 1,2 2,1 2,1 1),(0 0,4 0,4 4,0 4,0 0))'")
        .next(TokenKind.GeometryPolygon));
    wrongToken(TokenKind.GeometryPolygon,
        "geometry'SRID=0;Polygon((0 0,4 0,4 4,0 4,0 0),(1 1,2 1,2 2,1 2,1 1))'", 'x');
  }

  /** Geo multi-point literals, including the empty collection. */
  @Test
  public void geoMultiPoint() {
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiPoint()'").next(TokenKind.GeographyMultiPoint));
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiPoint((0 0))'").next(TokenKind.GeographyMultiPoint));
    assertTrue(new UriTokenizer("geometry'SRID=0;MultiPoint((0 0),(1 1))'").next(TokenKind.GeometryMultiPoint));
    wrongToken(TokenKind.GeometryMultiPoint, "geometry'SRID=0;MultiPoint((0 0),(1 1),(2.3 4.5))'", 'x');
  }

  /** Geo multi-line-string literals, including the empty collection. */
  @Test
  public void geoMultiLineString() {
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiLineString()'").next(TokenKind.GeographyMultiLineString));
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiLineString((1 2,3 4))'")
        .next(TokenKind.GeographyMultiLineString));
    wrongToken(TokenKind.GeometryMultiLineString,
        "geometry'SRID=0;MultiLineString((1.23 4.56,7 8),(0 0,1 1),(2 2,3 3))'", '}');
  }

  /** Geo multi-polygon literals, including the empty collection. */
  @Test
  public void geoMultiPolygon() {
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiPolygon()'").next(TokenKind.GeographyMultiPolygon));
    assertTrue(new UriTokenizer("geography'SRID=4326;MultiPolygon(((0 0,1 0,0 1,0 0)))'")
        .next(TokenKind.GeographyMultiPolygon));
    wrongToken(TokenKind.GeometryMultiPolygon,
        "geometry'SRID=0;MultiPolygon(((0 0,4 0,4 4,0 4,0 0),(1 1,2 1,2 2,1 2,1 1)),"
            + "((0 0,40 0,40 40,0 40,0 0),(10 10,20 10,20 20,10 20,10 10)))'", 'x');
  }

  /** Geo collection literals, including nested collections and mixed members. */
  @Test
  public void geoCollection() {
    assertTrue(new UriTokenizer("geography'SRID=4326;Collection(Point(1 2))'").next(TokenKind.GeographyCollection));
    assertTrue(new UriTokenizer("geography'SRID=4326;Collection(Collection(Point(1 2),Point(3 4)))'")
        .next(TokenKind.GeographyCollection));
    assertTrue(new UriTokenizer("geography'SRID=4326;Collection(LineString(1 2,3 4))'")
        .next(TokenKind.GeographyCollection));
    assertTrue(new UriTokenizer("geography'SRID=4326;Collection(Polygon((0 0,1 0,0 1,0 0)))'")
        .next(TokenKind.GeographyCollection));
    assertTrue(new UriTokenizer("geography'SRID=4326;Collection(MultiPoint(),MultiLineString(),MultiPolygon())'")
        .next(TokenKind.GeographyCollection));
    wrongToken(TokenKind.GeometryCollection, "geometry'SRID=0;Collection(Point(1 2),Point(3 4))'", 'x');
  }

  /** $apply aggregation transformations and their keywords (with, as, from, rollup, ...). */
  @Test
  public void aggregation() {
    UriTokenizer tokenizer = new UriTokenizer("$apply=aggregate(a with sum as s from x with average)");
    assertTrue(tokenizer.next(TokenKind.APPLY));
    assertTrue(tokenizer.next(TokenKind.EQ));
    assertTrue(tokenizer.next(TokenKind.AggregateTrafo));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.WithOperator));
    assertTrue(tokenizer.next(TokenKind.SUM));
    assertTrue(tokenizer.next(TokenKind.AsOperator));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.FromOperator));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.WithOperator));
    assertTrue(tokenizer.next(TokenKind.AVERAGE));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.EOF));

    tokenizer = new UriTokenizer("a with min as m");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.WithOperator));
    assertTrue(tokenizer.next(TokenKind.MIN));

    tokenizer = new UriTokenizer("a with countdistinct as c");
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.WithOperator));
    assertTrue(tokenizer.next(TokenKind.COUNTDISTINCT));

    assertTrue(new UriTokenizer("identity").next(TokenKind.IDENTITY));

    assertTrue(new UriTokenizer("bottomcount(1,x)").next(TokenKind.BottomCountTrafo));
    assertTrue(new UriTokenizer("bottompercent(1,x)").next(TokenKind.BottomPercentTrafo));
    assertTrue(new UriTokenizer("bottomsum(1,x)").next(TokenKind.BottomSumTrafo));
    assertTrue(new UriTokenizer("topcount(1,x)").next(TokenKind.TopCountTrafo));
    assertTrue(new UriTokenizer("toppercent(1,x)").next(TokenKind.TopPercentTrafo));
    assertTrue(new UriTokenizer("topsum(1,x)").next(TokenKind.TopSumTrafo));

    assertTrue(new UriTokenizer("compute(a mul b as m)").next(TokenKind.ComputeTrafo));
    assertTrue(new UriTokenizer("search(a)").next(TokenKind.SearchTrafo));
    assertTrue(new UriTokenizer("expand(a)").next(TokenKind.ExpandTrafo));
    assertTrue(new UriTokenizer("filter(true)").next(TokenKind.FilterTrafo));

    tokenizer = new UriTokenizer("groupby((rollup($all,x,y)))");
    assertTrue(tokenizer.next(TokenKind.GroupByTrafo));
    assertTrue(tokenizer.next(TokenKind.OPEN));
    assertTrue(tokenizer.next(TokenKind.RollUpSpec));
    assertTrue(tokenizer.next(TokenKind.ROLLUP_ALL));
    assertTrue(tokenizer.next(TokenKind.COMMA));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.COMMA));
    assertTrue(tokenizer.next(TokenKind.ODataIdentifier));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.CLOSE));
    assertTrue(tokenizer.next(TokenKind.EOF));

    assertTrue(new UriTokenizer("isdefined(x)").next(TokenKind.IsDefinedMethod));
  }

  /**
   * Asserts that a given token kind is recognized exactly for the given value:
   * a disturbing character prepended, appended, or substituted at any position
   * must break recognition (appended: token still matches but EOF does not follow).
   *
   * @param kind the token kind that {@code value} is expected to match
   * @param value the exact text of a valid token
   * @param disturbCharacter a character that invalidates the token wherever it is placed
   */
  private void wrongToken(final TokenKind kind, final String value, final char disturbCharacter) {
    assertFalse(new UriTokenizer(disturbCharacter + value).next(kind));

    final UriTokenizer tokenizer = new UriTokenizer(value + disturbCharacter);
    assertTrue(tokenizer.next(kind));
    assertEquals(value, tokenizer.getText());
    assertFalse(tokenizer.next(TokenKind.EOF));

    // Place the disturbing character at every position in the value string
    // and check that this leads to a failed token recognition.
    for (int index = 0; index < value.length(); index++) {
      assertFalse("Error at index " + index,
          new UriTokenizer(value.substring(0, index) + disturbCharacter + value.substring(index + 1)).next(kind));
    }
  }
}
apache/derby
34,319
java/org.apache.derby.engine/org/apache/derby/impl/sql/execute/TableScanResultSet.java
/* Derby - Class org.apache.derby.impl.sql.execute.TableScanResultSet Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.execute; import java.util.Properties; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.shared.common.reference.SQLState; import org.apache.derby.shared.common.i18n.MessageService; import org.apache.derby.iapi.services.loader.GeneratedMethod; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.sql.execute.CursorResultSet; import org.apache.derby.iapi.sql.execute.ExecIndexRow; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.sql.execute.NoPutResultSet; import org.apache.derby.iapi.store.access.BackingStoreHashtable; import org.apache.derby.iapi.store.access.ConglomerateController; import org.apache.derby.iapi.store.access.DynamicCompiledOpenConglomInfo; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.store.access.ScanController; import org.apache.derby.iapi.store.access.StaticCompiledOpenConglomInfo; import org.apache.derby.iapi.store.access.TransactionController; import org.apache.derby.iapi.types.DataValueDescriptor; import 
org.apache.derby.iapi.types.RowLocation;

/**
 * Takes a table and a table filter and returns
 * the table's rows satisfying the filter as a result set.
 *
 * There are several things we could do during object
 * construction that are done in the open and next calls, to
 * improve performance.
 */
class TableScanResultSet extends ScanResultSet
    implements CursorResultSet, Cloneable
{
    // The store-level scan; opened lazily (see openCore) and nulled on close.
    protected ScanController scanController;
    // True while scanController is open and positioned for this result set.
    protected boolean scanControllerOpened;
    // True if the conglomerate is keyed (index); determines where the base
    // RowLocation comes from (last column vs. scanController.fetchLocation).
    protected boolean isKeyed;
    protected boolean firstScan = true;
    protected ExecIndexRow startPosition;
    protected ExecIndexRow stopPosition;

    // set in constructor and not altered during
    // life of object.
    protected long conglomId;
    protected DynamicCompiledOpenConglomInfo dcoci;
    protected StaticCompiledOpenConglomInfo scoci;
    protected GeneratedMethod startKeyGetter;
    protected int startSearchOperator;
    protected GeneratedMethod stopKeyGetter;
    protected int stopSearchOperator;
    public Qualifier[][] qualifiers;
    public String userSuppliedOptimizerOverrides;
    protected boolean runTimeStatisticsOn;
    protected int[] indexCols;      //index keys base column position array
    public int rowsPerRead;
    public boolean forUpdate;
    final boolean sameStartStopPosition;
    // True once the single next() of a one-row scan has been done.
    protected boolean nextDone;
    // Reused RowLocation template for heap scans (see getRowLocation).
    private RowLocation rlTemplate;

    // Run time statistics
    private Properties scanProperties;
    public String startPositionString;
    public String stopPositionString;
    public boolean isConstraint;
    public boolean coarserLock;
    public boolean oneRowScan;

    // Number of rows fetched by the current open of the scan; used to refresh
    // the store's row-count estimate in setRowCountIfPossible.
    protected long rowsThisScan;

    // Row-count estimate captured when the scan controller was opened.
    private long estimatedRowCount;

    /**
     * This field is used by beetle 3865, updateable cursor using index. It
     * is a hash table containing updated rows that are thrown into future
     * direction of the index scan, and as a result we'll hit it again but
     * should skip it. The hash table will spill to disk if it grows too big
     * to be kept in memory.
     */
    protected BackingStoreHashtable past2FutureTbl;

    // For Scrollable insensitive updatable result sets, only qualify a row the
    // first time it's been read, since an update can change a row so that it
    // no longer qualifies
    protected boolean qualify;

    // currentRowIsValid is set to the result of positioning at a rowLocation.
    // It will be true if the positioning was successful and false if the row
    // was deleted under our feet. Whenenver currentRowIsValid is false it means
    // that the row has been deleted.
    protected boolean currentRowIsValid;

    // Indicates whether the scan has been positioned back to a previously read
    // row, or it is accessing a row for the first time.
    protected boolean scanRepositioned;

    //
    // class interface
    //

    /**
     * Constructor. Captures the compile-time scan description; the actual
     * store scan is opened lazily in {@link #openCore}.
     */
    TableScanResultSet(long conglomId,
        StaticCompiledOpenConglomInfo scoci,
        Activation activation,
        int resultRowTemplate,
        int resultSetNumber,
        GeneratedMethod startKeyGetter, int startSearchOperator,
        GeneratedMethod stopKeyGetter, int stopSearchOperator,
        boolean sameStartStopPosition,
        Qualifier[][] qualifiers,
        String tableName,
        String userSuppliedOptimizerOverrides,
        String indexName,
        boolean isConstraint,
        boolean forUpdate,
        int colRefItem,
        int indexColItem,
        int lockMode,
        boolean tableLocked,
        int isolationLevel,
        int rowsPerRead,
        boolean oneRowScan,
        double optimizerEstimatedRowCount,
        double optimizerEstimatedCost)
            throws StandardException
    {
        super(activation, resultSetNumber, resultRowTemplate,
              lockMode, tableLocked, isolationLevel,
              colRefItem,
              optimizerEstimatedRowCount, optimizerEstimatedCost);

        this.conglomId = conglomId;

        /* Static info created at compile time and can be shared across
         * instances of the plan.
         * Dynamic info created on 1st opening of this ResultSet as
         * it cannot be shared.
         */
        this.scoci = scoci;

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT( activation!=null,
                "table scan must get activation context");
            if (sameStartStopPosition)
            {
                SanityManager.ASSERT(stopKeyGetter == null,
                    "stopKeyGetter expected to be null when sameStartStopPosition is true");
            }
        }

        this.startKeyGetter = startKeyGetter;
        this.startSearchOperator = startSearchOperator;
        this.stopKeyGetter = stopKeyGetter;
        this.stopSearchOperator = stopSearchOperator;
        this.sameStartStopPosition = sameStartStopPosition;
        this.qualifiers = qualifiers;
        this.tableName = tableName;
        this.userSuppliedOptimizerOverrides = userSuppliedOptimizerOverrides;
        this.indexName = indexName;
        this.isConstraint = isConstraint;
        this.forUpdate = forUpdate;
        this.rowsPerRead = rowsPerRead;
        this.oneRowScan = oneRowScan;

        if (indexColItem != -1)
        {
            this.indexCols = (int[])(activation.getPreparedStatement().
                        getSavedObject(indexColItem));
        }
        if (indexCols != null)
            activation.setForUpdateIndexScan(this);

        runTimeStatisticsOn = (activation != null &&
                               activation.getLanguageConnectionContext().getRunTimeStatisticsMode());

        /* Always qualify the first time a row is being read */
        qualify = true;
        currentRowIsValid = false;
        scanRepositioned = false;

        recordConstructorTime();
    }

    //
    // ResultSet interface (leftover from NoPutResultSet)
    //

    /**
     * open a scan on the table. scan parameters are evaluated
     * at each open, so there is probably some way of altering
     * their values...
     *
     * @exception StandardException thrown on failure to open
     */
    public void openCore() throws StandardException
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT( ! isOpen,
                "TableScanResultSet already open");

        // Get the current transaction controller
        TransactionController tc = activation.getTransactionController();

        initIsolationLevel();

        // Dynamic conglomerate info cannot be shared across plan instances,
        // so it is created on the first open only.
        if (dcoci == null)
            dcoci = tc.getDynamicCompiledConglomInfo(conglomId);

        initStartAndStopKey();

        /* NOTE: We always open the ScanController on the 1st open
         * to do the keyed conglomerate check.
         */
        // Determine whether the conglomerate is keyed. This determines
        // how we find the RowLocation for the base heap. For non-keyed
        // conglomerates, we ask the scan. For keyed conglomerates, it
        // is the last column in the row.
        //
        // Do this here, rather than in the constructor, so we can avoid
        // throwing exceptions from the constructor
        if (firstScan)
        {
            openScanController(tc);
            isKeyed = scanController.isKeyed();

            /*
            ** If scan tracing is turned on, print information about this
            ** TableScanResultSet when it is first opened. We would like
            ** to do this when it is constructed, but it is not always
            ** possible to get the start and stop positioners at the time
            ** this object is constructed (because they may depend on outer
            ** rows).
            */
            if (SanityManager.DEBUG)
            {
                if (SanityManager.DEBUG_ON("ScanTrace"))
                {
                    //traceScanParameters();
                }
            }
        }

        // Check whether there are any comparisons with unordered nulls
        // on either the start or stop position. If there are, we can
        // (and must) skip the scan, because no rows can qualify
        if (skipScan(startPosition, stopPosition))
        {
            scanControllerOpened = false;
        }
        /* NOTE: We always open the ScanController on the 1st open
         * to do the keyed conglomerate check, so we only need to
         * do it here if not the 1st scan.
         */
        else if (! firstScan)
        {
            openScanController(tc);
        }

        /* If the scan is on an index and opened for update,
         * then we cache the scan controller and conglomerate
         * number in the activation so that the scan controller
         * can be re-used by the update/delete if the index
         * that we are scanning also needs to be updated.
         */
        if (forUpdate && isKeyed)
        {
            activation.setIndexScanController(scanController);
            activation.setIndexConglomerateNumber(conglomId);
        }

        firstScan = false;
        isOpen = true;
        numOpens++;
        nextDone = false;
        openTime += getElapsedMillis(beginTime);
    }

    /**
     * Initialize the {@code startPosition} and {@code stopPosition} fields
     * which are used to limit the rows returned by the scan.
     */
    void initStartAndStopKey() throws StandardException
    {
        if (startKeyGetter != null)
        {
            startPosition = (ExecIndexRow) startKeyGetter.invoke(activation);
            // When start and stop are the same key, share the row object;
            // stopKeyGetter is asserted null in the constructor for this case.
            if (sameStartStopPosition)
            {
                stopPosition = startPosition;
            }
        }
        if (stopKeyGetter != null)
        {
            stopPosition = (ExecIndexRow) stopKeyGetter.invoke(activation);
        }
    }

    /*
    ** Open the scan controller
    **
    ** @param transaction controller will open one if null
    */
    protected void openScanController(TransactionController tc)
        throws StandardException
    {
        DataValueDescriptor[] startPositionRow =
            startPosition == null ? null : startPosition.getRowArray();
        DataValueDescriptor[] stopPositionRow =
            stopPosition == null ? null : stopPosition.getRowArray();

        // Clear the Qualifiers's Orderable cache
        if (qualifiers != null)
        {
            clearOrderableCache(qualifiers);
        }

        // Get the current transaction controller
        if (tc == null)
            tc = activation.getTransactionController();

        int openMode = 0;
        if (forUpdate)
        {
            openMode = TransactionController.OPENMODE_FORUPDATE;

            // Cursor activations take update locks up front so the later
            // positioned update/delete does not deadlock against readers.
            if (activation.isCursorActivation())
                openMode |= TransactionController.OPENMODE_USE_UPDATE_LOCKS;
        }

        scanController = tc.openCompiledScan(
                activation.getResultSetHoldability(),
                openMode,
                lockMode,
                isolationLevel,
                accessedCols,
                startPositionRow,   // not used when giving null start position
                startSearchOperator,
                qualifiers,
                stopPositionRow,    // not used when giving null stop position
                stopSearchOperator,
                scoci,
                dcoci);

        /* Remember that we opened the scan */
        scanControllerOpened = true;

        rowsThisScan = 0;

        /*
        ** Inform the activation of the estimated number of rows. Only
        ** do it here, not in reopen, so that we don't do this costly
        ** check too often.
        */
        estimatedRowCount = scanController.getEstimatedRowCount();
        activation.informOfRowCount(
                    this, scanController.getEstimatedRowCount());
    }

    /*
    ** reopen the scan controller
    */
    protected void reopenScanController() throws StandardException
    {
        DataValueDescriptor[] startPositionRow =
            startPosition == null ? null : startPosition.getRowArray();
        DataValueDescriptor[] stopPositionRow =
            stopPosition == null ? null : stopPosition.getRowArray();

        rowsThisScan = 0;

        // Clear the Qualifiers's Orderable cache
        if (qualifiers != null)
        {
            clearOrderableCache(qualifiers);
        }
        scanController.reopenScan(
                        startPositionRow,
                        startSearchOperator,
                        qualifiers,
                        stopPositionRow,
                        stopSearchOperator);

        /* Remember that we opened the scan */
        scanControllerOpened = true;
    }

    /**
     * Reopen a table scan. Here we take advantage
     * of the reopenScan() interface on scanController
     * for optimimal performance on joins where we are
     * an inner table.
     *
     * @exception StandardException thrown on failure to open
     */
    public void reopenCore() throws StandardException
    {
        beginTime = getCurrentTimeMillis();

        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isOpen,
                "TableScanResultSet not open, cannot reopen");

        initStartAndStopKey();

        // Check whether there are any comparisons with unordered nulls
        // on either the start or stop position. If there are, we can
        // (and must) skip the scan, because no rows can qualify
        if (skipScan(startPosition, stopPosition))
        {
            scanControllerOpened = false;
        }
        else
        {
            // scanController can be null here if a previous open was
            // skipped entirely (unordered-null start/stop positions).
            if (scanController == null)
                openScanController((TransactionController)null);
            else
                reopenScanController();
        }

        numOpens++;
        nextDone = false;
        openTime += getElapsedMillis(beginTime);
    }

    /**
     * Fetch the next candidate row from the store.
     * Overridden by ValidateCheckConstraintResultSet to alter loop control.
     *
     * @param moreRows result of the previous fetch (unused here)
     * @return true if a row was fetched into the candidate row array
     */
    boolean loopControl(boolean moreRows) throws StandardException
    {
        return scanController.fetchNext(candidate.getRowArray());
    }

    /**
     * Return the next row (if any) from the scan (if open).
     *
     * @exception StandardException thrown on failure to get next row
     */
    public ExecRow getNextRowCore() throws StandardException
    {
        if( isXplainOnlyMode() )
            return null;

        checkCancellationFlag();

        // (Re)build the compact row view after a reposition or on first use.
        if (currentRow == null || scanRepositioned)
        {
            currentRow = getCompactRow(candidate, accessedCols, isKeyed);
        }

        beginTime = getCurrentTimeMillis();

        ExecRow result = null;

        if ( isOpen  && !nextDone)
        {
            /* Only need to do 1 next per scan
             * for 1 row scans.
             */
            nextDone = oneRowScan;

            if (scanControllerOpened)
            {
                boolean moreRows = true;

                while (true)
                {
                    // loop control overriden by subclass
                    // ValidateCheckConstraintResultSet..
                    if (! (moreRows = loopControl(moreRows)))
                    {
                        break;
                    }
                    rowsSeen++;
                    rowsThisScan++;

                    /*
                    ** Skip rows where there are start or stop positioners
                    ** that do not implement ordered null semantics and
                    ** there are columns in those positions that contain
                    ** null.
                    ** No need to check if start and stop positions are the
                    ** same, since all predicates in both will be ='s,
                    ** and hence evaluated in the store.
                    */
                    if ((! sameStartStopPosition) && skipRow(candidate))
                    {
                        rowsFiltered++;
                        continue;
                    }

                    /* beetle 3865, updateable cursor use index. If we have a hash table that
                     * holds updated records, and we hit it again, skip it, and remove it from
                     * hash since we can't hit it again, and we have a space in hash, so can
                     * stop scanning forward.
                     */
                    if (past2FutureTbl != null)
                    {
                        RowLocation rowLoc = (RowLocation)
                            currentRow.getColumn(currentRow.nColumns());
                        if (past2FutureTbl.remove(rowLoc) != null)
                        {
                            continue;
                        }
                    }

                    result = currentRow;
                    break;
                }

                /*
                ** If we just finished a full scan of the heap, update
                ** the number of rows in the scan controller.
                **
                ** NOTE: It would be more efficient to only update the
                ** scan controller if the optimizer's estimated number of
                ** rows were wrong by more than some threshold (like 10%).
                ** This would require a little more work than I have the
                ** time for now, however, as the row estimate that is given
                ** to this result set is the total number of rows for all
                ** scans, not the number of rows per scan.
                */
                if (! moreRows)
                {
                    setRowCountIfPossible(rowsThisScan);
                    currentRow = null;
                }
            }
        }

        setCurrentRow(result);
        currentRowIsValid = true;
        scanRepositioned = false;
        qualify = true;

        nextTime += getElapsedMillis(beginTime);
        return result;
    }

    /**
     * If the result set has been opened,
     * close the open scan.
     * @exception StandardException on error
     */
    public void close() throws StandardException
    {
        beginTime = getCurrentTimeMillis();
        if ( isOpen )
        {
            /*
            ** If scan tracing is turned on, print information about this
            ** TableScanResultSet when it is closed.
            */
            if (SanityManager.DEBUG)
            {
                if (SanityManager.DEBUG_ON("ScanTrace"))
                {
                    //traceClose();
                }
            }

            // we don't want to keep around a pointer to the
            // row ... so it can be thrown away.
            // REVISIT: does this need to be in a finally
            // block, to ensure that it is executed?
            clearCurrentRow();
            ;

            if (scanController != null)
            {
                // This is where we get the positioner info for inner tables
                if (runTimeStatisticsOn)
                {
                    // This is where we get the scan properties for a subquery
                    scanProperties = getScanProperties();
                    startPositionString = printStartPosition();
                    stopPositionString = printStopPosition();
                }
                scanController.close();
                scanController = null; // should not access after close

                // Updatable index scans are cached in the activation for
                // easy access from IndexChanger. Remember to clear the cached
                // info here, but only if this is the result set that cached
                // it in the first place (DERBY-4585).
                if (forUpdate && isKeyed)
                {
                    activation.clearIndexScanInfo();
                }
            }
            scanControllerOpened = false;
            startPosition = null;
            stopPosition = null;

            super.close();

            if (indexCols != null)
            {
                // Return the heap conglomerate controller borrowed for
                // the updatable index-scan case (beetle 3865).
                ConglomerateController borrowedBaseCC =
                    activation.getHeapConglomerateController();
                if (borrowedBaseCC != null)
                {
                    borrowedBaseCC.close();
                    activation.clearHeapConglomerateController();
                }
            }
            if (past2FutureTbl != null)
            {
                past2FutureTbl.close();
                past2FutureTbl = null;
            }
        }
        else if (SanityManager.DEBUG)
            SanityManager.DEBUG("CloseRepeatInfo","Close of TableScanResultSet repeated");

        closeTime += getElapsedMillis(beginTime);
    }

    /**
     * Return the total amount of time spent in this ResultSet
     *
     * @param type CURRENT_RESULTSET_ONLY - time spent only in this ResultSet
     *        ENTIRE_RESULTSET_TREE - time spent in this ResultSet and below.
     *
     * @return long The total amount of time spent (in milliseconds).
     */
    public long getTimeSpent(int type)
    {
        long totTime = constructorTime + openTime + nextTime + closeTime;

        /* RESOLVE - subtract out store time later, when available */
        // NOTE(review): both branches currently return the same value; a table
        // scan is a leaf node, so tree time equals its own time.
        if (type == NoPutResultSet.CURRENT_RESULTSET_ONLY)
        {
            return totTime;
        }
        else
        {
            return totTime;
        }
    }

    //
    // CursorResultSet interface
    //

    /**
     * This result set has its row location from
     * the last fetch done. If the cursor is closed,
     * or the row has been deleted a null is returned.
     *
     * @see CursorResultSet
     *
     * @return the row location of the current cursor row.
     * @exception StandardException thrown on failure to get row location
     */
    public RowLocation getRowLocation() throws StandardException
    {
        RowLocation rl;

        if (! isOpen) return null;

        if ( ! scanControllerOpened)
            return null;

        /*
        ** If the conglomerate is keyed, the row location of the base row
        ** is in the last column of the current row. If it's not keyed,
        ** we get the row location from the scan of the heap.
        */
        if (isKeyed)
        {
            if (SanityManager.DEBUG)
            {
                SanityManager.ASSERT(currentRow != null,
                  "There must be a current row when fetching the row location");
            }

            rl = (RowLocation) currentRow.getColumn(
                                                currentRow.nColumns());
        }
        else
        {
            if (currentRowIsValid)
            {
                // we reuse the same rowlocation object across several calls.
                if (rlTemplate == null)
                    rlTemplate = scanController.newRowLocationTemplate();
                rl = rlTemplate;
                try
                {
                    scanController.fetchLocation(rl);
                }
                catch (StandardException se)
                {
                    if (se.getMessageId().
                            equals(SQLState.HEAP_SCAN_NOT_POSITIONED))
                    {
                        //Have a easier to understand error message than what
                        //we get from store
                        throw StandardException.
                            newException(SQLState.NO_CURRENT_ROW);
                    }
                    throw se;
                }
            }
            else
            {
                // Row has been deleted under our feet; no valid location.
                rl = null;
            }
        }

        return rl;
    }

    /**
     * This result set has its row from the last fetch done.
     * If the cursor is closed, the row has been deleted, or
     * no longer qualifies (for forward only result sets) a
     * null is returned.
     *
     * @see CursorResultSet
     *
     * @return the last row returned;
     * @exception StandardException thrown on failure.
     */
    /* RESOLVE - this should return activation.getCurrentRow(resultSetNumber),
     * once there is such a method. (currentRow is redundant)
     */
    public ExecRow getCurrentRow() throws StandardException
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(isOpen, "TSRS expected to be open");

        /* Nothing to do if we're not currently on a row or
         * if the current row get deleted out from under us
         * or if there is no current scan (can happen if the
         * scan is being skipped) or if the current position
         * no longer qualifies.
         */
        try
        {
            if ((currentRow == null) ||
                (!currentRowIsValid) ||
                (!scanControllerOpened) ||
                (qualify && scanController.isCurrentPositionDeleted()) ||
                (qualify && (!scanController.doesCurrentPositionQualify())))
            {
                return null;
            }
        }
        catch (StandardException se)
        {
            if (se.getMessageId().equals(SQLState.AM_SCAN_NOT_POSITIONED))
            {
                //bug 4515 - Have a easier to understand error message than what we get from store
                se=StandardException.newException(SQLState.NO_CURRENT_ROW);
                throw se;
            }
            // NOTE(review): other StandardExceptions fall through here without
            // rethrow and the method proceeds to re-fetch — confirm intended.
        }

        resultRowBuilder.reset(candidate);
        currentRow = getCompactRow(candidate, accessedCols, isKeyed);
        try
        {
            scanController.fetchWithoutQualify(candidate.getRowArray());
        }
        catch (StandardException se)
        {
            if (se.getMessageId().equals(SQLState.AM_RECORD_NOT_FOUND))
            {
                // Somehow the row got deleted between the above
                // doesCurrentPositionQualify() call and here (one way is if
                // this scan is read uncommitted isolation level).
                return null;
            }
            else
            {
                throw se;
            }
        }

        setCurrentRow(candidate);
        return currentRow;
    }

    /**
     * @see NoPutResultSet#positionScanAtRowLocation
     *
     * Also sets qualify to false so that later calls to getCurrentRow
     * will not attempt to re-qualify the current row.
     */
    public void positionScanAtRowLocation(RowLocation rl)
        throws StandardException
    {
        // Check if the scanController is a B-tree scan controller. Do not
        // attempt to re-position a b-tree controller.
        if (!isKeyed)
        {
            currentRowIsValid = scanController.positionAtRowLocation(rl);
        }
        qualify = false;
        scanRepositioned = true;
    }

    // NOTE(review): long commented-out debug helpers (traceScanParameters,
    // traceClose, tracePrintPosition, tracePrintQualifiers) removed; they are
    // dead code recoverable from version control. The ScanTrace hooks above
    // still reference them by name in comments.

    public String printStartPosition()
    {
        return printPosition(startSearchOperator, startKeyGetter, startPosition);
    }

    public String printStopPosition()
    {
        // With a shared start/stop key, the start getter/row describe both ends.
        if (sameStartStopPosition)
        {
            return printPosition(stopSearchOperator, startKeyGetter, startPosition);
        }
        else
        {
            return printPosition(stopSearchOperator, stopKeyGetter, stopPosition);
        }
    }

    /**
     * Return a start or stop positioner as a String.
     *
     * If we already generated the information, then use
     * that. Otherwise, invoke the activation to get it.
     */
    private String printPosition(int searchOperator,
                                 GeneratedMethod positionGetter,
                                 ExecIndexRow positioner)
    {
        String output = "";
        if (positionGetter == null)
        {
            return "\t" +
                MessageService.getTextMessage(SQLState.LANG_NONE) +
                "\n";
        }

        if (positioner == null)
        {
            if (numOpens == 0)
                return "\t" + MessageService.getTextMessage(
                    SQLState.LANG_POSITION_NOT_AVAIL) +
                    "\n";
            try
            {
                positioner = (ExecIndexRow)positionGetter.invoke(activation);
            }
            catch (StandardException e)
            {
                return "\t" + MessageService.getTextMessage(
                        SQLState.LANG_UNEXPECTED_EXC_GETTING_POSITIONER,
                        e.toString());
            }
        }
        if (positioner == null)
        {
            return "\t" +
                MessageService.getTextMessage(SQLState.LANG_NONE) +
                "\n";
        }
        String searchOp = null;
        switch (searchOperator)
        {
            case ScanController.GE:
                searchOp = ">=";
                break;

            case ScanController.GT:
                searchOp = ">";
                break;

            default:
                if (SanityManager.DEBUG)
                {
                    SanityManager.THROWASSERT("Unknown search operator " +
                                                searchOperator);
                }
                // NOTE: This does not have to be internationalized because
                // this code should never be reached.
                searchOp = "unknown value (" + searchOperator + ")";
                break;
        }

        output = output + "\t" +
                    MessageService.getTextMessage(
                        SQLState.LANG_POSITIONER,
                        searchOp,
                        String.valueOf(positioner.nColumns())) +
                    "\n";

        output = output + "\t" +
                    MessageService.getTextMessage(
                        SQLState.LANG_ORDERED_NULL_SEMANTICS) +
                    "\n";
        boolean colSeen = false;
        for (int position = 0; position < positioner.nColumns(); position++)
        {
            if (positioner.areNullsOrdered(position))
            {
                output = output + position + " ";
                colSeen = true;
            }

            if (colSeen && position == positioner.nColumns() - 1)
            {
                output = output +  "\n";
            }
        }

        return output;
    }

    /**
     * Collect runtime scan statistics from the scan controller into a
     * Properties object (best effort; store errors are ignored).
     */
    public Properties getScanProperties()
    {
        if (scanProperties == null)
        {
            scanProperties = new Properties();
        }
        try
        {
            if (scanController != null)
            {
                scanController.getScanInfo().getAllScanInfo(scanProperties);
                /* Did we get a coarser lock due to
                 * a covering lock, lock escalation
                 * or configuration?
                 */
                coarserLock = scanController.isTableLocked() &&
                    (lockMode == TransactionController.MODE_RECORD);
            }
        }
        catch(StandardException se)
        {
            // ignore - statistics are best effort; a failure here must not
            // break query execution.
        }

        return scanProperties;
    }

    /**
     * @see NoPutResultSet#requiresRelocking
     */
    public boolean requiresRelocking()
    {
        return(
            isolationLevel ==
                TransactionController.ISOLATION_READ_COMMITTED_NOHOLDLOCK);
    }

    /**
     * Update the number of rows in the scan controller.
     *
     * NOTE: It would be more efficient to only update the
     * scan controller if the optimizer's estimated number of
     * rows were wrong by more than some threshold (like 10%).
     * This would require a little more work than I have the
     * time for now, however, as the row estimate that is given
     * to this result set is the total number of rows for all
     * scans, not the number of rows per scan.
     *
     *
     * @param rowsThisScan The number of rows to update the scanController to
     *
     * @exception StandardException Thrown on error
     */
    protected final void setRowCountIfPossible(long rowsThisScan)
        throws StandardException
    {
        /*
        ** Is it a heap scan with no qualifiers (full table scan?)
        ** and is it not for update (we don't want to count rows we're
        ** about to delete.
        */
        if ( ( ! scanController.isKeyed() ) &&
             (qualifiers == null || qualifiers.length == 0) &&
             ( ! forUpdate ) )
        {
            // Only update rows if different by more than 10%
            long diff = rowsThisScan - estimatedRowCount;

            long tenPerCent = estimatedRowCount / 10;

            if (diff < 0)
                diff = -diff;

            if (diff > tenPerCent)
                scanController.setEstimatedRowCount(rowsThisScan);
        }
    }

    /**
     * Can we get instantaneous locks when getting share row
     * locks at READ COMMITTED.
     */
    protected boolean canGetInstantaneousLocks()
    {
        return false;
    }

    /**
     * Is this ResultSet or it's source result set for update
     *
     * @return Whether or not the result set is for update.
     */
    public boolean isForUpdate()
    {
        return forUpdate;
    }

    /**
     * Shallow clone this result set.  Used in trigger reference.
     * beetle 4373.
     */
    public Object clone()
    {
        Object clo = null;
        try
        {
            clo = super.clone();
        }
        catch (CloneNotSupportedException e) {}
        return clo;
    }
}
apache/qpid-jms
37,395
qpid-jms-client/src/test/java/org/apache/qpid/jms/transports/netty/NettyTcpTransportTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.qpid.jms.transports.netty; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.instanceOf; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNotSame; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertSame; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.io.IOException; import java.lang.reflect.Field; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.net.URI; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; import org.apache.qpid.jms.test.QpidJmsTestCase; import org.apache.qpid.jms.test.Wait; import org.apache.qpid.jms.test.proxy.TestProxy; import 
org.apache.qpid.jms.test.proxy.TestProxy.ProxyType; import org.apache.qpid.jms.transports.Transport; import org.apache.qpid.jms.transports.TransportListener; import org.apache.qpid.jms.transports.TransportOptions; import org.apache.qpid.jms.util.QpidJMSThreadFactory; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.channel.EventLoopGroup; import io.netty.channel.epoll.Epoll; import io.netty.channel.epoll.EpollEventLoopGroup; import io.netty.channel.kqueue.KQueue; import io.netty.channel.kqueue.KQueueEventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.handler.proxy.ProxyHandler; import io.netty.handler.proxy.Socks5ProxyHandler; import io.netty.util.ResourceLeakDetector; import io.netty.util.ResourceLeakDetector.Level; /** * Test basic functionality of the Netty based TCP transport. 
*/ public class NettyTcpTransportTest extends QpidJmsTestCase { private static final Logger LOG = LoggerFactory.getLogger(NettyTcpTransportTest.class); private static final int SEND_BYTE_COUNT = 1024; protected boolean transportClosed; protected final List<Throwable> exceptions = new ArrayList<Throwable>(); protected final List<ByteBuf> data = new ArrayList<ByteBuf>(); protected final AtomicInteger bytesRead = new AtomicInteger(); protected final TransportListener testListener = new NettyTransportListener(false); @Test @Timeout(60) public void testCloseOnNeverConnectedTransport() throws Exception { URI serverLocation = new URI("tcp://localhost:5762"); Transport transport = createTransport(serverLocation, testListener, createClientOptions()); assertFalse(transport.isConnected()); transport.close(); assertTrue(!transportClosed); assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testCreateWithNullOptionsThrowsIAE() throws Exception { URI serverLocation = new URI("tcp://localhost:5762"); try { createTransport(serverLocation, testListener, null); fail("Should have thrown IllegalArgumentException"); } catch (IllegalArgumentException iae) { } } @Test @Timeout(60) public void testConnectWithCustomThreadFactoryConfigured() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); QpidJMSThreadFactory factory = new QpidJMSThreadFactory("NettyTransportTest", true); Transport transport = createTransport(serverLocation, testListener, createClientOptions()); transport.setThreadFactory(factory); try { transport.connect(null, null); } catch (Exception e) { LOG.info("Failed to connect to: {} as expected.", serverLocation); fail("Should have failed to connect to the server: " + serverLocation); } assertTrue(transport.isConnected()); assertSame(factory, transport.getThreadFactory()); try { 
transport.setThreadFactory(factory); } catch (IllegalStateException expected) { LOG.trace("Caught expected state exception"); } transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testConnectWithoutRunningServer() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); server.close(); Transport transport = createTransport(serverLocation, testListener, createClientOptions()); try { transport.connect(null, null); fail("Should have failed to connect to the server: " + serverLocation); } catch (Exception e) { LOG.info("Failed to connect to: {} as expected.", serverLocation); } assertFalse(transport.isConnected()); transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testConnectWithoutListenerFails() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createTransport(serverLocation, null, createClientOptions()); try { transport.connect(null, null); fail("Should have failed to connect to the server: " + serverLocation); } catch (Exception e) { LOG.info("Failed to connect to: {} as expected.", serverLocation); } assertFalse(transport.isConnected()); transport.close(); } } @Test @Timeout(60) public void testConnectAfterListenerSetWorks() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createTransport(serverLocation, 
null, createClientOptions()); assertNull(transport.getTransportListener()); transport.setTransportListener(testListener); assertNotNull(transport.getTransportListener()); try { transport.connect(null, null); LOG.info("Connected to server:{} as expected.", serverLocation); } catch (Exception e) { fail("Should not have failed to connect to the server at " + serverLocation + " but got exception: " + e); } assertTrue(transport.isConnected()); transport.close(); } } @Test @Timeout(60) public void testConnectToServer() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); assertEquals(serverLocation, transport.getRemoteLocation()); transport.close(); // Additional close should not fail or cause other problems. transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testMultipleConnectionsToServer() throws Exception { final int CONNECTION_COUNT = 10; try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); List<Transport> transports = new ArrayList<Transport>(); for (int i = 0; i < CONNECTION_COUNT; ++i) { Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); transports.add(transport); } for (Transport transport : transports) { transport.close(); } } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testMultipleConnectionsSendReceive() throws Exception { final int CONNECTION_COUNT = 10; final int FRAME_SIZE = 8; ByteBuf sendBuffer = Unpooled.buffer(FRAME_SIZE); for (int i = 0; i < 8; ++i) { sendBuffer.writeByte('A'); } try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); List<Transport> transports = new ArrayList<Transport>(); for (int i = 0; i < CONNECTION_COUNT; ++i) { Transport transport = createTransport(serverLocation, testListener, createClientOptions()); try { transport.connect(null, null); transport.writeAndFlush(sendBuffer.copy()); transports.add(transport); } catch (Exception e) { fail("Should have connected to the server at " + serverLocation + " but got exception: " + e); } } assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { LOG.debug("Checking completion: read {} expecting {}", bytesRead.get(), (FRAME_SIZE * CONNECTION_COUNT)); return bytesRead.get() == (FRAME_SIZE * CONNECTION_COUNT); } }, 10000, 50)); for (Transport transport : transports) { transport.close(); } } assertTrue(exceptions.isEmpty()); } @Test @Timeout(60) public void testDetectServerClose() throws Exception { Transport transport = null; try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); // Ensure client connects and event handlers triggered. 
Wait.waitFor(() -> server.getChannelActiveCount() > 0, 10000, 50); } final Transport connectedTransport = transport; assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { return !connectedTransport.isConnected(); } }, 10000, 50)); assertTrue(data.isEmpty()); try { transport.close(); } catch (Exception ex) { fail("Close of a disconnect transport should not generate errors"); } } @Test @Timeout(60) public void testZeroSizedSentNoErrors() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); transport.writeAndFlush(Unpooled.buffer(0)); transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testCannotDereferenceSharedClosedEventLoopGroup() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); final TransportOptions sharedTransportOptions = createClientOptions(); sharedTransportOptions.setUseKQueue(false); sharedTransportOptions.setUseEpoll(false); sharedTransportOptions.setSharedEventLoopThreads(1); EventLoopGroupRef groupRef = null; Transport nioSharedTransport = createConnectedTransport(serverLocation, sharedTransportOptions); try { groupRef = getGroupRef(nioSharedTransport); assertNotNull(groupRef.group()); } finally { nioSharedTransport.close(); } try { groupRef.group(); fail("Should have thrown ISE due to being closed"); } catch (IllegalStateException expected) { // Ignore } catch (Throwable unexpected) { fail("Should have thrown IllegalStateException"); } } 
assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testSharedEventLoopGroups() throws Exception { final Set<Transport> transports = new HashSet<>(); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); final TransportOptions sharedTransportOptions = createClientOptions(); sharedTransportOptions.setUseKQueue(false); sharedTransportOptions.setUseEpoll(false); sharedTransportOptions.setSharedEventLoopThreads(1); Transport sharedNioTransport1 = createConnectedTransport(serverLocation, sharedTransportOptions); transports.add(sharedNioTransport1); Transport sharedNioTransport2 = createConnectedTransport(serverLocation, sharedTransportOptions); transports.add(sharedNioTransport2); final EventLoopGroup sharedGroup = getGroupRef(sharedNioTransport1).group(); assertSame(sharedGroup, getGroupRef(sharedNioTransport2).group()); sharedNioTransport1.close(); assertFalse(sharedGroup.isShutdown()); assertFalse(sharedGroup.isTerminated()); sharedNioTransport2.close(); assertTrue(sharedGroup.isShutdown()); assertTrue(sharedGroup.isTerminated()); } finally { // Ensures that any not already closed, e.g due to test failure, are now closed. cleanUpTransports(transports); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testSharedEventLoopGroupsOfDifferentSizes() throws Exception { final Set<Transport> transports = new HashSet<>(); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); final TransportOptions sharedTransportOptions1 = createClientOptions(); sharedTransportOptions1.setUseKQueue(false); sharedTransportOptions1.setUseEpoll(false); sharedTransportOptions1.setSharedEventLoopThreads(1); Transport nioSharedTransport1 = createConnectedTransport(serverLocation, sharedTransportOptions1); transports.add(nioSharedTransport1); final TransportOptions sharedTransportOptions2 = createClientOptions(); sharedTransportOptions2.setUseKQueue(false); sharedTransportOptions2.setUseEpoll(false); sharedTransportOptions2.setSharedEventLoopThreads(2); Transport nioSharedTransport2 = createConnectedTransport(serverLocation, sharedTransportOptions2); transports.add(nioSharedTransport2); EventLoopGroup sharedGroup1 = getGroupRef(nioSharedTransport1).group(); EventLoopGroup sharedGroup2 = getGroupRef(nioSharedTransport2).group(); assertNotSame(sharedGroup1, sharedGroup2); nioSharedTransport1.close(); assertTrue(sharedGroup1.isShutdown()); assertTrue(sharedGroup1.isTerminated()); nioSharedTransport2.close(); assertTrue(sharedGroup2.isShutdown()); assertTrue(sharedGroup2.isTerminated()); } finally { // Ensures that any not already closed, e.g due to test failure, are now closed. cleanUpTransports(transports); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testUnsharedEventLoopGroups() throws Exception { final Set<Transport> transports = new HashSet<>(); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); final TransportOptions unsharedTransportOptions = createClientOptions(); unsharedTransportOptions.setUseKQueue(false); unsharedTransportOptions.setUseEpoll(false); unsharedTransportOptions.setSharedEventLoopThreads(0); Transport unsharedNioTransport1 = createConnectedTransport(serverLocation, unsharedTransportOptions); transports.add(unsharedNioTransport1); Transport unsharedNioTransport2 = createConnectedTransport(serverLocation, unsharedTransportOptions); transports.add(unsharedNioTransport2); final EventLoopGroup unsharedGroup1 = getGroupRef(unsharedNioTransport1).group(); final EventLoopGroup unsharedGroup2 = getGroupRef(unsharedNioTransport2).group(); assertNotSame(unsharedGroup1, unsharedNioTransport2); unsharedNioTransport1.close(); assertTrue(unsharedGroup1.isShutdown()); assertTrue(unsharedGroup1.isTerminated()); unsharedNioTransport2.close(); assertTrue(unsharedGroup2.isShutdown()); assertTrue(unsharedGroup2.isTerminated()); } finally { // Ensures that any not already closed, e.g due to test failure, are now closed. cleanUpTransports(transports); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testDataSentIsReceived() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); ByteBuf sendBuffer = transport.allocateSendBuffer(SEND_BYTE_COUNT); for (int i = 0; i < SEND_BYTE_COUNT; ++i) { sendBuffer.writeByte('A'); } transport.writeAndFlush(sendBuffer); assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { return !data.isEmpty(); } }, 10000, 50)); assertEquals(SEND_BYTE_COUNT, data.get(0).readableBytes()); transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); } @Test @Timeout(60) public void testMultipleDataPacketsSentAreReceived() throws Exception { doMultipleDataPacketsSentAndReceive(SEND_BYTE_COUNT, 1); } @Test @Timeout(60) public void testMultipleDataPacketsSentAreReceivedRepeatedly() throws Exception { doMultipleDataPacketsSentAndReceive(SEND_BYTE_COUNT, 10); } public void doMultipleDataPacketsSentAndReceive(final int byteCount, final int iterations) throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); ByteBuf sendBuffer = Unpooled.buffer(byteCount); for (int i = 0; i < byteCount; ++i) { sendBuffer.writeByte('A'); } for (int i = 0; i < iterations; ++i) { transport.writeAndFlush(sendBuffer.copy()); } assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { 
return bytesRead.get() == (byteCount * iterations); } }, 10000, 50)); transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); } @Test @Timeout(60) public void testSendToClosedTransportFails() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); transport.close(); ByteBuf sendBuffer = Unpooled.buffer(10); try { transport.writeAndFlush(sendBuffer); fail("Should throw on send of closed transport"); } catch (IOException ex) { } } } @Test @Timeout(60) public void testConnectRunsInitializationMethod() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); final AtomicBoolean initialized = new AtomicBoolean(); Transport transport = createTransport(serverLocation, testListener, createClientOptions()); try { transport.connect(() -> initialized.set(true), null); LOG.info("Connected to server:{} as expected.", serverLocation); } catch (Exception e) { fail("Should have connected to the server at " + serverLocation + " but got exception: " + e); } assertTrue(transport.isConnected()); assertEquals(serverLocation, transport.getRemoteLocation()); assertTrue(initialized.get()); transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Test @Timeout(60) public void testFailureInInitializationRoutineFailsConnect() throws Exception { try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); Transport transport = createTransport(serverLocation, testListener, createClientOptions()); try { transport.connect(() -> { throw new RuntimeException(); }, null); fail("Should not have connected to the server at " + serverLocation); } catch (Exception e) { LOG.info("Failed to connect to server:{} as expected", serverLocation); } assertFalse(transport.isConnected(), "Should not be connected"); assertEquals(serverLocation, transport.getRemoteLocation(), "Server location is incorrect"); transport.close(); } assertFalse(transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } @Disabled("Used for checking for transport level leaks, my be unstable on CI.") @Test @Timeout(60) public void testSendToClosedTransportFailsButDoesNotLeak() throws Exception { Transport transport = null; ResourceLeakDetector.setLevel(Level.PARANOID); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); for (int i = 0; i < 256; ++i) { transport = createConnectedTransport(serverLocation, createClientOptions()); assertTrue(transport.isConnected()); ByteBuf sendBuffer = transport.allocateSendBuffer(10 * 1024 * 1024); sendBuffer.writeBytes(new byte[] {0, 1, 2, 3, 4}); transport.close(); try { transport.writeAndFlush(sendBuffer); fail("Should throw on send of closed transport"); } catch (IOException ex) { } } System.gc(); } } @Test @Timeout(60) public void testConnectToServerWithEpollEnabled() throws Exception { doTestEpollSupport(true); } @Test @Timeout(60) public 
void testConnectToServerWithEpollDisabled() throws Exception { doTestEpollSupport(false); } @Test @Timeout(60) public void testConnectToServerViaProxy() throws Exception { try (TestProxy testProxy = new TestProxy(ProxyType.SOCKS5); NettyEchoServer server = createEchoServer(createServerOptions())) { testProxy.start(); server.start(); int port = server.getServerPort(); LOG.info("Echo server bound at: {}", port); URI serverLocation = new URI("tcp://localhost:" + port); TransportOptions clientOptions = createClientOptions(); SocketAddress proxyAddress = new InetSocketAddress("localhost", testProxy.getPort()); Supplier<ProxyHandler> proxyHandlerFactory = () -> { return new Socks5ProxyHandler(proxyAddress); }; clientOptions.setProxyHandlerSupplier(proxyHandlerFactory); Transport transport = createConnectedTransport(serverLocation, clientOptions); assertTrue(transport.isConnected()); assertEquals(serverLocation, transport.getRemoteLocation()); assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { return server.getChannelActiveCount() == 1; } }, 10_000, 10)); assertTrue(Wait.waitFor(new Wait.Condition() { @Override public boolean isSatisfied() throws Exception { return testProxy.getSuccessCount() == 1; } }, 10_000, 10)); transport.close(); // Additional close should not fail or cause other problems. transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } private void doTestEpollSupport(boolean useEpoll) throws Exception { assumeTrue(Epoll.isAvailable()); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); TransportOptions options = createClientOptions(); options.setUseEpoll(useEpoll); options.setUseKQueue(false); Transport transport = createConnectedTransport(serverLocation, options); assertTrue(transport.isConnected()); assertEquals(serverLocation, transport.getRemoteLocation()); if(useEpoll) { assertEventLoopGroupType("Transport should be using Epoll", transport, EpollEventLoopGroup.class); } else { assertEventLoopGroupType("Transport should be using Nio", transport, NioEventLoopGroup.class); } transport.close(); // Additional close should not fail or cause other problems. transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } private static EventLoopGroupRef getGroupRef(final Transport transport) throws IllegalAccessException { Field groupRefField = null; Class<?> transportType = transport.getClass(); while (transportType != null && groupRefField == null) { try { groupRefField = transportType.getDeclaredField("groupRef"); } catch (NoSuchFieldException error) { transportType = transportType.getSuperclass(); if (Object.class.equals(transportType)) { transportType = null; } } } assertNotNull(groupRefField, "Transport implementation unknown"); groupRefField.setAccessible(true); return (EventLoopGroupRef) groupRefField.get(transport); } private static void assertEventLoopGroupType(String message, Transport transport, Class<? 
extends EventLoopGroup> eventLoopGroupClass) throws Exception { final EventLoopGroupRef groupRef = getGroupRef(transport); assertThat(message, groupRef.group(), instanceOf(eventLoopGroupClass)); } @Test @Timeout(60) public void testConnectToServerWithKQueueEnabled() throws Exception { doTestKQueueSupport(true); } @Test @Timeout(60) public void testConnectToServerWithKQueueDisabled() throws Exception { doTestKQueueSupport(false); } private void doTestKQueueSupport(boolean useKQueue) throws Exception { assumeTrue(KQueue.isAvailable()); try (NettyEchoServer server = createEchoServer(createServerOptions())) { server.start(); int port = server.getServerPort(); URI serverLocation = new URI("tcp://localhost:" + port); TransportOptions options = createClientOptions(); options.setUseKQueue(useKQueue); options.setUseEpoll(false); Transport transport = createConnectedTransport(serverLocation, options); assertTrue(transport.isConnected()); assertEquals(serverLocation, transport.getRemoteLocation()); if(useKQueue) { assertEventLoopGroupType("Transport should be using Kqueue", transport, KQueueEventLoopGroup.class); } else { assertEventLoopGroupType("Transport should be using Nio", transport, NioEventLoopGroup.class); } transport.close(); // Additional close should not fail or cause other problems. transport.close(); } assertTrue(!transportClosed); // Normal shutdown does not trigger the event. 
assertTrue(exceptions.isEmpty()); assertTrue(data.isEmpty()); } protected Transport createTransport(URI serverLocation, TransportListener listener, TransportOptions options) { if (listener == null) { return new NettyTcpTransport(serverLocation, options, false); } else { return new NettyTcpTransport(listener, serverLocation, options, false); } } private Transport createConnectedTransport(final URI serverLocation, final TransportOptions options) { Transport transport = createTransport(serverLocation, testListener, options); try { transport.connect(null, null); LOG.info("Connected to server:{} as expected.", serverLocation); } catch (Exception e) { fail("Should have connected to the server at " + serverLocation + " but got exception: " + e); } return transport; } private void cleanUpTransports(final Set<Transport> transports) { transports.forEach(transport -> { try { transport.close(); } catch (Throwable t) { LOG.warn(t.getMessage()); } }); } protected TransportOptions createClientOptions() { return new TransportOptions(); } protected TransportOptions createServerOptions() { return new TransportOptions(); } protected void logTransportErrors() { if (!exceptions.isEmpty()) { for(Throwable ex : exceptions) { LOG.info("Transport sent exception: {}", ex, ex); } } } protected NettyEchoServer createEchoServer(TransportOptions options) { return createEchoServer(options, false); } protected NettyEchoServer createEchoServer(TransportOptions options, boolean needClientAuth) { return new NettyEchoServer(options, false, needClientAuth, false); } public class NettyTransportListener implements TransportListener { final boolean retainDataBufs; NettyTransportListener(boolean retainDataBufs) { this.retainDataBufs = retainDataBufs; } @Override public void onData(ByteBuf incoming) { LOG.debug("Client has new incoming data of size: {}", incoming.readableBytes()); data.add(incoming); bytesRead.addAndGet(incoming.readableBytes()); if(retainDataBufs) { incoming.retain(); } } @Override public 
void onTransportClosed() { LOG.debug("Transport reports that it has closed."); transportClosed = true; } @Override public void onTransportError(Throwable cause) { LOG.info("Transport error caught: {}", cause.getMessage(), cause); exceptions.add(cause); } } }
googleapis/google-cloud-java
37,390
java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/ConnectAccessConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/managedkafka/v1/resources.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.managedkafka.v1; /** * * * <pre> * The configuration of access to the Kafka Connect cluster. * </pre> * * Protobuf type {@code google.cloud.managedkafka.v1.ConnectAccessConfig} */ public final class ConnectAccessConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.ConnectAccessConfig) ConnectAccessConfigOrBuilder { private static final long serialVersionUID = 0L; // Use ConnectAccessConfig.newBuilder() to construct. 
private ConnectAccessConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ConnectAccessConfig() { networkConfigs_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ConnectAccessConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.managedkafka.v1.ResourcesProto .internal_static_google_cloud_managedkafka_v1_ConnectAccessConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.managedkafka.v1.ResourcesProto .internal_static_google_cloud_managedkafka_v1_ConnectAccessConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.managedkafka.v1.ConnectAccessConfig.class, com.google.cloud.managedkafka.v1.ConnectAccessConfig.Builder.class); } public static final int NETWORK_CONFIGS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.managedkafka.v1.ConnectNetworkConfig> networkConfigs_; /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.managedkafka.v1.ConnectNetworkConfig> getNetworkConfigsList() { return networkConfigs_; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder> getNetworkConfigsOrBuilderList() { return networkConfigs_; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public int getNetworkConfigsCount() { return networkConfigs_.size(); } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectNetworkConfig getNetworkConfigs(int index) { return networkConfigs_.get(index); } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder getNetworkConfigsOrBuilder( int index) { return networkConfigs_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < networkConfigs_.size(); i++) { output.writeMessage(1, networkConfigs_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < networkConfigs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, networkConfigs_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.managedkafka.v1.ConnectAccessConfig)) { return super.equals(obj); } com.google.cloud.managedkafka.v1.ConnectAccessConfig other = (com.google.cloud.managedkafka.v1.ConnectAccessConfig) obj; if (!getNetworkConfigsList().equals(other.getNetworkConfigsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNetworkConfigsCount() > 0) { hash = (37 * hash) + NETWORK_CONFIGS_FIELD_NUMBER; hash = (53 * hash) + getNetworkConfigsList().hashCode(); } 
hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, 
input, extensionRegistry); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.managedkafka.v1.ConnectAccessConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The configuration of access to the Kafka Connect cluster. 
* </pre> * * Protobuf type {@code google.cloud.managedkafka.v1.ConnectAccessConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.ConnectAccessConfig) com.google.cloud.managedkafka.v1.ConnectAccessConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.managedkafka.v1.ResourcesProto .internal_static_google_cloud_managedkafka_v1_ConnectAccessConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.managedkafka.v1.ResourcesProto .internal_static_google_cloud_managedkafka_v1_ConnectAccessConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.managedkafka.v1.ConnectAccessConfig.class, com.google.cloud.managedkafka.v1.ConnectAccessConfig.Builder.class); } // Construct using com.google.cloud.managedkafka.v1.ConnectAccessConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (networkConfigsBuilder_ == null) { networkConfigs_ = java.util.Collections.emptyList(); } else { networkConfigs_ = null; networkConfigsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.managedkafka.v1.ResourcesProto .internal_static_google_cloud_managedkafka_v1_ConnectAccessConfig_descriptor; } @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectAccessConfig getDefaultInstanceForType() { return com.google.cloud.managedkafka.v1.ConnectAccessConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectAccessConfig 
build() { com.google.cloud.managedkafka.v1.ConnectAccessConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectAccessConfig buildPartial() { com.google.cloud.managedkafka.v1.ConnectAccessConfig result = new com.google.cloud.managedkafka.v1.ConnectAccessConfig(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.managedkafka.v1.ConnectAccessConfig result) { if (networkConfigsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { networkConfigs_ = java.util.Collections.unmodifiableList(networkConfigs_); bitField0_ = (bitField0_ & ~0x00000001); } result.networkConfigs_ = networkConfigs_; } else { result.networkConfigs_ = networkConfigsBuilder_.build(); } } private void buildPartial0(com.google.cloud.managedkafka.v1.ConnectAccessConfig result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.managedkafka.v1.ConnectAccessConfig) { return mergeFrom((com.google.cloud.managedkafka.v1.ConnectAccessConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.managedkafka.v1.ConnectAccessConfig other) { if (other == com.google.cloud.managedkafka.v1.ConnectAccessConfig.getDefaultInstance()) return this; if (networkConfigsBuilder_ == null) { if (!other.networkConfigs_.isEmpty()) { if (networkConfigs_.isEmpty()) { networkConfigs_ = other.networkConfigs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNetworkConfigsIsMutable(); networkConfigs_.addAll(other.networkConfigs_); } onChanged(); } } else { if (!other.networkConfigs_.isEmpty()) { if (networkConfigsBuilder_.isEmpty()) { networkConfigsBuilder_.dispose(); networkConfigsBuilder_ = null; networkConfigs_ = other.networkConfigs_; bitField0_ = (bitField0_ & ~0x00000001); networkConfigsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNetworkConfigsFieldBuilder() : null; } else { networkConfigsBuilder_.addAllMessages(other.networkConfigs_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.managedkafka.v1.ConnectNetworkConfig m = input.readMessage( com.google.cloud.managedkafka.v1.ConnectNetworkConfig.parser(), extensionRegistry); if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); networkConfigs_.add(m); } else { networkConfigsBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.managedkafka.v1.ConnectNetworkConfig> networkConfigs_ = java.util.Collections.emptyList(); private void ensureNetworkConfigsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { networkConfigs_ = new java.util.ArrayList<com.google.cloud.managedkafka.v1.ConnectNetworkConfig>( networkConfigs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.managedkafka.v1.ConnectNetworkConfig, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder, com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder> networkConfigsBuilder_; /** * * * <pre> * Required. 
* Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.managedkafka.v1.ConnectNetworkConfig> getNetworkConfigsList() { if (networkConfigsBuilder_ == null) { return java.util.Collections.unmodifiableList(networkConfigs_); } else { return networkConfigsBuilder_.getMessageList(); } } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public int getNetworkConfigsCount() { if (networkConfigsBuilder_ == null) { return networkConfigs_.size(); } else { return networkConfigsBuilder_.getCount(); } } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.ConnectNetworkConfig getNetworkConfigs(int index) { if (networkConfigsBuilder_ == null) { return networkConfigs_.get(index); } else { return networkConfigsBuilder_.getMessage(index); } } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNetworkConfigs( int index, com.google.cloud.managedkafka.v1.ConnectNetworkConfig value) { if (networkConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNetworkConfigsIsMutable(); networkConfigs_.set(index, value); onChanged(); } else { networkConfigsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNetworkConfigs( int index, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder builderForValue) { if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); networkConfigs_.set(index, builderForValue.build()); onChanged(); } else { networkConfigsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addNetworkConfigs(com.google.cloud.managedkafka.v1.ConnectNetworkConfig value) { if (networkConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNetworkConfigsIsMutable(); networkConfigs_.add(value); onChanged(); } else { networkConfigsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Required. 
* Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addNetworkConfigs( int index, com.google.cloud.managedkafka.v1.ConnectNetworkConfig value) { if (networkConfigsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNetworkConfigsIsMutable(); networkConfigs_.add(index, value); onChanged(); } else { networkConfigsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addNetworkConfigs( com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder builderForValue) { if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); networkConfigs_.add(builderForValue.build()); onChanged(); } else { networkConfigsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addNetworkConfigs( int index, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder builderForValue) { if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); networkConfigs_.add(index, builderForValue.build()); onChanged(); } else { networkConfigsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder addAllNetworkConfigs( java.lang.Iterable<? extends com.google.cloud.managedkafka.v1.ConnectNetworkConfig> values) { if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, networkConfigs_); onChanged(); } else { networkConfigsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearNetworkConfigs() { if (networkConfigsBuilder_ == null) { networkConfigs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { networkConfigsBuilder_.clear(); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. 
Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder removeNetworkConfigs(int index) { if (networkConfigsBuilder_ == null) { ensureNetworkConfigsIsMutable(); networkConfigs_.remove(index); onChanged(); } else { networkConfigsBuilder_.remove(index); } return this; } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder getNetworkConfigsBuilder( int index) { return getNetworkConfigsFieldBuilder().getBuilder(index); } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder getNetworkConfigsOrBuilder(int index) { if (networkConfigsBuilder_ == null) { return networkConfigs_.get(index); } else { return networkConfigsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<? extends com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder> getNetworkConfigsOrBuilderList() { if (networkConfigsBuilder_ != null) { return networkConfigsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(networkConfigs_); } } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder addNetworkConfigsBuilder() { return getNetworkConfigsFieldBuilder() .addBuilder(com.google.cloud.managedkafka.v1.ConnectNetworkConfig.getDefaultInstance()); } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. * </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder addNetworkConfigsBuilder( int index) { return getNetworkConfigsFieldBuilder() .addBuilder( index, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.getDefaultInstance()); } /** * * * <pre> * Required. * Virtual Private Cloud (VPC) networks that must be granted direct access to * the Kafka Connect cluster. Minimum of 1 network is required. Maximum 10 * networks can be specified. 
* </pre> * * <code> * repeated .google.cloud.managedkafka.v1.ConnectNetworkConfig network_configs = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public java.util.List<com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder> getNetworkConfigsBuilderList() { return getNetworkConfigsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.managedkafka.v1.ConnectNetworkConfig, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder, com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder> getNetworkConfigsFieldBuilder() { if (networkConfigsBuilder_ == null) { networkConfigsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.managedkafka.v1.ConnectNetworkConfig, com.google.cloud.managedkafka.v1.ConnectNetworkConfig.Builder, com.google.cloud.managedkafka.v1.ConnectNetworkConfigOrBuilder>( networkConfigs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); networkConfigs_ = null; } return networkConfigsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.ConnectAccessConfig) } // @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.ConnectAccessConfig) private static final com.google.cloud.managedkafka.v1.ConnectAccessConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.ConnectAccessConfig(); } public static com.google.cloud.managedkafka.v1.ConnectAccessConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ConnectAccessConfig> PARSER = new 
com.google.protobuf.AbstractParser<ConnectAccessConfig>() { @java.lang.Override public ConnectAccessConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ConnectAccessConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ConnectAccessConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.managedkafka.v1.ConnectAccessConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
37,408
flink-fs-tests/src/test/java/org/apache/flink/hdfstests/ContinuousFileProcessingTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.hdfstests; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.functions.DefaultOpenContext; import org.apache.flink.api.common.io.FileInputFormat; import org.apache.flink.api.common.io.FilePathFilter; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.typeutils.TypeExtractor; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.core.fs.FileInputSplit; import org.apache.flink.core.fs.Path; import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; import org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperator; import org.apache.flink.streaming.api.functions.source.ContinuousFileReaderOperatorFactory; import org.apache.flink.streaming.api.functions.source.FileProcessingMode; import org.apache.flink.streaming.api.functions.source.TimestampedFileInputSplit; import org.apache.flink.streaming.api.functions.source.legacy.ContinuousFileMonitoringFunction; import org.apache.flink.streaming.api.functions.source.legacy.SourceFunction; import 
org.apache.flink.streaming.api.legacy.io.TextInputFormat; import org.apache.flink.streaming.api.operators.StreamSource; import org.apache.flink.streaming.api.watermark.Watermark; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.AbstractStreamOperatorTestHarness; import org.apache.flink.streaming.util.MockStreamingRuntimeContext; import org.apache.flink.streaming.util.OneInputStreamOperatorTestHarness; import org.apache.flink.util.OperatingSystem; import org.apache.flink.util.Preconditions; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Assume; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.UUID; /** * Tests for the {@link ContinuousFileMonitoringFunction} and {@link ContinuousFileReaderOperator}. 
 */
public class ContinuousFileProcessingTest {

    // Number of data files created per test directory.
    private static final int NO_OF_FILES = 5;
    // Number of text lines written into each created file.
    private static final int LINES_PER_FILE = 10;
    // Monitoring interval (ms) passed to the ContinuousFileMonitoringFunction.
    private static final long INTERVAL = 100;

    // Shared mini-cluster handles, initialized once per class in createHDFS().
    private static FileSystem hdfs;
    private static String hdfsURI;
    private static MiniDFSCluster hdfsCluster;

    @ClassRule public static TemporaryFolder tempFolder = new TemporaryFolder();

    /**
     * Spins up a single-node HDFS mini-cluster backed by a temporary folder and stores the
     * filesystem handle and URI for all tests. Skipped on Windows, where the mini-cluster
     * cannot run without native extensions.
     */
    @BeforeClass
    public static void createHDFS() {
        Assume.assumeTrue(
                "HDFS cluster cannot be start on Windows without extensions.",
                !OperatingSystem.isWindows());

        try {
            File hdfsDir = tempFolder.newFolder();

            org.apache.hadoop.conf.Configuration hdConf =
                    new org.apache.hadoop.conf.Configuration();
            hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDir.getAbsolutePath());
            hdConf.set(
                    "dfs.block.size",
                    String.valueOf(1048576)); // this is the minimum we can set.

            MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
            hdfsCluster = builder.build();

            hdfsURI =
                    "hdfs://"
                            + hdfsCluster.getURI().getHost()
                            + ":"
                            + hdfsCluster.getNameNodePort()
                            + "/";

            hdfs = new org.apache.hadoop.fs.Path(hdfsURI).getFileSystem(hdConf);

        } catch (Throwable e) {
            e.printStackTrace();
            Assert.fail("Test failed " + e.getMessage());
        }
    }

    /** Tears down the HDFS mini-cluster started in {@link #createHDFS()}. */
    @AfterClass
    public static void destroyHDFS() {
        if (hdfsCluster != null) {
            hdfsCluster.shutdown();
        }
    }

    /**
     * Verifies that running the monitoring function against a non-existent path fails with a
     * {@link FileNotFoundException} carrying the expected message, and never emits a split.
     */
    @Test
    public void testInvalidPathSpecification() throws Exception {

        String invalidPath =
                "hdfs://"
                        + hdfsCluster.getURI().getHost()
                        + ":"
                        + hdfsCluster.getNameNodePort()
                        + "/invalid/";
        TextInputFormat format = new TextInputFormat(new Path(invalidPath));

        ContinuousFileMonitoringFunction<String> monitoringFunction =
                new ContinuousFileMonitoringFunction<>(
                        format, FileProcessingMode.PROCESS_ONCE, 1, INTERVAL);

        try {
            monitoringFunction.run(
                    new DummySourceContext() {
                        @Override
                        public void collect(TimestampedFileInputSplit element) {
                            // we should never arrive here with an invalid path
                            Assert.fail("Test passes with an invalid path.");
                        }
                    });

            // we should never arrive here with an invalid path
            Assert.fail("Test passed with an invalid path.");
        } catch (FileNotFoundException e) {
            Assert.assertEquals(
                    "The provided file path " + format.getFilePaths()[0] + " does not exist.",
                    e.getMessage());
        }
    }

    /**
     * Builds a one-input test harness around a {@link ContinuousFileReaderOperatorFactory}
     * configured for the given input format.
     *
     * @param format the file input format driving the reader operator
     * @param <T> the record type produced by the format
     */
    private <T> OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, T> createHarness(
            FileInputFormat<T> format) throws Exception {
        ExecutionConfig config = new ExecutionConfig();
        return new OneInputStreamOperatorTestHarness<>(
                new ContinuousFileReaderOperatorFactory(
                        format, TypeExtractor.getInputFormatTypes(format), config),
                TypeExtractor.getForClass(TimestampedFileInputSplit.class)
                        .createSerializer(config.getSerializerConfig()));
    }

    /**
     * Feeds all splits of a freshly written directory through the reader operator in event time
     * and checks that every line of every file is emitted exactly once, in per-file line order,
     * followed by a single {@code Long.MAX_VALUE} watermark marking the end of input.
     */
    @Test
    public void testFileReadingOperatorWithEventTime() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";

        Set<org.apache.hadoop.fs.Path> filesCreated = new HashSet<>();
        Map<String, Long> modTimes = new HashMap<>();
        Map<Integer, String> expectedFileContents = new HashMap<>();

        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            modTimes.put(file.f0.getName(), hdfs.getFileStatus(file.f0).getModificationTime());
            filesCreated.add(file.f0);
            expectedFileContents.put(i, file.f1);
        }

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        // NOTE(review): typeInfo is never used below — candidate for removal.
        TypeInformation<String> typeInfo = TypeExtractor.getInputFormatTypes(format);

        OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, String> tester =
                createHarness(format);
        tester.open();

        // create the necessary splits for the test
        FileInputSplit[] splits =
                format.createInputSplits(tester.getExecutionConfig().getParallelism());

        // and feed them to the operator
        for (FileInputSplit split : splits) {
            tester.processElement(
                    new StreamRecord<>(
                            new TimestampedFileInputSplit(
                                    modTimes.get(split.getPath().getName()),
                                    split.getSplitNumber(),
                                    split.getPath(),
                                    split.getStart(),
                                    split.getLength(),
                                    split.getHostnames())));
        }

        // then close the reader gracefully (and wait to finish reading)
        synchronized (tester.getCheckpointLock()) {
            tester.close();
        }

        // the lines received must be the elements in the files +1 for the longMax watermark
        // we are in event time, which emits no watermarks, so the last watermark will mark the
        // end of the input stream.
        Assert.assertEquals(NO_OF_FILES * LINES_PER_FILE + 1, tester.getOutput().size());

        Map<Integer, List<String>> actualFileContents = new HashMap<>();
        Object lastElement = null;
        for (Object line : tester.getOutput()) {
            lastElement = line;

            if (line instanceof StreamRecord) {

                @SuppressWarnings("unchecked")
                StreamRecord<String> element = (StreamRecord<String>) line;

                // Lines start with the file index (see createFileAndFillWithData), so the first
                // character identifies which file the record came from.
                // NOTE(review): this only works while NO_OF_FILES <= 10 — a two-digit index
                // would be truncated to its first digit.
                int fileIdx = Character.getNumericValue(element.getValue().charAt(0));

                List<String> content = actualFileContents.get(fileIdx);
                if (content == null) {
                    content = new ArrayList<>();
                    actualFileContents.put(fileIdx, content);
                }
                content.add(element.getValue() + "\n");
            }
        }

        // check if the last element is the LongMax watermark
        Assert.assertTrue(lastElement instanceof Watermark);
        Assert.assertEquals(Long.MAX_VALUE, ((Watermark) lastElement).getTimestamp());

        Assert.assertEquals(expectedFileContents.size(), actualFileContents.size());
        for (Integer fileIdx : expectedFileContents.keySet()) {
            Assert.assertTrue(
                    "file" + fileIdx + " not found", actualFileContents.keySet().contains(fileIdx));

            List<String> cntnt = actualFileContents.get(fileIdx);
            // Splits may interleave, so restore per-file line order before comparing.
            Collections.sort(
                    cntnt,
                    new Comparator<String>() {
                        @Override
                        public int compare(String o1, String o2) {
                            return getLineNo(o1) - getLineNo(o2);
                        }
                    });

            StringBuilder cntntStr = new StringBuilder();
            for (String line : cntnt) {
                cntntStr.append(line);
            }
            Assert.assertEquals(expectedFileContents.get(fileIdx), cntntStr.toString());
        }

        for (org.apache.hadoop.fs.Path file : filesCreated) {
            hdfs.delete(file, false);
        }
    }

    /**
     * Checkpoints a reader operator that holds four pending splits (the input format blocks on a
     * latch), restores a second reader from that snapshot, and asserts both produce the same
     * output once the latch is released.
     */
    @Test
    public void testReaderSnapshotRestore() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";

        TimestampedFileInputSplit split1 =
                new TimestampedFileInputSplit(0, 3, new Path("test/test1"), 0, 100, null);

        TimestampedFileInputSplit split2 =
                new TimestampedFileInputSplit(10, 2, new Path("test/test2"), 101, 200, null);

        TimestampedFileInputSplit split3 =
                new TimestampedFileInputSplit(10, 1, new Path("test/test2"), 0, 100, null);

        TimestampedFileInputSplit split4 =
                new TimestampedFileInputSplit(11, 0, new Path("test/test3"), 0, 100, null);

        final OneShotLatch latch = new OneShotLatch();

        BlockingFileInputFormat format = new BlockingFileInputFormat(latch, new Path(testBasePath));
        // NOTE(review): typeInfo is never used below — candidate for removal.
        TypeInformation<FileInputSplit> typeInfo = TypeExtractor.getInputFormatTypes(format);

        OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, FileInputSplit>
                initTestInstance = createHarness(format);
        initTestInstance.open();

        // create some state in the reader
        initTestInstance.processElement(new StreamRecord<>(split1));
        initTestInstance.processElement(new StreamRecord<>(split2));
        initTestInstance.processElement(new StreamRecord<>(split3));
        initTestInstance.processElement(new StreamRecord<>(split4));

        // take a snapshot of the operator's state. This will be used
        // to initialize another reader and compare the results of the
        // two operators.

        final OperatorSubtaskState snapshot;
        synchronized (initTestInstance.getCheckpointLock()) {
            snapshot = initTestInstance.snapshot(0L, 0L);
        }

        OneInputStreamOperatorTestHarness<TimestampedFileInputSplit, FileInputSplit>
                restoredTestInstance =
                        createHarness(new BlockingFileInputFormat(latch, new Path(testBasePath)));

        restoredTestInstance.initializeState(snapshot);
        restoredTestInstance.open();

        // now let computation start
        latch.trigger();

        // ... and wait for the operators to close gracefully

        synchronized (initTestInstance.getCheckpointLock()) {
            initTestInstance.close();
        }

        synchronized (restoredTestInstance.getCheckpointLock()) {
            restoredTestInstance.close();
        }

        FileInputSplit fsSplit1 = createSplitFromTimestampedSplit(split1);
        FileInputSplit fsSplit2 = createSplitFromTimestampedSplit(split2);
        FileInputSplit fsSplit3 = createSplitFromTimestampedSplit(split3);
        FileInputSplit fsSplit4 = createSplitFromTimestampedSplit(split4);

        // compare if the results contain what they should contain and also if
        // they are the same, as they should.

        Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit1)));
        Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit2)));
        Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit3)));
        Assert.assertTrue(initTestInstance.getOutput().contains(new StreamRecord<>(fsSplit4)));

        Assert.assertArrayEquals(
                initTestInstance.getOutput().toArray(), restoredTestInstance.getOutput().toArray());
    }

    /** Strips the timestamp off a {@link TimestampedFileInputSplit}, keeping all other fields. */
    private FileInputSplit createSplitFromTimestampedSplit(TimestampedFileInputSplit split) {
        Preconditions.checkNotNull(split);

        return new FileInputSplit(
                split.getSplitNumber(),
                split.getPath(),
                split.getStart(),
                split.getLength(),
                split.getHostnames());
    }

    /**
     * Input format whose {@link #reachedEnd()} blocks on a {@link OneShotLatch} until the test
     * releases it, then echoes back the split it was opened with as its single record. Used to
     * keep splits "in flight" while a snapshot is taken.
     */
    private static class BlockingFileInputFormat extends FileInputFormat<FileInputSplit> {

        private static final long serialVersionUID = -6727603565381560267L;

        private final OneShotLatch latch;

        private FileInputSplit split;

        private boolean reachedEnd;

        BlockingFileInputFormat(OneShotLatch latch, Path filePath) {
            super(filePath);
            this.latch = latch;
            this.reachedEnd = false;
        }

        @Override
        public void open(FileInputSplit fileSplit) throws IOException {
            this.split = fileSplit;
            this.reachedEnd = false;
        }

        @Override
        public boolean reachedEnd() throws IOException {
            // Block the reader until the test decides to let processing continue.
            if (!latch.isTriggered()) {
                try {
                    latch.await();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
            return reachedEnd;
        }

        @Override
        public FileInputSplit nextRecord(FileInputSplit reuse) throws IOException {
            this.reachedEnd = true;
            return split;
        }

        @Override
        public void close() {}
    }

    ////	Monitoring Function Tests	//////

    /**
     * Verifies that a custom {@link FilePathFilter} is honored: files whose names match the
     * filter ({@code "**"} prefix) are discarded, all others are emitted.
     */
    @Test
    public void testFilePathFiltering() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
        Set<org.apache.hadoop.fs.Path> filesCreated = new HashSet<>();
        Set<String> filesKept = new TreeSet<>();

        // create the files to be discarded
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "**file", i, "This is test line.");
            filesCreated.add(file.f0);
        }

        // create the files to be kept
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            filesCreated.add(file.f0);
            filesKept.add(file.f0.getName());
        }

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        format.setFilesFilter(
                new FilePathFilter() {

                    private static final long serialVersionUID = 2611449927338589804L;

                    @Override
                    public boolean filterPath(Path filePath) {
                        return filePath.getName().startsWith("**");
                    }
                });

        ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(format, FileProcessingMode.PROCESS_ONCE);

        final FileVerifyingSourceContext context =
                new FileVerifyingSourceContext(new OneShotLatch(), monitoringFunction);

        monitoringFunction.open(DefaultOpenContext.INSTANCE);
        monitoringFunction.run(context);

        Assert.assertArrayEquals(filesKept.toArray(), context.getSeenFiles().toArray());

        // finally delete the files created for the test.
        for (org.apache.hadoop.fs.Path file : filesCreated) {
            hdfs.delete(file, false);
        }
    }

    /**
     * Verifies that with nested-file enumeration enabled the monitoring function discovers
     * files placed in the base directory and in two levels of nested subdirectories.
     */
    @Test
    public void testNestedFilesProcessing() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
        final Set<org.apache.hadoop.fs.Path> filesCreated = new HashSet<>();
        final Set<String> filesToBeRead = new TreeSet<>();

        // create two nested directories
        org.apache.hadoop.fs.Path firstLevelDir =
                new org.apache.hadoop.fs.Path(testBasePath + "/" + "firstLevelDir");
        org.apache.hadoop.fs.Path secondLevelDir =
                new org.apache.hadoop.fs.Path(
                        testBasePath + "/" + "firstLevelDir" + "/" + "secondLevelDir");
        Assert.assertFalse(hdfs.exists(firstLevelDir));
        hdfs.mkdirs(firstLevelDir);
        hdfs.mkdirs(secondLevelDir);

        // create files in the base dir, the first level dir and the second level dir
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(
                            testBasePath, "firstLevelFile", i, "This is test line.");
            filesCreated.add(file.f0);
            filesToBeRead.add(file.f0.getName());
        }
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(
                            firstLevelDir.toString(), "secondLevelFile", i, "This is test line.");
            filesCreated.add(file.f0);
            filesToBeRead.add(file.f0.getName());
        }
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(
                            secondLevelDir.toString(), "thirdLevelFile", i, "This is test line.");
            filesCreated.add(file.f0);
            filesToBeRead.add(file.f0.getName());
        }

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        format.setFilesFilter(FilePathFilter.createDefaultFilter());
        format.setNestedFileEnumeration(true);

        ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(format, FileProcessingMode.PROCESS_ONCE);

        final FileVerifyingSourceContext context =
                new FileVerifyingSourceContext(new OneShotLatch(), monitoringFunction);

        monitoringFunction.open(DefaultOpenContext.INSTANCE);
        monitoringFunction.run(context);

        Assert.assertArrayEquals(filesToBeRead.toArray(), context.getSeenFiles().toArray());

        // finally delete the dirs and the files created for the test.
        for (org.apache.hadoop.fs.Path file : filesCreated) {
            hdfs.delete(file, false);
        }
        hdfs.delete(secondLevelDir, false);
        hdfs.delete(firstLevelDir, false);
    }

    /**
     * Verifies that splits are emitted in non-decreasing modification-time order. Files are
     * created with a 400&nbsp;ms pause between them so their mod times differ.
     */
    @Test
    public void testSortingOnModTime() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
        final long[] modTimes = new long[NO_OF_FILES];
        final org.apache.hadoop.fs.Path[] filesCreated =
                new org.apache.hadoop.fs.Path[NO_OF_FILES];

        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            // Ensure each file gets a distinct modification time.
            Thread.sleep(400);

            filesCreated[i] = file.f0;
            modTimes[i] = hdfs.getFileStatus(file.f0).getModificationTime();
        }

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        format.setFilesFilter(FilePathFilter.createDefaultFilter());

        // this is just to verify that all splits have been forwarded later.
        FileInputSplit[] splits = format.createInputSplits(1);

        ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(format, FileProcessingMode.PROCESS_ONCE);

        ModTimeVerifyingSourceContext context = new ModTimeVerifyingSourceContext(modTimes);

        monitoringFunction.open(DefaultOpenContext.INSTANCE);
        monitoringFunction.run(context);
        Assert.assertEquals(splits.length, context.getCounter());

        // delete the created files.
        for (int i = 0; i < NO_OF_FILES; i++) {
            hdfs.delete(filesCreated[i], false);
        }
    }

    /**
     * Verifies {@code PROCESS_ONCE} semantics: only the file present when the source starts is
     * read; files created afterwards (which would be picked up in PROCESS_CONTINUOUSLY mode)
     * are ignored and the source terminates on its own.
     */
    @Test
    public void testProcessOnce() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
        final OneShotLatch latch = new OneShotLatch();

        // create a single file in the directory
        Tuple2<org.apache.hadoop.fs.Path, String> bootstrap =
                createFileAndFillWithData(
                        testBasePath, "file", NO_OF_FILES + 1, "This is test line.");
        Assert.assertTrue(hdfs.exists(bootstrap.f0));

        // the source is supposed to read only this file.
        final Set<String> filesToBeRead = new TreeSet<>();
        filesToBeRead.add(bootstrap.f0.getName());

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        format.setFilesFilter(FilePathFilter.createDefaultFilter());

        final ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(format, FileProcessingMode.PROCESS_ONCE);

        final FileVerifyingSourceContext context =
                new FileVerifyingSourceContext(latch, monitoringFunction);

        // NOTE(review): Assert.fail inside this worker thread does not fail the JUnit test
        // directly — it would only surface indirectly (e.g. via the array assertion below).
        final Thread t =
                new Thread() {
                    @Override
                    public void run() {
                        try {
                            monitoringFunction.open(DefaultOpenContext.INSTANCE);
                            monitoringFunction.run(context);

                            // we would never arrive here if we were in
                            // PROCESS_CONTINUOUSLY mode.

                            // this will trigger the latch
                            context.close();

                        } catch (Exception e) {
                            Assert.fail(e.getMessage());
                        }
                    }
                };
        t.start();

        if (!latch.isTriggered()) {
            latch.await();
        }

        // create some additional files that should be processed in the case of
        // PROCESS_CONTINUOUSLY
        final org.apache.hadoop.fs.Path[] filesCreated =
                new org.apache.hadoop.fs.Path[NO_OF_FILES];
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> ignoredFile =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            filesCreated[i] = ignoredFile.f0;
        }

        // wait until the monitoring thread exits
        t.join();

        Assert.assertArrayEquals(filesToBeRead.toArray(), context.getSeenFiles().toArray());

        // finally delete the files created for the test.
        hdfs.delete(bootstrap.f0, false);
        for (org.apache.hadoop.fs.Path path : filesCreated) {
            hdfs.delete(path, false);
        }
    }

    /**
     * Snapshots a running monitoring source after it has emitted at least one split, restores a
     * fresh copy from the snapshot, and checks the restored function carries the same global
     * modification time.
     */
    @Test
    public void testFunctionRestore() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";

        org.apache.hadoop.fs.Path path = null;
        long fileModTime = Long.MIN_VALUE;
        for (int i = 0; i < 1; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            path = file.f0;
            fileModTime = hdfs.getFileStatus(file.f0).getModificationTime();
        }

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));

        final ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(
                        format, FileProcessingMode.PROCESS_CONTINUOUSLY);

        StreamSource<TimestampedFileInputSplit, ContinuousFileMonitoringFunction<String>> src =
                new StreamSource<>(monitoringFunction);

        final AbstractStreamOperatorTestHarness<TimestampedFileInputSplit> testHarness =
                new AbstractStreamOperatorTestHarness<>(src, 1, 1, 0);
        testHarness.open();

        // Captures any failure from the async runner thread; checked at the end of the test.
        final Throwable[] error = new Throwable[1];

        final OneShotLatch latch = new OneShotLatch();

        final DummySourceContext sourceContext =
                new DummySourceContext() {
                    @Override
                    public void collect(TimestampedFileInputSplit element) {
                        latch.trigger();
                    }
                };

        // run the source asynchronously
        Thread runner =
                new Thread() {
                    @Override
                    public void run() {
                        try {
                            monitoringFunction.run(sourceContext);
                        } catch (Throwable t) {
                            t.printStackTrace();
                            error[0] = t;
                        }
                    }
                };
        runner.start();

        // first condition for the source to have updated its state: emit at least one element
        if (!latch.isTriggered()) {
            latch.await();
        }

        // second condition for the source to have updated its state: it's not on the lock
        // anymore,
        // this means it has processed all the splits and updated its state.
        // (Deliberately empty block: acquiring the checkpoint lock waits for the source to
        // release it.)
        synchronized (sourceContext.getCheckpointLock()) {
        }

        OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
        monitoringFunction.cancel();
        runner.join();

        testHarness.close();

        final ContinuousFileMonitoringFunction<String> monitoringFunctionCopy =
                createTestContinuousFileMonitoringFunction(
                        format, FileProcessingMode.PROCESS_CONTINUOUSLY);

        StreamSource<TimestampedFileInputSplit, ContinuousFileMonitoringFunction<String>> srcCopy =
                new StreamSource<>(monitoringFunctionCopy);

        AbstractStreamOperatorTestHarness<TimestampedFileInputSplit> testHarnessCopy =
                new AbstractStreamOperatorTestHarness<>(srcCopy, 1, 1, 0);
        testHarnessCopy.initializeState(snapshot);
        testHarnessCopy.open();

        Assert.assertNull(error[0]);
        Assert.assertEquals(fileModTime, monitoringFunctionCopy.getGlobalModificationTime());

        hdfs.delete(path, false);
    }

    /**
     * Verifies {@code PROCESS_CONTINUOUSLY} semantics: the source keeps monitoring the
     * directory, so files created after start-up are also read. The source is cancelled by the
     * context once the expected total number of files has been seen.
     */
    @Test
    public void testProcessContinuously() throws Exception {
        String testBasePath = hdfsURI + "/" + UUID.randomUUID() + "/";
        final OneShotLatch latch = new OneShotLatch();

        // create a single file in the directory
        Tuple2<org.apache.hadoop.fs.Path, String> bootstrap =
                createFileAndFillWithData(
                        testBasePath, "file", NO_OF_FILES + 1, "This is test line.");
        Assert.assertTrue(hdfs.exists(bootstrap.f0));

        final Set<String> filesToBeRead = new TreeSet<>();
        filesToBeRead.add(bootstrap.f0.getName());

        TextInputFormat format = new TextInputFormat(new Path(testBasePath));
        format.setFilesFilter(FilePathFilter.createDefaultFilter());

        final ContinuousFileMonitoringFunction<String> monitoringFunction =
                createTestContinuousFileMonitoringFunction(
                        format, FileProcessingMode.PROCESS_CONTINUOUSLY);

        final int totalNoOfFilesToBeRead = NO_OF_FILES + 1; // 1 for the bootstrap + NO_OF_FILES

        final FileVerifyingSourceContext context =
                new FileVerifyingSourceContext(
                        latch, monitoringFunction, 1, totalNoOfFilesToBeRead);

        // NOTE(review): Assert.fail inside this worker thread does not fail the JUnit test
        // directly — it would only surface indirectly (e.g. via the array assertion below).
        final Thread t =
                new Thread() {
                    @Override
                    public void run() {
                        try {
                            monitoringFunction.open(DefaultOpenContext.INSTANCE);
                            monitoringFunction.run(context);
                        } catch (Exception e) {
                            Assert.fail(e.getMessage());
                        }
                    }
                };
        t.start();

        if (!latch.isTriggered()) {
            latch.await();
        }

        // create some additional files that will be processed in the case of
        // PROCESS_CONTINUOUSLY
        final org.apache.hadoop.fs.Path[] filesCreated =
                new org.apache.hadoop.fs.Path[NO_OF_FILES];
        for (int i = 0; i < NO_OF_FILES; i++) {
            Tuple2<org.apache.hadoop.fs.Path, String> file =
                    createFileAndFillWithData(testBasePath, "file", i, "This is test line.");
            filesCreated[i] = file.f0;
            filesToBeRead.add(file.f0.getName());
        }

        // wait until the monitoring thread exits
        t.join();

        Assert.assertArrayEquals(filesToBeRead.toArray(), context.getSeenFiles().toArray());

        // finally delete the files created for the test.
        hdfs.delete(bootstrap.f0, false);
        for (org.apache.hadoop.fs.Path path : filesCreated) {
            hdfs.delete(path, false);
        }
    }

    ///////////				Source Contexts Used by the tests				/////////////////

    /**
     * Source context that records the names of the files whose splits it receives. Optionally
     * triggers a latch after a configured number of distinct files and cancels the monitoring
     * source after another threshold.
     */
    private static class FileVerifyingSourceContext extends DummySourceContext {

        // The monitoring source this context can cancel once enough files were seen.
        private final ContinuousFileMonitoringFunction src;
        private final OneShotLatch latch;
        // Distinct file names observed so far (TreeSet keeps them sorted for array comparison).
        private final Set<String> seenFiles;

        // Thresholds; -1 disables the corresponding action.
        private int elementsBeforeNotifying = -1;
        private int elementsBeforeCanceling = -1;

        FileVerifyingSourceContext(OneShotLatch latch, ContinuousFileMonitoringFunction src) {
            this(latch, src, -1, -1);
        }

        FileVerifyingSourceContext(
                OneShotLatch latch,
                ContinuousFileMonitoringFunction src,
                int elementsBeforeNotifying,
                int elementsBeforeCanceling) {
            this.latch = latch;
            this.seenFiles = new TreeSet<>();
            this.src = src;
            this.elementsBeforeNotifying = elementsBeforeNotifying;
            this.elementsBeforeCanceling = elementsBeforeCanceling;
        }

        Set<String> getSeenFiles() {
            return this.seenFiles;
        }

        @Override
        public void collect(TimestampedFileInputSplit element) {
            String seenFileName = element.getPath().getName();
            this.seenFiles.add(seenFileName);

            if (seenFiles.size() == elementsBeforeNotifying && !latch.isTriggered()) {
                latch.trigger();
            }

            if (seenFiles.size() == elementsBeforeCanceling) {
                src.cancel();
            }
        }

        @Override
        public void close() {
            // the context was terminated so trigger so
            // that all threads that were waiting for this
            // are un-blocked.
            if (!latch.isTriggered()) {
                latch.trigger();
            }
            src.cancel();
        }
    }

    /**
     * Source context that asserts each emitted split's file has a modification time that is
     * non-decreasing and matches the expected mod time at its position.
     */
    private static class ModTimeVerifyingSourceContext extends DummySourceContext {

        final long[] expectedModificationTimes;
        int splitCounter;
        long lastSeenModTime;

        ModTimeVerifyingSourceContext(long[] modTimes) {
            this.expectedModificationTimes = modTimes;
            this.splitCounter = 0;
            this.lastSeenModTime = Long.MIN_VALUE;
        }

        int getCounter() {
            return splitCounter;
        }

        @Override
        public void collect(TimestampedFileInputSplit element) {
            try {
                long modTime =
                        hdfs.getFileStatus(
                                        new org.apache.hadoop.fs.Path(element.getPath().getPath()))
                                .getModificationTime();

                Assert.assertTrue(modTime >= lastSeenModTime);
                Assert.assertEquals(expectedModificationTimes[splitCounter], modTime);

                lastSeenModTime = modTime;
                splitCounter++;
            } catch (IOException e) {
                Assert.fail(e.getMessage());
            }
        }
    }

    /**
     * No-op base source context: subclasses only implement {@code collect}. Provides the
     * checkpoint lock object used by the tests to synchronize with the source.
     */
    private abstract static class DummySourceContext
            implements SourceFunction.SourceContext<TimestampedFileInputSplit> {

        private final Object lock = new Object();

        @Override
        public void collectWithTimestamp(TimestampedFileInputSplit element, long timestamp) {}

        @Override
        public void emitWatermark(Watermark mark) {}

        @Override
        public void markAsTemporarilyIdle() {}

        @Override
        public Object getCheckpointLock() {
            return lock;
        }

        @Override
        public void close() {}
    }

    /////////				Auxiliary Methods				/////////////

    /**
     * Extracts the trailing line number from a line written by
     * {@link #createFileAndFillWithData}; asserts the line has the expected 6-token shape.
     */
    private static int getLineNo(String line) {
        String[] tkns = line.split("\\s");
        Assert.assertEquals(6, tkns.length);
        return Integer.parseInt(tkns[tkns.length - 1]);
    }

    /**
     * Create a file with pre-determined String format of the form: {@code fileIdx +": "+ sampleLine
     * +" "+ lineNo}.
     *
     * <p>The file is first written under a dot-prefixed temporary name and then renamed into
     * place, so the monitoring function never observes a half-written file.
     *
     * @return the final file path together with the full expected file content
     */
    private static Tuple2<org.apache.hadoop.fs.Path, String> createFileAndFillWithData(
            String base, String fileName, int fileIdx, String sampleLine) throws IOException {

        assert (hdfs != null);

        final String fileRandSuffix = UUID.randomUUID().toString();

        org.apache.hadoop.fs.Path file =
                new org.apache.hadoop.fs.Path(base + "/" + fileName + fileRandSuffix);
        Assert.assertFalse(hdfs.exists(file));

        org.apache.hadoop.fs.Path tmp =
                new org.apache.hadoop.fs.Path(base + "/." + fileName + fileRandSuffix);
        FSDataOutputStream stream = hdfs.create(tmp);
        StringBuilder str = new StringBuilder();
        for (int i = 0; i < LINES_PER_FILE; i++) {
            String line = fileIdx + ": " + sampleLine + " " + i + "\n";
            str.append(line);
            stream.write(line.getBytes(ConfigConstants.DEFAULT_CHARSET));
        }
        stream.close();

        hdfs.rename(tmp, file);

        Assert.assertTrue("No result file present", hdfs.exists(file));

        return new Tuple2<>(file, str.toString());
    }

    /**
     * Create continuous monitoring function with 1 reader-parallelism and interval: {@link
     * #INTERVAL}.
     */
    private <OUT> ContinuousFileMonitoringFunction<OUT> createTestContinuousFileMonitoringFunction(
            FileInputFormat<OUT> format, FileProcessingMode fileProcessingMode) {
        ContinuousFileMonitoringFunction<OUT> monitoringFunction =
                new ContinuousFileMonitoringFunction<>(format, fileProcessingMode, 1, INTERVAL);
        monitoringFunction.setRuntimeContext(new MockStreamingRuntimeContext(1, 0));
        return monitoringFunction;
    }
}
apache/cxf
36,912
core/src/test/java/org/apache/cxf/attachment/AttachmentDeserializerTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.attachment; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.PushbackInputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.IntStream; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.xml.sax.helpers.DefaultHandler; import jakarta.activation.DataSource; import jakarta.activation.URLDataSource; import org.apache.cxf.helpers.IOUtils; import org.apache.cxf.message.Attachment; import org.apache.cxf.message.Exchange; import org.apache.cxf.message.ExchangeImpl; import org.apache.cxf.message.Message; import org.apache.cxf.message.MessageImpl; import org.apache.cxf.message.XMLMessage; import org.junit.Before; import org.junit.Test; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static 
org.junit.Assert.assertThat; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; public class AttachmentDeserializerTest { private MessageImpl msg; @Before public void setUp() throws Exception { msg = new MessageImpl(); Exchange exchange = new ExchangeImpl(); msg.setExchange(exchange); } @Test public void testNoBoundaryInCT() throws Exception { //CXF-2623 String message = "SomeHeader: foo\n" + "------=_Part_34950_1098328613.1263781527359\n" + "Content-Type: text/xml; charset=UTF-8\n" + "Content-Transfer-Encoding: binary\n" + "Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n" + "\n" + "<envelope/>\n" + "------=_Part_34950_1098328613.1263781527359\n" + "Content-Type: text/xml\n" + "Content-Transfer-Encoding: binary\n" + "Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n" + "\n" + "<message>\n" + "------=_Part_34950_1098328613.1263781527359--"; Matcher m = Pattern.compile("^--(\\S*)$").matcher(message); assertFalse(m.find()); m = Pattern.compile("^--(\\S*)$", Pattern.MULTILINE).matcher(message); assertTrue(m.find()); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(message.getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); assertEquals(1, msg.getAttachments().size()); } @Test public void testLazyAttachmentCollection() throws Exception { InputStream is = getClass().getResourceAsStream("mimedata2"); String ct = "multipart/related; type=\"application/xop+xml\"; " + "start=\"<soap.xml@xfire.codehaus.org>\"; " + "start-info=\"text/xml; charset=utf-8\"; " + "boundary=\"----=_Part_4_701508.1145579811786\""; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); 
assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); attBody.close(); assertEquals(2, msg.getAttachments().size()); List<String> cidlist = new ArrayList<>(); cidlist.add("xfire_logo.jpg"); cidlist.add("xfire_logo2.jpg"); for (Iterator<Attachment> it = msg.getAttachments().iterator(); it.hasNext();) { Attachment a = it.next(); assertTrue(cidlist.remove(a.getId())); it.remove(); } assertEquals(0, cidlist.size()); assertEquals(0, msg.getAttachments().size()); is.close(); } @Test public void testDeserializerMtom() throws Exception { InputStream is = getClass().getResourceAsStream("mimedata"); String ct = "multipart/related; type=\"application/xop+xml\"; " + "start=\"<soap.xml@xfire.codehaus.org>\"; " + "start-info=\"text/xml; charset=utf-8\"; " + "boundary=\"----=_Part_4_701508.1145579811786\""; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); Collection<Attachment> atts = msg.getAttachments(); assertNotNull(atts); Iterator<Attachment> itr = atts.iterator(); assertTrue(itr.hasNext()); Attachment a = itr.next(); assertNotNull(a); InputStream attIs = a.getDataHandler().getInputStream(); // check the cached output stream try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { IOUtils.copy(attBody, out); assertTrue(out.toString().startsWith("<env:Envelope")); } // try streaming a character off the wire assertEquals(255, attIs.read()); assertEquals(216, (char)attIs.read()); // Attachment invalid = atts.get("INVALID"); // assertNull(invalid.getDataHandler().getInputStream()); // // assertTrue(attIs instanceof ByteArrayInputStream); is.close(); } @Test public void testDeserializerMtomWithAxis2StyleBoundaries() throws Exception { InputStream is = 
getClass().getResourceAsStream("axis2_mimedata"); String ct = "multipart/related; type=\"application/xop+xml\"; " + "start=\"<soap.xml@xfire.codehaus.org>\"; " + "start-info=\"text/xml; charset=utf-8\"; " + "boundary=MIMEBoundaryurn_uuid_6BC4984D5D38EB283C1177616488109"; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); Collection<Attachment> atts = msg.getAttachments(); assertNotNull(atts); Iterator<Attachment> itr = atts.iterator(); assertTrue(itr.hasNext()); Attachment a = itr.next(); assertNotNull(a); InputStream attIs = a.getDataHandler().getInputStream(); // check the cached output stream try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { IOUtils.copy(attBody, out); assertTrue(out.toString().startsWith("<env:Envelope")); } // try streaming a character off the wire assertEquals(255, attIs.read()); assertEquals(216, attIs.read()); // Attachment invalid = atts.get("INVALID"); // assertNull(invalid.getDataHandler().getInputStream()); // // assertTrue(attIs instanceof ByteArrayInputStream); is.close(); } @Test public void testDeserializerSwA() throws Exception { InputStream is = getClass().getResourceAsStream("swadata"); String ct = "multipart/related; type=\"text/xml\"; " + "start=\"<86048FF3556694F7DA1918466DDF8143>\"; " + "boundary=\"----=_Part_0_14158819.1167275505862\""; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); Collection<Attachment> atts = msg.getAttachments(); assertNotNull(atts); Iterator<Attachment> 
itr = atts.iterator(); assertTrue(itr.hasNext()); Attachment a = itr.next(); assertNotNull(a); InputStream attIs = a.getDataHandler().getInputStream(); // check the cached output stream try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { IOUtils.copy(attBody, out); assertTrue(out.toString().startsWith("<?xml")); } // try streaming a character off the wire assertTrue(attIs.read() == 'f'); assertTrue(attIs.read() == 'o'); assertTrue(attIs.read() == 'o'); assertTrue(attIs.read() == 'b'); assertTrue(attIs.read() == 'a'); assertTrue(attIs.read() == 'r'); assertTrue(attIs.read() == -1); is.close(); } @Test public void testDeserializerSwAWithoutBoundryInContentType() throws Exception { InputStream is = getClass().getResourceAsStream("swadata"); String ct = "multipart/related; type=\"text/xml\"; "; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); Collection<Attachment> atts = msg.getAttachments(); assertNotNull(atts); Iterator<Attachment> itr = atts.iterator(); assertTrue(itr.hasNext()); Attachment a = itr.next(); assertNotNull(a); InputStream attIs = a.getDataHandler().getInputStream(); // check the cached output stream try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { IOUtils.copy(attBody, out); assertTrue(out.toString().startsWith("<?xml")); } // try streaming a character off the wire assertTrue(attIs.read() == 'f'); assertTrue(attIs.read() == 'o'); assertTrue(attIs.read() == 'o'); assertTrue(attIs.read() == 'b'); assertTrue(attIs.read() == 'a'); assertTrue(attIs.read() == 'r'); assertTrue(attIs.read() == -1); assertFalse(itr.hasNext()); is.close(); } @Test public void testDeserializerWithCachedFile() throws Exception { InputStream is = 
getClass().getResourceAsStream("mimedata"); String ct = "multipart/related; type=\"application/xop+xml\"; " + "start=\"<soap.xml@xfire.codehaus.org>\"; " + "start-info=\"text/xml; charset=utf-8\"; " + "boundary=\"----=_Part_4_701508.1145579811786\""; msg.put(Message.CONTENT_TYPE, ct); msg.setContent(InputStream.class, is); msg.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, "10"); AttachmentDeserializer deserializer = new AttachmentDeserializer(msg); deserializer.initializeAttachments(); InputStream attBody = msg.getContent(InputStream.class); assertTrue(attBody != is); assertTrue(attBody instanceof DelegatingInputStream); Collection<Attachment> atts = msg.getAttachments(); assertNotNull(atts); Iterator<Attachment> itr = atts.iterator(); assertTrue(itr.hasNext()); Attachment a = itr.next(); assertNotNull(a); InputStream attIs = a.getDataHandler().getInputStream(); assertFalse(itr.hasNext()); try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { IOUtils.copy(attIs, out); assertTrue(out.size() > 1000); } is.close(); } @Test public void testSmallStream() throws Exception { byte[] messageBytes = ("------=_Part_1\n\nJJJJ\n------=_Part_1\n\n" + "Content-Transfer-Encoding: binary\n\n=3D=3D=3D\n------=_Part_1\n").getBytes(); PushbackInputStream pushbackStream = new PushbackInputStream(new ByteArrayInputStream(messageBytes), 2048); pushbackStream.read(new byte[4096], 0, 4015); pushbackStream.unread(messageBytes); pushbackStream.read(new byte[72]); MimeBodyPartInputStream m = new MimeBodyPartInputStream(pushbackStream, "------=_Part_1".getBytes(), 2048); assertEquals(10, m.read(new byte[1000])); assertEquals(-1, m.read(new byte[1000])); assertEquals(-1, m.read(new byte[1000])); m.close(); } @Test public void testCXF2542() throws Exception { StringBuilder buf = new StringBuilder(512); buf.append("------=_Part_0_2180223.1203118300920\n"); buf.append("Content-Type: application/xop+xml; charset=UTF-8; type=\"text/xml\"\n"); 
buf.append("Content-Transfer-Encoding: 8bit\n"); buf.append("Content-ID: <soap.xml@xfire.codehaus.org>\n"); buf.append('\n'); buf.append("<soap:Envelope xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\" " + "xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\" " + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">" + "<soap:Body><getNextMessage xmlns=\"http://foo.bar\" /></soap:Body>" + "</soap:Envelope>\n"); buf.append("------=_Part_0_2180223.1203118300920--\n"); InputStream rawInputStream = new ByteArrayInputStream(buf.toString().getBytes()); MessageImpl message = new MessageImpl(); message.setContent(InputStream.class, rawInputStream); message.put(Message.CONTENT_TYPE, "multipart/related; type=\"application/xop+xml\"; " + "start=\"<soap.xml@xfire.codehaus.org>\"; " + "start-info=\"text/xml\"; boundary=\"----=_Part_0_2180223.1203118300920\""); new AttachmentDeserializer(message).initializeAttachments(); InputStream inputStreamWithoutAttachments = message.getContent(InputStream.class); SAXParser parser = SAXParserFactory.newInstance().newSAXParser(); parser.parse(inputStreamWithoutAttachments, new DefaultHandler()); inputStreamWithoutAttachments.close(); rawInputStream.close(); } @Test public void imitateAttachmentInInterceptorForMessageWithMissingBoundary() throws Exception { String contentType = "multipart/mixed;boundary=abc123"; String data = "--abc123\r\n\r\n<Document></Document>\r\n\r\n"; ByteArrayInputStream inputStream = new ByteArrayInputStream(data.getBytes()); Message message = new XMLMessage(new MessageImpl()); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, inputStream); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System .getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/mixed")); 
ad.initializeAttachments(); assertEquals(0, message.getAttachments().size()); inputStream.close(); } @Test public void testDoesntReturnZero() throws Exception { String contentType = "multipart/mixed;boundary=----=_Part_1"; byte[] messageBytes = ( "------=_Part_1\n\n" + "JJJJ\n" + "------=_Part_1" + "\n\nContent-Transfer-Encoding: binary\n\n" + "ABCD1\r\n" + "------=_Part_1" + "\n\nContent-Transfer-Encoding: binary\n\n" + "ABCD2\r\n" + "------=_Part_1" + "\n\nContent-Transfer-Encoding: binary\n\n" + "ABCD3\r\n" + "------=_Part_1--").getBytes(StandardCharsets.UTF_8); ByteArrayInputStream in = new ByteArrayInputStream(messageBytes) { public int read(byte[] b, int off, int len) { return super.read(b, off, len >= 2 ? 2 : len); } }; Message message = new MessageImpl(); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, in); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System .getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/mixed")); ad.initializeAttachments(); String s = IOUtils.toString(message.getContent(InputStream.class)); assertEquals("JJJJ", s.trim()); int count = 1; for (Attachment a : message.getAttachments()) { s = IOUtils.toString(a.getDataHandler().getInputStream()); assertEquals("ABCD" + count++, s); } in.close(); } @Test public void testCXF3383() throws Exception { String contentType = "multipart/related; type=\"application/xop+xml\";" + " boundary=\"uuid:7a555f51-c9bb-4bd4-9929-706899e2f793\"; start=" + "\"<root.message@cxf.apache.org>\"; start-info=\"text/xml\""; Message message = new MessageImpl(); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, getClass().getResourceAsStream("cxf3383.data")); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System 
.getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/related")); ad.initializeAttachments(); for (int x = 1; x < 50; x++) { String cid = "1882f79d-e20a-4b36-a222-7a75518cf395-" + x + "@cxf.apache.org"; DataSource ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); byte[] bts = new byte[1024]; InputStream ins = ds.getInputStream(); int count = 0; int sz = ins.read(bts, 0, bts.length); while (sz != -1) { count += sz; // We do not expect the data to fill up the buffer: assertTrue(count < bts.length); sz = ins.read(bts, count, bts.length - count); } assertEquals(x + 1, count); ins.close(); } } @Test public void testCXF3582() throws Exception { String contentType = "multipart/related; type=\"application/xop+xml\"; " + "boundary=\"uuid:906fa67b-85f9-4ef5-8e3d-52416022d463\"; " + "start=\"<root.message@cxf.apache.org>\"; start-info=\"text/xml\""; Message message = new MessageImpl(); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, getClass().getResourceAsStream("cxf3582.data")); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System .getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/related")); ad.initializeAttachments(); String cid = "1a66bb35-67fc-4e89-9f33-48af417bf9fe-1@apache.org"; DataSource ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); byte[] bts = new byte[1024]; InputStream ins = ds.getInputStream(); int count = ins.read(bts, 0, bts.length); assertEquals(500, count); assertEquals(-1, ins.read(new byte[1000], 500, 500)); cid = 
"1a66bb35-67fc-4e89-9f33-48af417bf9fe-2@apache.org"; ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); bts = new byte[1024]; ins = ds.getInputStream(); count = ins.read(bts, 0, bts.length); assertEquals(1024, count); assertEquals(225, ins.read(new byte[1000], 500, 500)); assertEquals(-1, ins.read(new byte[1000], 500, 500)); ins.close(); } @Test public void testCXF3582b() throws Exception { String contentType = "multipart/related; type=\"application/xop+xml\"; " + "boundary=\"uuid:906fa67b-85f9-4ef5-8e3d-52416022d463\"; " + "start=\"<root.message@cxf.apache.org>\"; start-info=\"text/xml\""; Message message = new MessageImpl(); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, getClass().getResourceAsStream("cxf3582.data")); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System .getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/related")); ad.initializeAttachments(); String cid = "1a66bb35-67fc-4e89-9f33-48af417bf9fe-1@apache.org"; DataSource ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); byte[] bts = new byte[1024]; InputStream ins = ds.getInputStream(); int count = 0; int x = ins.read(bts, 500, 200); while (x != -1) { count += x; x = ins.read(bts, 500, 200); } assertEquals(500, count); assertEquals(-1, ins.read(new byte[1000], 500, 500)); ins.close(); cid = "1a66bb35-67fc-4e89-9f33-48af417bf9fe-2@apache.org"; ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); bts = new byte[1024]; ins = ds.getInputStream(); count = 0; x = ins.read(bts, 500, 200); while (x != -1) { count += x; x = ins.read(bts, 500, 200); } assertEquals(1249, count); assertEquals(-1, ins.read(new byte[1000], 500, 500)); ins.close(); } @Test public void testCXF3582c() throws 
Exception { String contentType = "multipart/related; type=\"application/xop+xml\"; " + "boundary=\"uuid:906fa67b-85f9-4ef5-8e3d-52416022d463\"; " + "start=\"<root.message@cxf.apache.org>\"; start-info=\"text/xml\""; Message message = new MessageImpl(); message.put(Message.CONTENT_TYPE, contentType); message.setContent(InputStream.class, getClass().getResourceAsStream("cxf3582.data")); message.put(AttachmentDeserializer.ATTACHMENT_DIRECTORY, System .getProperty("java.io.tmpdir")); message.put(AttachmentDeserializer.ATTACHMENT_MEMORY_THRESHOLD, String .valueOf(AttachmentDeserializer.THRESHOLD)); AttachmentDeserializer ad = new AttachmentDeserializer(message, Collections.singletonList("multipart/related")); ad.initializeAttachments(); String cid = "1a66bb35-67fc-4e89-9f33-48af417bf9fe-1@apache.org"; DataSource ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); byte[] bts = new byte[1024]; InputStream ins = ds.getInputStream(); int count = 0; int x = ins.read(bts, 100, 600); while (x != -1) { count += x; x = ins.read(bts, 100, 600); } assertEquals(500, count); assertEquals(-1, ins.read(new byte[1000], 100, 600)); ins.close(); cid = "1a66bb35-67fc-4e89-9f33-48af417bf9fe-2@apache.org"; ds = AttachmentUtil.getAttachmentDataSource(cid, message.getAttachments()); bts = new byte[1024]; ins = ds.getInputStream(); count = 0; x = ins.read(bts, 100, 600); while (x != -1) { count += x; x = ins.read(bts, 100, 600); } assertEquals(1249, count); assertEquals(-1, ins.read(new byte[1000], 100, 600)); ins.close(); } @Test public void testManyAttachments() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") .append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); // Add many attachments IntStream.range(0, 
100000).forEach(i -> { sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); }); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertThrows("Failure expected on too many attachments", RuntimeException.class, () -> msg.getAttachments().size()); } @Test public void testChangingMaxAttachmentCount() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") .append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); // Add many attachments IntStream.range(0, 40).forEach(i -> { sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); }); msg = new MessageImpl(); msg.put(AttachmentDeserializer.ATTACHMENT_MAX_COUNT, "30"); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertThrows("Failure expected on too many attachments", RuntimeException.class, 
() -> msg.getAttachments().size()); // Now we'll allow it msg = new MessageImpl(); msg.put(AttachmentDeserializer.ATTACHMENT_MAX_COUNT, "60"); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertEquals(40, msg.getAttachments().size()); } @Test public void testInvalidContentDispositionFilename() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") .append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append("Content-Disposition: attachment; filename=../../../../../../../../etc/passwd\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertEquals(1, msg.getAttachments().size()); Attachment attachment = msg.getAttachments().iterator().next(); AttachmentDataSource dataSource = (AttachmentDataSource)attachment.getDataHandler().getDataSource(); assertEquals("passwd", dataSource.getName()); } @Test public void testDefaultContentTypeIfNotSet() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") 
.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertEquals(1, msg.getAttachments().size()); Attachment attachment = msg.getAttachments().iterator().next(); AttachmentDataSource dataSource = (AttachmentDataSource)attachment.getDataHandler().getDataSource(); assertEquals("application/octet-stream", dataSource.getContentType()); } @Test public void testContentTypeIfNotSet() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") .append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, 
"multipart/related"); msg.put(AttachmentUtil.ATTACHMENT_CONTENT_TYPE, "text/plain"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertEquals(1, msg.getAttachments().size()); Attachment attachment = msg.getAttachments().iterator().next(); AttachmentDataSource dataSource = (AttachmentDataSource)attachment.getDataHandler().getDataSource(); assertEquals("text/plain", dataSource.getContentType()); } @Test public void testContentType() throws Exception { StringBuilder sb = new StringBuilder(1000); sb.append("SomeHeader: foo\n") .append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <318731183421.1263781527359.IBM.WEBSERVICES@auhpap02>\n") .append('\n') .append("<envelope/>\n"); sb.append("------=_Part_34950_1098328613.1263781527359\n") .append("Content-Transfer-Encoding: binary\n") .append("Content-Id: <b86a5f2d-e7af-4e5e-b71a-9f6f2307cab0>\n") .append("Content-Type: text/xml; charset=UTF-8\n") .append('\n') .append("<message>\n") .append("------=_Part_34950_1098328613.1263781527359--\n"); msg = new MessageImpl(); msg.setContent(InputStream.class, new ByteArrayInputStream(sb.toString().getBytes(StandardCharsets.UTF_8))); msg.put(Message.CONTENT_TYPE, "multipart/related"); AttachmentDeserializer ad = new AttachmentDeserializer(msg); ad.initializeAttachments(); // Force it to load the attachments assertEquals(1, msg.getAttachments().size()); Attachment attachment = msg.getAttachments().iterator().next(); AttachmentDataSource dataSource = (AttachmentDataSource)attachment.getDataHandler().getDataSource(); assertEquals("text/xml; charset=UTF-8", dataSource.getContentType()); } @Test public void testCXF8706() { final DataSource ds = AttachmentUtil .getAttachmentDataSource("cid:http://image.com/1.gif", Collections.emptyList()); assertThat(ds, instanceOf(LazyDataSource.class)); } 
@Test public void testCXF8706followUrl() { System.setProperty(AttachmentUtil.ATTACHMENT_XOP_FOLLOW_URLS_PROPERTY, "true"); try { final DataSource ds = AttachmentUtil .getAttachmentDataSource("cid:http://image.com/1.gif", Collections.emptyList()); assertThat(ds, instanceOf(URLDataSource.class)); } finally { System.clearProperty(AttachmentUtil.ATTACHMENT_XOP_FOLLOW_URLS_PROPERTY); } } }
googleapis/google-cloud-java
37,110
java-datacatalog/proto-google-cloud-datacatalog-v1/src/main/java/com/google/cloud/datacatalog/v1/EntryOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1/datacatalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1; public interface EntryOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.datacatalog.v1.Entry) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Output only. Identifier. The resource name of an entry in URL format. * * Note: The entry itself and its child resources might not be * stored in the location specified in its name. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IDENTIFIER, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ java.lang.String getName(); /** * * * <pre> * Output only. Identifier. The resource name of an entry in URL format. * * Note: The entry itself and its child resources might not be * stored in the location specified in its name. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY, (.google.api.field_behavior) = IDENTIFIER, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * The resource this metadata entry refers to. 
* * For Google Cloud Platform resources, `linked_resource` is the * [Full Resource Name] * (https://cloud.google.com/apis/design/resource_names#full_resource_name). * For example, the `linked_resource` for a table resource from BigQuery is: * * `//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}` * * Output only when the entry is one of the types in the `EntryType` enum. * * For entries with a `user_specified_type`, this field is optional and * defaults to an empty string. * * The resource string must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), periods (.), colons (:), slashes (/), dashes (-), * and hashes (#). * The maximum size is 200 bytes when encoded in UTF-8. * </pre> * * <code>string linked_resource = 9;</code> * * @return The linkedResource. */ java.lang.String getLinkedResource(); /** * * * <pre> * The resource this metadata entry refers to. * * For Google Cloud Platform resources, `linked_resource` is the * [Full Resource Name] * (https://cloud.google.com/apis/design/resource_names#full_resource_name). * For example, the `linked_resource` for a table resource from BigQuery is: * * `//bigquery.googleapis.com/projects/{PROJECT_ID}/datasets/{DATASET_ID}/tables/{TABLE_ID}` * * Output only when the entry is one of the types in the `EntryType` enum. * * For entries with a `user_specified_type`, this field is optional and * defaults to an empty string. * * The resource string must contain only letters (a-z, A-Z), numbers (0-9), * underscores (_), periods (.), colons (:), slashes (/), dashes (-), * and hashes (#). * The maximum size is 200 bytes when encoded in UTF-8. * </pre> * * <code>string linked_resource = 9;</code> * * @return The bytes for linkedResource. */ com.google.protobuf.ByteString getLinkedResourceBytes(); /** * * * <pre> * [Fully Qualified Name * (FQN)](https://cloud.google.com//data-catalog/docs/fully-qualified-names) * of the resource. 
Set automatically for entries representing resources from * synced systems. Settable only during creation, and read-only later. Can * be used for search and lookup of the entries. * </pre> * * <code>string fully_qualified_name = 29;</code> * * @return The fullyQualifiedName. */ java.lang.String getFullyQualifiedName(); /** * * * <pre> * [Fully Qualified Name * (FQN)](https://cloud.google.com//data-catalog/docs/fully-qualified-names) * of the resource. Set automatically for entries representing resources from * synced systems. Settable only during creation, and read-only later. Can * be used for search and lookup of the entries. * </pre> * * <code>string fully_qualified_name = 29;</code> * * @return The bytes for fullyQualifiedName. */ com.google.protobuf.ByteString getFullyQualifiedNameBytes(); /** * * * <pre> * The type of the entry. * * For details, see [`EntryType`](#entrytype). * </pre> * * <code>.google.cloud.datacatalog.v1.EntryType type = 2;</code> * * @return Whether the type field is set. */ boolean hasType(); /** * * * <pre> * The type of the entry. * * For details, see [`EntryType`](#entrytype). * </pre> * * <code>.google.cloud.datacatalog.v1.EntryType type = 2;</code> * * @return The enum numeric value on the wire for type. */ int getTypeValue(); /** * * * <pre> * The type of the entry. * * For details, see [`EntryType`](#entrytype). * </pre> * * <code>.google.cloud.datacatalog.v1.EntryType type = 2;</code> * * @return The type. */ com.google.cloud.datacatalog.v1.EntryType getType(); /** * * * <pre> * Custom entry type that doesn't match any of the values allowed for input * and listed in the `EntryType` enum. * * When creating an entry, first check the type values in the enum. * If there are no appropriate types for the new entry, * provide a custom value, for example, `my_special_type`. * * The `user_specified_type` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. 
* * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. * </pre> * * <code>string user_specified_type = 16;</code> * * @return Whether the userSpecifiedType field is set. */ boolean hasUserSpecifiedType(); /** * * * <pre> * Custom entry type that doesn't match any of the values allowed for input * and listed in the `EntryType` enum. * * When creating an entry, first check the type values in the enum. * If there are no appropriate types for the new entry, * provide a custom value, for example, `my_special_type`. * * The `user_specified_type` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. * * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. * </pre> * * <code>string user_specified_type = 16;</code> * * @return The userSpecifiedType. */ java.lang.String getUserSpecifiedType(); /** * * * <pre> * Custom entry type that doesn't match any of the values allowed for input * and listed in the `EntryType` enum. * * When creating an entry, first check the type values in the enum. * If there are no appropriate types for the new entry, * provide a custom value, for example, `my_special_type`. * * The `user_specified_type` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. * * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. * </pre> * * <code>string user_specified_type = 16;</code> * * @return The bytes for userSpecifiedType. */ com.google.protobuf.ByteString getUserSpecifiedTypeBytes(); /** * * * <pre> * Output only. Indicates the entry's source system that Data Catalog * integrates with, such as BigQuery, Pub/Sub, or Dataproc Metastore. 
* </pre> * * <code> * .google.cloud.datacatalog.v1.IntegratedSystem integrated_system = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the integratedSystem field is set. */ boolean hasIntegratedSystem(); /** * * * <pre> * Output only. Indicates the entry's source system that Data Catalog * integrates with, such as BigQuery, Pub/Sub, or Dataproc Metastore. * </pre> * * <code> * .google.cloud.datacatalog.v1.IntegratedSystem integrated_system = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for integratedSystem. */ int getIntegratedSystemValue(); /** * * * <pre> * Output only. Indicates the entry's source system that Data Catalog * integrates with, such as BigQuery, Pub/Sub, or Dataproc Metastore. * </pre> * * <code> * .google.cloud.datacatalog.v1.IntegratedSystem integrated_system = 17 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The integratedSystem. */ com.google.cloud.datacatalog.v1.IntegratedSystem getIntegratedSystem(); /** * * * <pre> * Indicates the entry's source system that Data Catalog doesn't * automatically integrate with. * * The `user_specified_system` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. * * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. * </pre> * * <code>string user_specified_system = 18;</code> * * @return Whether the userSpecifiedSystem field is set. */ boolean hasUserSpecifiedSystem(); /** * * * <pre> * Indicates the entry's source system that Data Catalog doesn't * automatically integrate with. * * The `user_specified_system` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. * * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. 
* </pre> * * <code>string user_specified_system = 18;</code> * * @return The userSpecifiedSystem. */ java.lang.String getUserSpecifiedSystem(); /** * * * <pre> * Indicates the entry's source system that Data Catalog doesn't * automatically integrate with. * * The `user_specified_system` string has the following limitations: * * * Is case insensitive. * * Must begin with a letter or underscore. * * Can only contain letters, numbers, and underscores. * * Must be at least 1 character and at most 64 characters long. * </pre> * * <code>string user_specified_system = 18;</code> * * @return The bytes for userSpecifiedSystem. */ com.google.protobuf.ByteString getUserSpecifiedSystemBytes(); /** * * * <pre> * Specification that applies to a relational database system. Only settable * when `user_specified_system` is equal to `SQL_DATABASE` * </pre> * * <code>.google.cloud.datacatalog.v1.SqlDatabaseSystemSpec sql_database_system_spec = 39;</code> * * @return Whether the sqlDatabaseSystemSpec field is set. */ boolean hasSqlDatabaseSystemSpec(); /** * * * <pre> * Specification that applies to a relational database system. Only settable * when `user_specified_system` is equal to `SQL_DATABASE` * </pre> * * <code>.google.cloud.datacatalog.v1.SqlDatabaseSystemSpec sql_database_system_spec = 39;</code> * * @return The sqlDatabaseSystemSpec. */ com.google.cloud.datacatalog.v1.SqlDatabaseSystemSpec getSqlDatabaseSystemSpec(); /** * * * <pre> * Specification that applies to a relational database system. Only settable * when `user_specified_system` is equal to `SQL_DATABASE` * </pre> * * <code>.google.cloud.datacatalog.v1.SqlDatabaseSystemSpec sql_database_system_spec = 39;</code> */ com.google.cloud.datacatalog.v1.SqlDatabaseSystemSpecOrBuilder getSqlDatabaseSystemSpecOrBuilder(); /** * * * <pre> * Specification that applies to Looker sysstem. 
Only settable when * `user_specified_system` is equal to `LOOKER` * </pre> * * <code>.google.cloud.datacatalog.v1.LookerSystemSpec looker_system_spec = 40;</code> * * @return Whether the lookerSystemSpec field is set. */ boolean hasLookerSystemSpec(); /** * * * <pre> * Specification that applies to Looker sysstem. Only settable when * `user_specified_system` is equal to `LOOKER` * </pre> * * <code>.google.cloud.datacatalog.v1.LookerSystemSpec looker_system_spec = 40;</code> * * @return The lookerSystemSpec. */ com.google.cloud.datacatalog.v1.LookerSystemSpec getLookerSystemSpec(); /** * * * <pre> * Specification that applies to Looker sysstem. Only settable when * `user_specified_system` is equal to `LOOKER` * </pre> * * <code>.google.cloud.datacatalog.v1.LookerSystemSpec looker_system_spec = 40;</code> */ com.google.cloud.datacatalog.v1.LookerSystemSpecOrBuilder getLookerSystemSpecOrBuilder(); /** * * * <pre> * Specification that applies to Cloud Bigtable system. Only settable when * `integrated_system` is equal to `CLOUD_BIGTABLE` * </pre> * * <code>.google.cloud.datacatalog.v1.CloudBigtableSystemSpec cloud_bigtable_system_spec = 41; * </code> * * @return Whether the cloudBigtableSystemSpec field is set. */ boolean hasCloudBigtableSystemSpec(); /** * * * <pre> * Specification that applies to Cloud Bigtable system. Only settable when * `integrated_system` is equal to `CLOUD_BIGTABLE` * </pre> * * <code>.google.cloud.datacatalog.v1.CloudBigtableSystemSpec cloud_bigtable_system_spec = 41; * </code> * * @return The cloudBigtableSystemSpec. */ com.google.cloud.datacatalog.v1.CloudBigtableSystemSpec getCloudBigtableSystemSpec(); /** * * * <pre> * Specification that applies to Cloud Bigtable system. 
Only settable when * `integrated_system` is equal to `CLOUD_BIGTABLE` * </pre> * * <code>.google.cloud.datacatalog.v1.CloudBigtableSystemSpec cloud_bigtable_system_spec = 41; * </code> */ com.google.cloud.datacatalog.v1.CloudBigtableSystemSpecOrBuilder getCloudBigtableSystemSpecOrBuilder(); /** * * * <pre> * Specification that applies to a Cloud Storage fileset. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.GcsFilesetSpec gcs_fileset_spec = 6;</code> * * @return Whether the gcsFilesetSpec field is set. */ boolean hasGcsFilesetSpec(); /** * * * <pre> * Specification that applies to a Cloud Storage fileset. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.GcsFilesetSpec gcs_fileset_spec = 6;</code> * * @return The gcsFilesetSpec. */ com.google.cloud.datacatalog.v1.GcsFilesetSpec getGcsFilesetSpec(); /** * * * <pre> * Specification that applies to a Cloud Storage fileset. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.GcsFilesetSpec gcs_fileset_spec = 6;</code> */ com.google.cloud.datacatalog.v1.GcsFilesetSpecOrBuilder getGcsFilesetSpecOrBuilder(); /** * * * <pre> * Output only. Specification that applies to a BigQuery table. Valid only * for entries with the `TABLE` type. * </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryTableSpec bigquery_table_spec = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the bigqueryTableSpec field is set. */ boolean hasBigqueryTableSpec(); /** * * * <pre> * Output only. Specification that applies to a BigQuery table. Valid only * for entries with the `TABLE` type. * </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryTableSpec bigquery_table_spec = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The bigqueryTableSpec. 
*/ com.google.cloud.datacatalog.v1.BigQueryTableSpec getBigqueryTableSpec(); /** * * * <pre> * Output only. Specification that applies to a BigQuery table. Valid only * for entries with the `TABLE` type. * </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryTableSpec bigquery_table_spec = 12 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.datacatalog.v1.BigQueryTableSpecOrBuilder getBigqueryTableSpecOrBuilder(); /** * * * <pre> * Output only. Specification for a group of BigQuery tables with * the `[prefix]YYYYMMDD` name pattern. * * For more information, see [Introduction to partitioned tables] * (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). * </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryDateShardedSpec bigquery_date_sharded_spec = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the bigqueryDateShardedSpec field is set. */ boolean hasBigqueryDateShardedSpec(); /** * * * <pre> * Output only. Specification for a group of BigQuery tables with * the `[prefix]YYYYMMDD` name pattern. * * For more information, see [Introduction to partitioned tables] * (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). * </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryDateShardedSpec bigquery_date_sharded_spec = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The bigqueryDateShardedSpec. */ com.google.cloud.datacatalog.v1.BigQueryDateShardedSpec getBigqueryDateShardedSpec(); /** * * * <pre> * Output only. Specification for a group of BigQuery tables with * the `[prefix]YYYYMMDD` name pattern. * * For more information, see [Introduction to partitioned tables] * (https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding). 
* </pre> * * <code> * .google.cloud.datacatalog.v1.BigQueryDateShardedSpec bigquery_date_sharded_spec = 15 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.datacatalog.v1.BigQueryDateShardedSpecOrBuilder getBigqueryDateShardedSpecOrBuilder(); /** * * * <pre> * Specification that applies to a table resource. Valid only * for entries with the `TABLE` or `EXPLORE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DatabaseTableSpec database_table_spec = 24;</code> * * @return Whether the databaseTableSpec field is set. */ boolean hasDatabaseTableSpec(); /** * * * <pre> * Specification that applies to a table resource. Valid only * for entries with the `TABLE` or `EXPLORE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DatabaseTableSpec database_table_spec = 24;</code> * * @return The databaseTableSpec. */ com.google.cloud.datacatalog.v1.DatabaseTableSpec getDatabaseTableSpec(); /** * * * <pre> * Specification that applies to a table resource. Valid only * for entries with the `TABLE` or `EXPLORE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DatabaseTableSpec database_table_spec = 24;</code> */ com.google.cloud.datacatalog.v1.DatabaseTableSpecOrBuilder getDatabaseTableSpecOrBuilder(); /** * * * <pre> * Specification that applies to a data source connection. Valid only * for entries with the `DATA_SOURCE_CONNECTION` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DataSourceConnectionSpec data_source_connection_spec = 27; * </code> * * @return Whether the dataSourceConnectionSpec field is set. */ boolean hasDataSourceConnectionSpec(); /** * * * <pre> * Specification that applies to a data source connection. Valid only * for entries with the `DATA_SOURCE_CONNECTION` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DataSourceConnectionSpec data_source_connection_spec = 27; * </code> * * @return The dataSourceConnectionSpec. 
*/ com.google.cloud.datacatalog.v1.DataSourceConnectionSpec getDataSourceConnectionSpec(); /** * * * <pre> * Specification that applies to a data source connection. Valid only * for entries with the `DATA_SOURCE_CONNECTION` type. * </pre> * * <code>.google.cloud.datacatalog.v1.DataSourceConnectionSpec data_source_connection_spec = 27; * </code> */ com.google.cloud.datacatalog.v1.DataSourceConnectionSpecOrBuilder getDataSourceConnectionSpecOrBuilder(); /** * * * <pre> * Specification that applies to a user-defined function or procedure. Valid * only for entries with the `ROUTINE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.RoutineSpec routine_spec = 28;</code> * * @return Whether the routineSpec field is set. */ boolean hasRoutineSpec(); /** * * * <pre> * Specification that applies to a user-defined function or procedure. Valid * only for entries with the `ROUTINE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.RoutineSpec routine_spec = 28;</code> * * @return The routineSpec. */ com.google.cloud.datacatalog.v1.RoutineSpec getRoutineSpec(); /** * * * <pre> * Specification that applies to a user-defined function or procedure. Valid * only for entries with the `ROUTINE` type. * </pre> * * <code>.google.cloud.datacatalog.v1.RoutineSpec routine_spec = 28;</code> */ com.google.cloud.datacatalog.v1.RoutineSpecOrBuilder getRoutineSpecOrBuilder(); /** * * * <pre> * Specification that applies to a dataset. * </pre> * * <code>.google.cloud.datacatalog.v1.DatasetSpec dataset_spec = 32;</code> * * @return Whether the datasetSpec field is set. */ boolean hasDatasetSpec(); /** * * * <pre> * Specification that applies to a dataset. * </pre> * * <code>.google.cloud.datacatalog.v1.DatasetSpec dataset_spec = 32;</code> * * @return The datasetSpec. */ com.google.cloud.datacatalog.v1.DatasetSpec getDatasetSpec(); /** * * * <pre> * Specification that applies to a dataset. 
* </pre> * * <code>.google.cloud.datacatalog.v1.DatasetSpec dataset_spec = 32;</code> */ com.google.cloud.datacatalog.v1.DatasetSpecOrBuilder getDatasetSpecOrBuilder(); /** * * * <pre> * Specification that applies to a fileset resource. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.FilesetSpec fileset_spec = 33;</code> * * @return Whether the filesetSpec field is set. */ boolean hasFilesetSpec(); /** * * * <pre> * Specification that applies to a fileset resource. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.FilesetSpec fileset_spec = 33;</code> * * @return The filesetSpec. */ com.google.cloud.datacatalog.v1.FilesetSpec getFilesetSpec(); /** * * * <pre> * Specification that applies to a fileset resource. Valid only * for entries with the `FILESET` type. * </pre> * * <code>.google.cloud.datacatalog.v1.FilesetSpec fileset_spec = 33;</code> */ com.google.cloud.datacatalog.v1.FilesetSpecOrBuilder getFilesetSpecOrBuilder(); /** * * * <pre> * Specification that applies to a Service resource. * </pre> * * <code>.google.cloud.datacatalog.v1.ServiceSpec service_spec = 42;</code> * * @return Whether the serviceSpec field is set. */ boolean hasServiceSpec(); /** * * * <pre> * Specification that applies to a Service resource. * </pre> * * <code>.google.cloud.datacatalog.v1.ServiceSpec service_spec = 42;</code> * * @return The serviceSpec. */ com.google.cloud.datacatalog.v1.ServiceSpec getServiceSpec(); /** * * * <pre> * Specification that applies to a Service resource. * </pre> * * <code>.google.cloud.datacatalog.v1.ServiceSpec service_spec = 42;</code> */ com.google.cloud.datacatalog.v1.ServiceSpecOrBuilder getServiceSpecOrBuilder(); /** * * * <pre> * Model specification. * </pre> * * <code>.google.cloud.datacatalog.v1.ModelSpec model_spec = 43;</code> * * @return Whether the modelSpec field is set. */ boolean hasModelSpec(); /** * * * <pre> * Model specification. 
* </pre> * * <code>.google.cloud.datacatalog.v1.ModelSpec model_spec = 43;</code> * * @return The modelSpec. */ com.google.cloud.datacatalog.v1.ModelSpec getModelSpec(); /** * * * <pre> * Model specification. * </pre> * * <code>.google.cloud.datacatalog.v1.ModelSpec model_spec = 43;</code> */ com.google.cloud.datacatalog.v1.ModelSpecOrBuilder getModelSpecOrBuilder(); /** * * * <pre> * FeatureonlineStore spec for Vertex AI Feature Store. * </pre> * * <code>.google.cloud.datacatalog.v1.FeatureOnlineStoreSpec feature_online_store_spec = 45; * </code> * * @return Whether the featureOnlineStoreSpec field is set. */ boolean hasFeatureOnlineStoreSpec(); /** * * * <pre> * FeatureonlineStore spec for Vertex AI Feature Store. * </pre> * * <code>.google.cloud.datacatalog.v1.FeatureOnlineStoreSpec feature_online_store_spec = 45; * </code> * * @return The featureOnlineStoreSpec. */ com.google.cloud.datacatalog.v1.FeatureOnlineStoreSpec getFeatureOnlineStoreSpec(); /** * * * <pre> * FeatureonlineStore spec for Vertex AI Feature Store. * </pre> * * <code>.google.cloud.datacatalog.v1.FeatureOnlineStoreSpec feature_online_store_spec = 45; * </code> */ com.google.cloud.datacatalog.v1.FeatureOnlineStoreSpecOrBuilder getFeatureOnlineStoreSpecOrBuilder(); /** * * * <pre> * Display name of an entry. * * The maximum size is 500 bytes when encoded in UTF-8. * Default value is an empty string. * </pre> * * <code>string display_name = 3;</code> * * @return The displayName. */ java.lang.String getDisplayName(); /** * * * <pre> * Display name of an entry. * * The maximum size is 500 bytes when encoded in UTF-8. * Default value is an empty string. * </pre> * * <code>string display_name = 3;</code> * * @return The bytes for displayName. */ com.google.protobuf.ByteString getDisplayNameBytes(); /** * * * <pre> * Entry description that can consist of several sentences or paragraphs * that describe entry contents. 
* * The description must not contain Unicode non-characters as well as C0 * and C1 control codes except tabs (HT), new lines (LF), carriage returns * (CR), and page breaks (FF). * The maximum size is 2000 bytes when encoded in UTF-8. * Default value is an empty string. * </pre> * * <code>string description = 4;</code> * * @return The description. */ java.lang.String getDescription(); /** * * * <pre> * Entry description that can consist of several sentences or paragraphs * that describe entry contents. * * The description must not contain Unicode non-characters as well as C0 * and C1 control codes except tabs (HT), new lines (LF), carriage returns * (CR), and page breaks (FF). * The maximum size is 2000 bytes when encoded in UTF-8. * Default value is an empty string. * </pre> * * <code>string description = 4;</code> * * @return The bytes for description. */ com.google.protobuf.ByteString getDescriptionBytes(); /** * * * <pre> * Business Context of the entry. Not supported for BigQuery datasets * </pre> * * <code>.google.cloud.datacatalog.v1.BusinessContext business_context = 37;</code> * * @return Whether the businessContext field is set. */ boolean hasBusinessContext(); /** * * * <pre> * Business Context of the entry. Not supported for BigQuery datasets * </pre> * * <code>.google.cloud.datacatalog.v1.BusinessContext business_context = 37;</code> * * @return The businessContext. */ com.google.cloud.datacatalog.v1.BusinessContext getBusinessContext(); /** * * * <pre> * Business Context of the entry. Not supported for BigQuery datasets * </pre> * * <code>.google.cloud.datacatalog.v1.BusinessContext business_context = 37;</code> */ com.google.cloud.datacatalog.v1.BusinessContextOrBuilder getBusinessContextOrBuilder(); /** * * * <pre> * Schema of the entry. An entry might not have any schema attached to it. * </pre> * * <code>.google.cloud.datacatalog.v1.Schema schema = 5;</code> * * @return Whether the schema field is set. 
*/ boolean hasSchema(); /** * * * <pre> * Schema of the entry. An entry might not have any schema attached to it. * </pre> * * <code>.google.cloud.datacatalog.v1.Schema schema = 5;</code> * * @return The schema. */ com.google.cloud.datacatalog.v1.Schema getSchema(); /** * * * <pre> * Schema of the entry. An entry might not have any schema attached to it. * </pre> * * <code>.google.cloud.datacatalog.v1.Schema schema = 5;</code> */ com.google.cloud.datacatalog.v1.SchemaOrBuilder getSchemaOrBuilder(); /** * * * <pre> * Timestamps from the underlying resource, not from the Data Catalog * entry. * * Output only when the entry has a system listed in the `IntegratedSystem` * enum. For entries with `user_specified_system`, this field is optional * and defaults to an empty timestamp. * </pre> * * <code>.google.cloud.datacatalog.v1.SystemTimestamps source_system_timestamps = 7;</code> * * @return Whether the sourceSystemTimestamps field is set. */ boolean hasSourceSystemTimestamps(); /** * * * <pre> * Timestamps from the underlying resource, not from the Data Catalog * entry. * * Output only when the entry has a system listed in the `IntegratedSystem` * enum. For entries with `user_specified_system`, this field is optional * and defaults to an empty timestamp. * </pre> * * <code>.google.cloud.datacatalog.v1.SystemTimestamps source_system_timestamps = 7;</code> * * @return The sourceSystemTimestamps. */ com.google.cloud.datacatalog.v1.SystemTimestamps getSourceSystemTimestamps(); /** * * * <pre> * Timestamps from the underlying resource, not from the Data Catalog * entry. * * Output only when the entry has a system listed in the `IntegratedSystem` * enum. For entries with `user_specified_system`, this field is optional * and defaults to an empty timestamp. 
* </pre> * * <code>.google.cloud.datacatalog.v1.SystemTimestamps source_system_timestamps = 7;</code> */ com.google.cloud.datacatalog.v1.SystemTimestampsOrBuilder getSourceSystemTimestampsOrBuilder(); /** * * * <pre> * Resource usage statistics. * </pre> * * <code>.google.cloud.datacatalog.v1.UsageSignal usage_signal = 13;</code> * * @return Whether the usageSignal field is set. */ boolean hasUsageSignal(); /** * * * <pre> * Resource usage statistics. * </pre> * * <code>.google.cloud.datacatalog.v1.UsageSignal usage_signal = 13;</code> * * @return The usageSignal. */ com.google.cloud.datacatalog.v1.UsageSignal getUsageSignal(); /** * * * <pre> * Resource usage statistics. * </pre> * * <code>.google.cloud.datacatalog.v1.UsageSignal usage_signal = 13;</code> */ com.google.cloud.datacatalog.v1.UsageSignalOrBuilder getUsageSignalOrBuilder(); /** * * * <pre> * Cloud labels attached to the entry. * * In Data Catalog, you can create and modify labels attached only to custom * entries. Synced entries have unmodifiable labels that come from the source * system. * </pre> * * <code>map&lt;string, string&gt; labels = 14;</code> */ int getLabelsCount(); /** * * * <pre> * Cloud labels attached to the entry. * * In Data Catalog, you can create and modify labels attached only to custom * entries. Synced entries have unmodifiable labels that come from the source * system. * </pre> * * <code>map&lt;string, string&gt; labels = 14;</code> */ boolean containsLabels(java.lang.String key); /** Use {@link #getLabelsMap()} instead. */ @java.lang.Deprecated java.util.Map<java.lang.String, java.lang.String> getLabels(); /** * * * <pre> * Cloud labels attached to the entry. * * In Data Catalog, you can create and modify labels attached only to custom * entries. Synced entries have unmodifiable labels that come from the source * system. 
* </pre> * * <code>map&lt;string, string&gt; labels = 14;</code> */ java.util.Map<java.lang.String, java.lang.String> getLabelsMap(); /** * * * <pre> * Cloud labels attached to the entry. * * In Data Catalog, you can create and modify labels attached only to custom * entries. Synced entries have unmodifiable labels that come from the source * system. * </pre> * * <code>map&lt;string, string&gt; labels = 14;</code> */ /* nullable */ java.lang.String getLabelsOrDefault( java.lang.String key, /* nullable */ java.lang.String defaultValue); /** * * * <pre> * Cloud labels attached to the entry. * * In Data Catalog, you can create and modify labels attached only to custom * entries. Synced entries have unmodifiable labels that come from the source * system. * </pre> * * <code>map&lt;string, string&gt; labels = 14;</code> */ java.lang.String getLabelsOrThrow(java.lang.String key); /** * * * <pre> * Output only. Physical location of the entry. * </pre> * * <code> * .google.cloud.datacatalog.v1.DataSource data_source = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the dataSource field is set. */ boolean hasDataSource(); /** * * * <pre> * Output only. Physical location of the entry. * </pre> * * <code> * .google.cloud.datacatalog.v1.DataSource data_source = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The dataSource. */ com.google.cloud.datacatalog.v1.DataSource getDataSource(); /** * * * <pre> * Output only. Physical location of the entry. * </pre> * * <code> * .google.cloud.datacatalog.v1.DataSource data_source = 20 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.datacatalog.v1.DataSourceOrBuilder getDataSourceOrBuilder(); /** * * * <pre> * Output only. Additional information related to the entry. Private to the * current user. 
* </pre> * * <code> * .google.cloud.datacatalog.v1.PersonalDetails personal_details = 26 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the personalDetails field is set. */ boolean hasPersonalDetails(); /** * * * <pre> * Output only. Additional information related to the entry. Private to the * current user. * </pre> * * <code> * .google.cloud.datacatalog.v1.PersonalDetails personal_details = 26 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The personalDetails. */ com.google.cloud.datacatalog.v1.PersonalDetails getPersonalDetails(); /** * * * <pre> * Output only. Additional information related to the entry. Private to the * current user. * </pre> * * <code> * .google.cloud.datacatalog.v1.PersonalDetails personal_details = 26 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ com.google.cloud.datacatalog.v1.PersonalDetailsOrBuilder getPersonalDetailsOrBuilder(); com.google.cloud.datacatalog.v1.Entry.EntryTypeCase getEntryTypeCase(); com.google.cloud.datacatalog.v1.Entry.SystemCase getSystemCase(); com.google.cloud.datacatalog.v1.Entry.SystemSpecCase getSystemSpecCase(); com.google.cloud.datacatalog.v1.Entry.TypeSpecCase getTypeSpecCase(); com.google.cloud.datacatalog.v1.Entry.SpecCase getSpecCase(); }
googleapis/google-cloud-java
37,341
java-asset/proto-google-cloud-asset-v1p7beta1/src/main/java/com/google/cloud/asset/v1p7beta1/OutputConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/asset/v1p7beta1/asset_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.asset.v1p7beta1; /** * * * <pre> * Output configuration for export assets destination. * </pre> * * Protobuf type {@code google.cloud.asset.v1p7beta1.OutputConfig} */ public final class OutputConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.asset.v1p7beta1.OutputConfig) OutputConfigOrBuilder { private static final long serialVersionUID = 0L; // Use OutputConfig.newBuilder() to construct. 
private OutputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private OutputConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new OutputConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.asset.v1p7beta1.AssetServiceProto .internal_static_google_cloud_asset_v1p7beta1_OutputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.asset.v1p7beta1.AssetServiceProto .internal_static_google_cloud_asset_v1p7beta1_OutputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.asset.v1p7beta1.OutputConfig.class, com.google.cloud.asset.v1p7beta1.OutputConfig.Builder.class); } private int destinationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object destination_; public enum DestinationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_DESTINATION(1), BIGQUERY_DESTINATION(2), DESTINATION_NOT_SET(0); private final int value; private DestinationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static DestinationCase valueOf(int value) { return forNumber(value); } public static DestinationCase forNumber(int value) { switch (value) { case 1: return GCS_DESTINATION; case 2: return BIGQUERY_DESTINATION; case 0: return DESTINATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public static final int GCS_DESTINATION_FIELD_NUMBER = 1; /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> * * @return Whether the gcsDestination field is set. */ @java.lang.Override public boolean hasGcsDestination() { return destinationCase_ == 1; } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> * * @return The gcsDestination. */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.GcsDestination getGcsDestination() { if (destinationCase_ == 1) { return (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_; } return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() { if (destinationCase_ == 1) { return (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_; } return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } public static final int BIGQUERY_DESTINATION_FIELD_NUMBER = 2; /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> * * @return Whether the bigqueryDestination field is set. */ @java.lang.Override public boolean hasBigqueryDestination() { return destinationCase_ == 2; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> * * @return The bigqueryDestination. */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.BigQueryDestination getBigqueryDestination() { if (destinationCase_ == 2) { return (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_; } return com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder() { if (destinationCase_ == 2) { return (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_; } return com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (destinationCase_ == 1) { output.writeMessage(1, (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_); } if (destinationCase_ == 2) { output.writeMessage(2, (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_); } 
getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (destinationCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_); } if (destinationCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.asset.v1p7beta1.OutputConfig)) { return super.equals(obj); } com.google.cloud.asset.v1p7beta1.OutputConfig other = (com.google.cloud.asset.v1p7beta1.OutputConfig) obj; if (!getDestinationCase().equals(other.getDestinationCase())) return false; switch (destinationCase_) { case 1: if (!getGcsDestination().equals(other.getGcsDestination())) return false; break; case 2: if (!getBigqueryDestination().equals(other.getBigqueryDestination())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (destinationCase_) { case 1: hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER; hash = (53 * hash) + getGcsDestination().hashCode(); break; case 2: hash = (37 * hash) + BIGQUERY_DESTINATION_FIELD_NUMBER; hash = (53 * hash) + getBigqueryDestination().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.asset.v1p7beta1.OutputConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.asset.v1p7beta1.OutputConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.asset.v1p7beta1.OutputConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Output configuration for export assets destination. 
* </pre> * * Protobuf type {@code google.cloud.asset.v1p7beta1.OutputConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.asset.v1p7beta1.OutputConfig) com.google.cloud.asset.v1p7beta1.OutputConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.asset.v1p7beta1.AssetServiceProto .internal_static_google_cloud_asset_v1p7beta1_OutputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.asset.v1p7beta1.AssetServiceProto .internal_static_google_cloud_asset_v1p7beta1_OutputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.asset.v1p7beta1.OutputConfig.class, com.google.cloud.asset.v1p7beta1.OutputConfig.Builder.class); } // Construct using com.google.cloud.asset.v1p7beta1.OutputConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsDestinationBuilder_ != null) { gcsDestinationBuilder_.clear(); } if (bigqueryDestinationBuilder_ != null) { bigqueryDestinationBuilder_.clear(); } destinationCase_ = 0; destination_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.asset.v1p7beta1.AssetServiceProto .internal_static_google_cloud_asset_v1p7beta1_OutputConfig_descriptor; } @java.lang.Override public com.google.cloud.asset.v1p7beta1.OutputConfig getDefaultInstanceForType() { return com.google.cloud.asset.v1p7beta1.OutputConfig.getDefaultInstance(); } @java.lang.Override public com.google.cloud.asset.v1p7beta1.OutputConfig build() { com.google.cloud.asset.v1p7beta1.OutputConfig result = buildPartial(); 
if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.asset.v1p7beta1.OutputConfig buildPartial() { com.google.cloud.asset.v1p7beta1.OutputConfig result = new com.google.cloud.asset.v1p7beta1.OutputConfig(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.asset.v1p7beta1.OutputConfig result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs(com.google.cloud.asset.v1p7beta1.OutputConfig result) { result.destinationCase_ = destinationCase_; result.destination_ = this.destination_; if (destinationCase_ == 1 && gcsDestinationBuilder_ != null) { result.destination_ = gcsDestinationBuilder_.build(); } if (destinationCase_ == 2 && bigqueryDestinationBuilder_ != null) { result.destination_ = bigqueryDestinationBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.asset.v1p7beta1.OutputConfig) { return 
mergeFrom((com.google.cloud.asset.v1p7beta1.OutputConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.asset.v1p7beta1.OutputConfig other) { if (other == com.google.cloud.asset.v1p7beta1.OutputConfig.getDefaultInstance()) return this; switch (other.getDestinationCase()) { case GCS_DESTINATION: { mergeGcsDestination(other.getGcsDestination()); break; } case BIGQUERY_DESTINATION: { mergeBigqueryDestination(other.getBigqueryDestination()); break; } case DESTINATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsDestinationFieldBuilder().getBuilder(), extensionRegistry); destinationCase_ = 1; break; } // case 10 case 18: { input.readMessage( getBigqueryDestinationFieldBuilder().getBuilder(), extensionRegistry); destinationCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int destinationCase_ = 0; private java.lang.Object destination_; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public Builder clearDestination() { destinationCase_ = 0; destination_ = null; onChanged(); return this; } private int bitField0_; private 
com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.GcsDestination, com.google.cloud.asset.v1p7beta1.GcsDestination.Builder, com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder> gcsDestinationBuilder_; /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> * * @return Whether the gcsDestination field is set. */ @java.lang.Override public boolean hasGcsDestination() { return destinationCase_ == 1; } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> * * @return The gcsDestination. */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.GcsDestination getGcsDestination() { if (gcsDestinationBuilder_ == null) { if (destinationCase_ == 1) { return (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_; } return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } else { if (destinationCase_ == 1) { return gcsDestinationBuilder_.getMessage(); } return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ public Builder setGcsDestination(com.google.cloud.asset.v1p7beta1.GcsDestination value) { if (gcsDestinationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } destination_ = value; onChanged(); } else { gcsDestinationBuilder_.setMessage(value); } destinationCase_ = 1; return this; } /** * * * <pre> * Destination on Cloud Storage. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ public Builder setGcsDestination( com.google.cloud.asset.v1p7beta1.GcsDestination.Builder builderForValue) { if (gcsDestinationBuilder_ == null) { destination_ = builderForValue.build(); onChanged(); } else { gcsDestinationBuilder_.setMessage(builderForValue.build()); } destinationCase_ = 1; return this; } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ public Builder mergeGcsDestination(com.google.cloud.asset.v1p7beta1.GcsDestination value) { if (gcsDestinationBuilder_ == null) { if (destinationCase_ == 1 && destination_ != com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance()) { destination_ = com.google.cloud.asset.v1p7beta1.GcsDestination.newBuilder( (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_) .mergeFrom(value) .buildPartial(); } else { destination_ = value; } onChanged(); } else { if (destinationCase_ == 1) { gcsDestinationBuilder_.mergeFrom(value); } else { gcsDestinationBuilder_.setMessage(value); } } destinationCase_ = 1; return this; } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ public Builder clearGcsDestination() { if (gcsDestinationBuilder_ == null) { if (destinationCase_ == 1) { destinationCase_ = 0; destination_ = null; onChanged(); } } else { if (destinationCase_ == 1) { destinationCase_ = 0; destination_ = null; } gcsDestinationBuilder_.clear(); } return this; } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ public com.google.cloud.asset.v1p7beta1.GcsDestination.Builder getGcsDestinationBuilder() { return getGcsDestinationFieldBuilder().getBuilder(); } /** * * * <pre> * Destination on Cloud Storage. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() { if ((destinationCase_ == 1) && (gcsDestinationBuilder_ != null)) { return gcsDestinationBuilder_.getMessageOrBuilder(); } else { if (destinationCase_ == 1) { return (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_; } return com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } } /** * * * <pre> * Destination on Cloud Storage. * </pre> * * <code>.google.cloud.asset.v1p7beta1.GcsDestination gcs_destination = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.GcsDestination, com.google.cloud.asset.v1p7beta1.GcsDestination.Builder, com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder> getGcsDestinationFieldBuilder() { if (gcsDestinationBuilder_ == null) { if (!(destinationCase_ == 1)) { destination_ = com.google.cloud.asset.v1p7beta1.GcsDestination.getDefaultInstance(); } gcsDestinationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.GcsDestination, com.google.cloud.asset.v1p7beta1.GcsDestination.Builder, com.google.cloud.asset.v1p7beta1.GcsDestinationOrBuilder>( (com.google.cloud.asset.v1p7beta1.GcsDestination) destination_, getParentForChildren(), isClean()); destination_ = null; } destinationCase_ = 1; onChanged(); return gcsDestinationBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.BigQueryDestination, com.google.cloud.asset.v1p7beta1.BigQueryDestination.Builder, com.google.cloud.asset.v1p7beta1.BigQueryDestinationOrBuilder> bigqueryDestinationBuilder_; /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> * * @return Whether the bigqueryDestination field is set. */ @java.lang.Override public boolean hasBigqueryDestination() { return destinationCase_ == 2; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> * * @return The bigqueryDestination. */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.BigQueryDestination getBigqueryDestination() { if (bigqueryDestinationBuilder_ == null) { if (destinationCase_ == 2) { return (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_; } return com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } else { if (destinationCase_ == 2) { return bigqueryDestinationBuilder_.getMessage(); } return com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ public Builder setBigqueryDestination( com.google.cloud.asset.v1p7beta1.BigQueryDestination value) { if (bigqueryDestinationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } destination_ = value; onChanged(); } else { bigqueryDestinationBuilder_.setMessage(value); } destinationCase_ = 2; return this; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ public Builder setBigqueryDestination( com.google.cloud.asset.v1p7beta1.BigQueryDestination.Builder builderForValue) { if (bigqueryDestinationBuilder_ == null) { destination_ = builderForValue.build(); onChanged(); } else { bigqueryDestinationBuilder_.setMessage(builderForValue.build()); } destinationCase_ = 2; return this; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ public Builder mergeBigqueryDestination( com.google.cloud.asset.v1p7beta1.BigQueryDestination value) { if (bigqueryDestinationBuilder_ == null) { if (destinationCase_ == 2 && destination_ != com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance()) { destination_ = com.google.cloud.asset.v1p7beta1.BigQueryDestination.newBuilder( (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_) .mergeFrom(value) .buildPartial(); } else { destination_ = value; } onChanged(); } else { if (destinationCase_ == 2) { bigqueryDestinationBuilder_.mergeFrom(value); } else { bigqueryDestinationBuilder_.setMessage(value); } } destinationCase_ = 2; return this; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ public Builder clearBigqueryDestination() { if (bigqueryDestinationBuilder_ == null) { if (destinationCase_ == 2) { destinationCase_ = 0; destination_ = null; onChanged(); } } else { if (destinationCase_ == 2) { destinationCase_ = 0; destination_ = null; } bigqueryDestinationBuilder_.clear(); } return this; } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ public com.google.cloud.asset.v1p7beta1.BigQueryDestination.Builder getBigqueryDestinationBuilder() { return getBigqueryDestinationFieldBuilder().getBuilder(); } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. * </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ @java.lang.Override public com.google.cloud.asset.v1p7beta1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder() { if ((destinationCase_ == 2) && (bigqueryDestinationBuilder_ != null)) { return bigqueryDestinationBuilder_.getMessageOrBuilder(); } else { if (destinationCase_ == 2) { return (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_; } return com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } } /** * * * <pre> * Destination on BigQuery. The output table stores the fields in asset * proto as columns in BigQuery. 
* </pre> * * <code>.google.cloud.asset.v1p7beta1.BigQueryDestination bigquery_destination = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.BigQueryDestination, com.google.cloud.asset.v1p7beta1.BigQueryDestination.Builder, com.google.cloud.asset.v1p7beta1.BigQueryDestinationOrBuilder> getBigqueryDestinationFieldBuilder() { if (bigqueryDestinationBuilder_ == null) { if (!(destinationCase_ == 2)) { destination_ = com.google.cloud.asset.v1p7beta1.BigQueryDestination.getDefaultInstance(); } bigqueryDestinationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.asset.v1p7beta1.BigQueryDestination, com.google.cloud.asset.v1p7beta1.BigQueryDestination.Builder, com.google.cloud.asset.v1p7beta1.BigQueryDestinationOrBuilder>( (com.google.cloud.asset.v1p7beta1.BigQueryDestination) destination_, getParentForChildren(), isClean()); destination_ = null; } destinationCase_ = 2; onChanged(); return bigqueryDestinationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.asset.v1p7beta1.OutputConfig) } // @@protoc_insertion_point(class_scope:google.cloud.asset.v1p7beta1.OutputConfig) private static final com.google.cloud.asset.v1p7beta1.OutputConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.asset.v1p7beta1.OutputConfig(); } public static com.google.cloud.asset.v1p7beta1.OutputConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<OutputConfig> PARSER = new com.google.protobuf.AbstractParser<OutputConfig>() { @java.lang.Override public OutputConfig parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<OutputConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<OutputConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.asset.v1p7beta1.OutputConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
37,525
espresso/src/com.oracle.truffle.espresso/src/com/oracle/truffle/espresso/EspressoLanguage.java
/* * Copyright (c) 2018, 2021, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.oracle.truffle.espresso; import static com.oracle.truffle.espresso.jni.JniEnv.JNI_OK; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.regex.Pattern; import org.graalvm.home.HomeFinder; import org.graalvm.home.Version; import org.graalvm.options.OptionDescriptors; import org.graalvm.options.OptionKey; import org.graalvm.options.OptionValues; import com.oracle.truffle.api.Assumption; import com.oracle.truffle.api.CallTarget; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary; import com.oracle.truffle.api.ContextThreadLocal; import com.oracle.truffle.api.TruffleContext; import com.oracle.truffle.api.TruffleFile; import com.oracle.truffle.api.TruffleLanguage; import com.oracle.truffle.api.TruffleLanguage.Registration; import com.oracle.truffle.api.TruffleSafepoint; import com.oracle.truffle.api.dsl.Idempotent; import com.oracle.truffle.api.instrumentation.AllocationReporter; import com.oracle.truffle.api.instrumentation.ProvidedTags; import com.oracle.truffle.api.instrumentation.StandardTags; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.RootNode; import com.oracle.truffle.api.staticobject.DefaultStaticProperty; import com.oracle.truffle.api.staticobject.StaticProperty; import com.oracle.truffle.api.staticobject.StaticShape; import com.oracle.truffle.espresso.classfile.JavaKind; import com.oracle.truffle.espresso.classfile.JavaVersion; import com.oracle.truffle.espresso.classfile.descriptors.NameSymbols; import com.oracle.truffle.espresso.classfile.descriptors.ParserSymbols; import com.oracle.truffle.espresso.classfile.descriptors.SignatureSymbols; import com.oracle.truffle.espresso.classfile.descriptors.Symbols; import 
com.oracle.truffle.espresso.classfile.descriptors.TypeSymbols; import com.oracle.truffle.espresso.classfile.descriptors.Utf8Symbols; import com.oracle.truffle.espresso.descriptors.EspressoSymbols; import com.oracle.truffle.espresso.ffi.nfi.NFIIsolatedNativeAccess; import com.oracle.truffle.espresso.ffi.nfi.NFINativeAccess; import com.oracle.truffle.espresso.ffi.nfi.NFISulongNativeAccess; import com.oracle.truffle.espresso.impl.EspressoType; import com.oracle.truffle.espresso.impl.SuppressFBWarnings; import com.oracle.truffle.espresso.meta.EspressoError; import com.oracle.truffle.espresso.nodes.commands.ExitCodeNode; import com.oracle.truffle.espresso.nodes.commands.GetBindingsNode; import com.oracle.truffle.espresso.nodes.commands.ReferenceProcessRootNode; import com.oracle.truffle.espresso.preinit.ContextPatchingException; import com.oracle.truffle.espresso.preinit.EspressoLanguageCache; import com.oracle.truffle.espresso.runtime.EspressoContext; import com.oracle.truffle.espresso.runtime.EspressoException; import com.oracle.truffle.espresso.runtime.EspressoThreadLocalState; import com.oracle.truffle.espresso.runtime.GuestAllocator; import com.oracle.truffle.espresso.runtime.OS; import com.oracle.truffle.espresso.runtime.staticobject.StaticObject; import com.oracle.truffle.espresso.runtime.staticobject.StaticObject.StaticObjectFactory; import com.oracle.truffle.espresso.shared.meta.SymbolPool; import com.oracle.truffle.espresso.substitutions.JImageExtensions; import com.oracle.truffle.espresso.substitutions.Substitutions; import com.oracle.truffle.espresso.substitutions.standard.Target_sun_misc_Unsafe.CompactGuestFieldOffsetStrategy; import com.oracle.truffle.espresso.substitutions.standard.Target_sun_misc_Unsafe.GraalGuestFieldOffsetStrategy; import com.oracle.truffle.espresso.substitutions.standard.Target_sun_misc_Unsafe.GuestFieldOffsetStrategy; import com.oracle.truffle.espresso.substitutions.standard.Target_sun_misc_Unsafe.SafetyGuestFieldOffsetStrategy; // 
// TODO: Update website once Espresso has one
@Registration(id = EspressoLanguage.ID, //
                name = EspressoLanguage.NAME, //
                implementationName = EspressoLanguage.IMPLEMENTATION_NAME, //
                contextPolicy = TruffleLanguage.ContextPolicy.SHARED, //
                dependentLanguages = {"nfi"}, //
                website = "https://www.graalvm.org/dev/reference-manual/java-on-truffle/")
@ProvidedTags({StandardTags.RootTag.class, StandardTags.RootBodyTag.class, StandardTags.StatementTag.class})
public final class EspressoLanguage extends TruffleLanguage<EspressoContext> implements SymbolPool {
    public static final String ID = "java";
    public static final String NAME = "Java";
    public static final String IMPLEMENTATION_NAME = "Espresso";

    // Espresso VM info
    public static final String VM_SPECIFICATION_NAME = "Java Virtual Machine Specification";
    public static final String VM_SPECIFICATION_VENDOR = "Oracle Corporation";
    public static final String VM_VERSION = /* 1.8|11 */ "espresso-" + Version.getCurrent();
    public static final String VM_VENDOR = "Oracle Corporation";
    public static final String VM_NAME = "Espresso 64-Bit VM";
    public static final String VM_INFO = "mixed mode";

    public static final String FILE_EXTENSION = ".class";

    // Typed views over the shared symbol table. Written once in the constructor and
    // replaced wholesale when importing from a pre-initialized instance (extractDataFrom).
    @CompilationFinal private Utf8Symbols utf8Symbols;
    @CompilationFinal private NameSymbols nameSymbols;
    @CompilationFinal private TypeSymbols typeSymbols;
    @CompilationFinal private SignatureSymbols signatureSymbols;

    // Hidden static properties attached to guest arrays (see createArrayShape()).
    private final StaticProperty arrayProperty = new DefaultStaticProperty("array");
    private final StaticProperty arrayHashCodeProperty = new DefaultStaticProperty("ihashcode");
    // This field should be final, but creating a shape requires a fully-initialized instance of
    // TruffleLanguage.
    @CompilationFinal //
    private StaticShape<StaticObjectFactory> arrayShape;

    // Hidden static properties attached to foreign guest objects (see createForeignShape()).
    private final StaticProperty foreignProperty = new DefaultStaticProperty("foreignObject");
    private final StaticProperty typeArgumentProperty = new DefaultStaticProperty("typeArguments");
    // This field should be final, but creating a shape requires a fully-initialized instance of
    // TruffleLanguage.
    @CompilationFinal //
    private StaticShape<StaticObjectFactory> foreignShape;

    // Guest Java version; lazily published via tryInitializeJavaVersion().
    @CompilationFinal private JavaVersion javaVersion;

    // region Options
    // Note: All options are initialized during the bootstrapping of the first context
    @CompilationFinal private EspressoOptions.VerifyMode verifyMode;
    @CompilationFinal private EspressoOptions.SpecComplianceMode specComplianceMode;
    @CompilationFinal private EspressoOptions.LivenessAnalysisMode livenessAnalysisMode;
    @CompilationFinal private int livenessAnalysisMinimumLocals;
    @CompilationFinal private boolean previewEnabled;
    @CompilationFinal private boolean whiteBoxEnabled;
    @CompilationFinal private boolean eagerFrameAnalysis;
    @CompilationFinal private boolean internalJvmciEnabled;
    @CompilationFinal private boolean useEspressoLibs;
    @CompilationFinal private boolean enableNetworking;
    @CompilationFinal private boolean continuum;
    @CompilationFinal private String nativeBackendId;
    @CompilationFinal private boolean useTRegex;
    @CompilationFinal private int maxStackTraceDepth;
    // endregion Options

    // region Allocation
    // Note: Initialized during the bootstrapping of the first context; See initializeOptions()
    @CompilationFinal private GuestAllocator allocator;
    // Valid while no context has requested allocation tracking; see
    // isAllocationTrackingDisabled() / invalidateAllocationTrackingDisabled().
    @CompilationFinal private final Assumption noAllocationTracking = Assumption.create("Espresso no allocation tracking assumption");
    // endregion Allocation

    // region Preinit and sharing
    private final EspressoLanguageCache languageCache = new EspressoLanguageCache();
    @CompilationFinal private boolean isShared = false;
    // endregion Preinit and sharing

    // Published last by ensureInitialized(); volatile so the double-checked locking there is safe.
    @CompilationFinal private volatile boolean fullyInitialized;
    // Lazily-set extension points; effectively final after bootstrap of the first context.
    @CompilationFinal private JImageExtensions jImageExtensions;
    @CompilationFinal private GuestFieldOffsetStrategy guestFieldOffsetStrategy;

    // Per-thread Espresso state (pending exception, current virtual thread, stepping flags).
    private final ContextThreadLocal<EspressoThreadLocalState> threadLocalState = locals.createContextThreadLocal(EspressoThreadLocalState::new);

    public EspressoLanguage() {
        // Initialize statically defined symbols and substitutions.
        // Initialization order is very fragile.
        ParserSymbols.ensureInitialized();
        JavaKind.ensureInitialized();
        Substitutions.ensureInitialized();
        EspressoSymbols.ensureInitialized();
        // Raw symbols are not exposed directly, use the typed interfaces: NameSymbols, TypeSymbols
        // and SignatureSymbols instead.
        // HelloWorld requires ~25K symbols. Give enough space to the symbol table to avoid resizing
        // during startup.
        int initialSymbolTableCapacity = 1 << 16;
        Symbols symbols = Symbols.fromExisting(EspressoSymbols.SYMBOLS.freeze(), initialSymbolTableCapacity);
        this.utf8Symbols = new Utf8Symbols(symbols);
        this.nameSymbols = new NameSymbols(symbols);
        this.typeSymbols = new TypeSymbols(symbols);
        this.signatureSymbols = new SignatureSymbols(symbols, typeSymbols);
    }

    @Override
    protected OptionDescriptors getOptionDescriptors() {
        return new EspressoOptionsOptionDescriptors();
    }

    public EspressoThreadLocalState getThreadLocalState() {
        return threadLocalState.get();
    }

    public EspressoThreadLocalState getThreadLocalStateFor(Thread t) {
        return threadLocalState.get(t);
    }

    @Override
    protected EspressoContext createContext(final TruffleLanguage.Env env) {
        // We cannot use env.isPreinitialization() here because the language instance that holds the
        // inner context is not under pre-initialization
        boolean isPreinitLanguageInstance = (boolean) env.getConfig().getOrDefault("preinit", false);
        if (isPreinitLanguageInstance) {
            languageCache.addCapability(EspressoLanguageCache.CacheCapability.PRE_INITIALIZED);
        }
        ensureInitialized(env);
        // TODO(peterssen): Redirect in/out to env.in()/out()
        EspressoContext context = new EspressoContext(env, this);
        context.setMainArguments(env.getApplicationArguments());
        return context;
    }

    /**
     * One-time initialization of language-level state derived from options: option fields, the
     * guest allocator and the two static shapes. Uses double-checked locking on the volatile
     * {@code fullyInitialized} flag, which is published last.
     */
    public void ensureInitialized(final TruffleLanguage.Env env) {
        if (!fullyInitialized) {
            CompilerDirectives.transferToInterpreterAndInvalidate();
            synchronized (this) {
                if (!fullyInitialized) {
                    // Initialize required options.
                    initializeOptions(env);
                    initializeGuestAllocator(env);
                    // Create known shapes.
                    arrayShape = createArrayShape();
                    foreignShape = createForeignShape();
                    // Prevent further changes in cache capabilities,
                    // languageCache.freezeCapabilities();
                    // Publish initialization.
                    fullyInitialized = true;
                }
            }
        }
    }

    // Reads all language-level options from the environment. Must only be called while holding
    // the lock in ensureInitialized() (asserted below).
    private void initializeOptions(final TruffleLanguage.Env env) {
        assert Thread.holdsLock(this);
        eagerFrameAnalysis = env.getOptions().get(EspressoOptions.EagerFrameAnalysis);
        // Eager frame analysis forces full verification.
        verifyMode = eagerFrameAnalysis ? EspressoOptions.VerifyMode.ALL : env.getOptions().get(EspressoOptions.Verify);
        specComplianceMode = env.getOptions().get(EspressoOptions.SpecCompliance);
        livenessAnalysisMode = env.getOptions().get(EspressoOptions.LivenessAnalysis);
        livenessAnalysisMinimumLocals = env.getOptions().get(EspressoOptions.LivenessAnalysisMinimumLocals);
        previewEnabled = env.getOptions().get(EspressoOptions.EnablePreview);
        whiteBoxEnabled = env.getOptions().get(EspressoOptions.WhiteBoxAPI);
        internalJvmciEnabled = env.getOptions().get(EspressoOptions.EnableJVMCI);
        continuum = env.getOptions().get(EspressoOptions.Continuum);
        maxStackTraceDepth = env.getOptions().get(EspressoOptions.MaxJavaStackTraceDepth);
        useTRegex = env.getOptions().get(EspressoOptions.UseTRegex);
        if (useTRegex && !env.getInternalLanguages().containsKey("regex")) {
            throw EspressoError.fatal("UseTRegex is set to true but the 'regex' language is not available.");
        }
        EspressoOptions.GuestFieldOffsetStrategyEnum strategy = env.getOptions().get(EspressoOptions.GuestFieldOffsetStrategy);
        guestFieldOffsetStrategy = switch (strategy) {
            case safety -> new SafetyGuestFieldOffsetStrategy();
            case compact -> new CompactGuestFieldOffsetStrategy();
            case graal -> new GraalGuestFieldOffsetStrategy();
        };
        this.useEspressoLibs = env.getOptions().get(EspressoOptions.UseEspressoLibs);
        this.nativeBackendId = setNativeBackendId(env);
        assert guestFieldOffsetStrategy.name().equals(strategy.name());
    }

    @Override
    protected void initializeMultipleContexts() {
        // Called before any context is created. No racing issues expected.
        languageCache.addCapability(EspressoLanguageCache.CacheCapability.SHARED);
        isShared = true;
    }

    @Override
    protected void initializeContext(final EspressoContext context) throws Exception {
        if (context.getEnv().isPreInitialization()) {
            // Spawn Espresso VM in an inner context. Make sure to initialize the context
            TruffleContext ctx = context.getEnv() //
                            .newInnerContextBuilder() //
                            .initializeCreatorContext(true) //
                            .inheritAllAccess(true) //
                            .config("preinit", true) //
                            .build();
            Object prev = ctx.enter(null);
            try {
                // Retrieve caches and options and store them in the pre-initialized language
                // instance.
                EspressoContext inner = EspressoContext.get(null);
                inner.preInitializeContext();
                languageCache.addCapability(EspressoLanguageCache.CacheCapability.PRE_INITIALIZED);
                extractDataFrom(inner.getLanguage());
                languageCache.logCacheStatus();
                if (!inner.multiThreadingEnabled()) {
                    // Force collection of guest references.
                    inner.getLazyCaches().getReferenceProcessCache().execute();
                }
                // This is needed to ensure that there are no references to the inner context
                inner = null;
            } finally {
                ctx.leave(null, prev);
                ctx.close();
                // This is needed to ensure that there are no references to the inner context
                ctx = null;
                // Ensure that weak references will get collected
                System.gc();
            }
        } else {
            context.initializeContext();
        }
    }

    // Copies symbol tables, the guessed Java version and the language cache from the language
    // instance that ran inside the pre-initialization inner context.
    private void extractDataFrom(EspressoLanguage other) {
        javaVersion = other.javaVersion;
        utf8Symbols = other.getUtf8Symbols();
        nameSymbols = other.getNames();
        typeSymbols = other.getTypes();
        signatureSymbols = other.getSignatures();
        languageCache.importFrom(other.getLanguageCache());
    }

    private static String setNativeBackendId(final TruffleLanguage.Env env) {
        String nativeBackend;
        if (env.getOptions().hasBeenSet(EspressoOptions.NativeBackend)) {
            nativeBackend = env.getOptions().get(EspressoOptions.NativeBackend);
        } else {
            // Pick a sane "default" native backend depending on the platform.
            boolean isInPreInit = (boolean) env.getConfig().getOrDefault("preinit", false);
            if (isInPreInit || !EspressoOptions.RUNNING_ON_SVM) {
                if (OS.getCurrent() == OS.Linux) {
                    nativeBackend = NFIIsolatedNativeAccess.Provider.ID;
                } else {
                    nativeBackend = NFISulongNativeAccess.Provider.ID;
                }
            } else {
                nativeBackend = NFINativeAccess.Provider.ID;
            }
        }
        return nativeBackend;
    }

    @Override
    protected boolean patchContext(EspressoContext context, Env newEnv) {
        // This check has to be done manually as long as language uses exclusive context sharing
        // policy.
        if (!areOptionsCompatible(context.getEnv().getOptions(), newEnv.getOptions())) {
            return false;
        }
        context.patchContext(newEnv);
        try {
            context.initializeContext();
        } catch (ContextPatchingException e) {
            context.getLogger().severe(e.getMessage());
            return false;
        }
        return true;
    }

    @Override
    protected boolean areOptionsCompatible(OptionValues oldOptions, OptionValues newOptions) {
        return isOptionCompatible(newOptions, oldOptions, EspressoOptions.JavaHome) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.BootClasspath) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.Verify) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.EagerFrameAnalysis) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.SpecCompliance) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.LivenessAnalysis) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.LivenessAnalysisMinimumLocals) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.EnablePreview) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.WhiteBoxAPI) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.EnableJVMCI) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.Continuum) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.UseTRegex) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.GuestFieldOffsetStrategy) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.UseEspressoLibs) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.NativeBackend) &&
                        isOptionCompatible(newOptions, oldOptions, EspressoOptions.MaxJavaStackTraceDepth);
    }

    // Equality is symmetric, so the swapped argument order at the call sites above is harmless.
    private static boolean isOptionCompatible(OptionValues oldOptions, OptionValues newOptions, OptionKey<?> option) {
        return oldOptions.get(option).equals(newOptions.get(option));
    }

    @Override
    protected void exitContext(EspressoContext context, ExitMode exitMode, int exitCode) {
        if (!context.isInitialized()) {
            return;
        }
        if (exitMode == ExitMode.NATURAL) {
            // Make sure current thread is no longer considered alive by guest code.
            if (context.getVM().DetachCurrentThread(context, this) == JNI_OK) {
                // Create a new guest thread to wait for other non-daemon threads
                context.createThread(Thread.currentThread(), context.getMainThreadGroup(), "DestroyJavaVM", false);
            }
            // Wait for ongoing threads to finish.
            context.destroyVM();
        } else {
            // Here we give a chance for our threads to exit gracefully in guest code before
            // Truffle kicks in with host thread deaths.
            context.doExit(exitCode);
        }
    }

    @Override
    protected void finalizeContext(EspressoContext context) {
        context.ensureThreadsJoined();
        TruffleSafepoint sp = TruffleSafepoint.getCurrent();
        boolean prev = sp.setAllowActions(false);
        try {
            // we can still run limited guest code, even if the context is already invalid
            context.prepareDispose();
            context.cleanupNativeEnv();
        } catch (Throwable t) {
            context.getLogger().log(Level.FINER, "Exception while finalizing Espresso context", t);
            throw t;
        } finally {
            sp.setAllowActions(prev);
            context.setFinalized();
        }
        // Log total time spent in this context, choosing the unit by magnitude.
        long elapsedTimeNanos = System.nanoTime() - context.getStartupClockNanos();
        long seconds = TimeUnit.NANOSECONDS.toSeconds(elapsedTimeNanos);
        if (seconds > 10) {
            context.getLogger().log(Level.FINE, "Time spent in Espresso: {0} s", seconds);
        } else {
            context.getLogger().log(Level.FINE, "Time spent in Espresso: {0} ms", TimeUnit.NANOSECONDS.toMillis(elapsedTimeNanos));
        }
    }

    @Override
    protected Object getScope(EspressoContext context) {
        return context.getBindings();
    }

    @Override
    protected void disposeContext(final EspressoContext context) {
        context.disposeContext();
    }

    /**
     * Espresso cannot evaluate Java source text; only a fixed set of special command sources
     * (DestroyVM, ExitCode, GetBindings, ReferenceProcess) is accepted here.
     */
    @Override
    @SuppressWarnings("deprecation")
    protected CallTarget parse(final ParsingRequest request) throws Exception {
        assert EspressoContext.get(null).isInitialized();
        String contents = request.getSource().getCharacters().toString();
        if (com.oracle.truffle.espresso.nodes.commands.DestroyVMNode.EVAL_NAME.equals(contents)) {
            RootNode node = new com.oracle.truffle.espresso.nodes.commands.DestroyVMNode(this);
            return node.getCallTarget();
        }
        if (ExitCodeNode.EVAL_NAME.equals(contents)) {
            RootNode node = new ExitCodeNode(this);
            return node.getCallTarget();
        }
        if (GetBindingsNode.EVAL_NAME.equals(contents)) {
            RootNode node = new GetBindingsNode(this);
            return node.getCallTarget();
        }
        if (ReferenceProcessRootNode.EVAL_NAME.equals(contents)) {
            RootNode node = new ReferenceProcessRootNode(this);
            return node.getCallTarget();
        }
        throw new EspressoParseError(
                        "Espresso cannot evaluate Java sources directly, only a few special commands are supported: " + GetBindingsNode.EVAL_NAME + " and " + ReferenceProcessRootNode.EVAL_NAME +
                                        "\n" +
                                        "Use the \"" + ID + "\" language bindings to load guest Java classes e.g. context.getBindings(\"" + ID + "\").getMember(\"java.lang.Integer\")");
    }

    @Override
    public NameSymbols getNames() {
        return nameSymbols;
    }

    public Utf8Symbols getUtf8Symbols() {
        return utf8Symbols;
    }

    @Override
    public TypeSymbols getTypes() {
        return typeSymbols;
    }

    @Override
    public SignatureSymbols getSignatures() {
        return signatureSymbols;
    }

    @Override
    protected boolean isThreadAccessAllowed(Thread thread, boolean singleThreaded) {
        // allow access from any thread instead of just one
        return true;
    }

    @Override
    protected void initializeMultiThreading(EspressoContext context) {
        // perform actions when the context is switched to multi-threading
        // context.singleThreaded.invalidate();
    }

    @Override
    protected void initializeThread(EspressoContext context, Thread thread) {
        if (context.isFinalized()) {
            // we can no longer run guest code
            context.getLogger().log(Level.FINE, "Context is already finalized, ignoring request to initialize a new thread");
            return;
        }
        context.createThread(thread);
    }

    @Override
    protected void disposeThread(EspressoContext context, Thread thread) {
        context.disposeThread(thread);
    }

    public StaticProperty getArrayProperty() {
        return arrayProperty;
    }

    // Only meaningful when the Continuum option is set; the property is only added to the
    // array shape in createArrayShape() when continuum is true.
    public StaticProperty getArrayHashCodeProperty() {
        if (!continuum) {
            CompilerDirectives.transferToInterpreterAndInvalidate();
            throw EspressoError.shouldNotReachHere("Accessing array hash code property without continuum set up.");
        }
        return arrayHashCodeProperty;
    }

    public StaticShape<StaticObjectFactory> getArrayShape() {
        assert fullyInitialized : "Array shape accessed before language is fully initialized";
        return arrayShape;
    }

    @TruffleBoundary
    private StaticShape<StaticObjectFactory> createArrayShape() {
        assert arrayShape == null;
        StaticShape.Builder builder = StaticShape.newBuilder(this).property(arrayProperty, Object.class, true);
        if (continuum) {
            builder.property(arrayHashCodeProperty, int.class, false);
        }
        return builder.build(StaticObject.class, StaticObjectFactory.class);
    }

    public StaticProperty getForeignProperty() {
        return foreignProperty;
    }

    public StaticProperty getTypeArgumentProperty() {
        return typeArgumentProperty;
    }

    public StaticShape<StaticObjectFactory> getForeignShape() {
        assert fullyInitialized : "Array shape accessed before language is fully initialized";
        return foreignShape;
    }

    @TruffleBoundary
    private StaticShape<StaticObjectFactory> createForeignShape() {
        assert foreignShape == null;
        return StaticShape.newBuilder(this).property(foreignProperty, Object.class, true).property(typeArgumentProperty, EspressoType[].class, true).build(StaticObject.class, StaticObjectFactory.class);
    }

    private static final LanguageReference<EspressoLanguage> REFERENCE = LanguageReference.create(EspressoLanguage.class);

    // Fast lookup of the current language instance from a Truffle node.
    public static EspressoLanguage get(Node node) {
        return REFERENCE.get(node);
    }

    public JavaVersion getJavaVersion() {
        return javaVersion;
    }

    public EspressoOptions.SpecComplianceMode getSpecComplianceMode() {
        return specComplianceMode;
    }

    public EspressoOptions.LivenessAnalysisMode getLivenessAnalysisMode() {
        return livenessAnalysisMode;
    }

    public EspressoOptions.VerifyMode getVerifyMode() {
        return verifyMode;
    }

    public int livenessAnalysisMinimumLocals() {
        return livenessAnalysisMinimumLocals;
    }

    // Tracking is disabled while the assumption holds; compiled code can constant-fold this.
    public boolean isAllocationTrackingDisabled() {
        return noAllocationTracking.isValid();
    }

    public void invalidateAllocationTrackingDisabled() {
        noAllocationTracking.invalidate();
    }

    public boolean isPreviewEnabled() {
        return previewEnabled;
    }

    public boolean isWhiteBoxEnabled() {
        return whiteBoxEnabled;
    }

    public boolean isEagerFrameAnalysisEnabled() {
        return eagerFrameAnalysis;
    }

    public boolean isInternalJVMCIEnabled() {
        return internalJvmciEnabled;
    }

    public boolean isJVMCIEnabled() {
        return internalJvmciEnabled;
    }

    public boolean useTRegex() {
        return useTRegex;
    }

    public boolean useEspressoLibs() {
        return useEspressoLibs;
    }

    public String nativeBackendId() {
        return nativeBackendId;
    }

    public boolean isContinuumEnabled() {
        return continuum;
    }

    public EspressoLanguageCache getLanguageCache() {
        return languageCache;
    }

    public GuestAllocator getAllocator() {
        return allocator;
    }

    public void initializeGuestAllocator(TruffleLanguage.Env env) {
        this.allocator = new GuestAllocator(this, env.lookup(AllocationReporter.class));
    }

    @Idempotent
    public boolean isShared() {
        return isShared;
    }

    /**
     * Publishes the guest Java version exactly once (double-checked locking on
     * {@code javaVersion}), validating it against the field-offset strategy and the UseTRegex /
     * EnableJVMCI options. Subsequent calls must pass an equal version.
     */
    @SuppressFBWarnings(value = "DC_DOUBLECHECK", //
                    justification = "non-volatile for performance reasons, javaVersion is initialized very early during context creation with an enum value, only benign races expected.")
    public void tryInitializeJavaVersion(JavaVersion version) {
        Objects.requireNonNull(version);
        JavaVersion ref = this.javaVersion;
        if (ref == null) {
            synchronized (this) {
                ref = this.javaVersion;
                if (ref == null) {
                    if (!getGuestFieldOffsetStrategy().isAllowed(version)) {
                        throw EspressoError.fatal("This guest field offset strategy (" + getGuestFieldOffsetStrategy().name() + ") is not allowed with this Java version (" + version + ")");
                    }
                    if (useTRegex && !version.java21OrLater()) {
                        throw EspressoError.fatal("UseTRegex is not available for a context running Java version < 21.");
                    }
                    if (internalJvmciEnabled && !version.java21OrLater()) {
                        throw EspressoError.fatal("EnableJVMCI is not available for a context running Java version < 21.");
                    }
                    this.javaVersion = ref = version;
                }
            }
        }
        EspressoError.guarantee(version.equals(ref), "incompatible Java versions");
    }

    public StaticObject getCurrentVirtualThread() {
        return getThreadLocalState().getCurrentVirtualThread();
    }

    public void setCurrentVirtualThread(StaticObject thread) {
        getThreadLocalState().setCurrentVirtualThread(thread);
    }

    /**
     * Locates the espresso support libraries, first in the language home ({@code <home>/lib}),
     * then in the "espresso-libs" internal resource.
     */
    public static Path getEspressoLibs(TruffleLanguage.Env env) {
        Path espressoHome = HomeFinder.getInstance().getLanguageHomes().get(EspressoLanguage.ID);
        if (espressoHome != null) {
            Path libs = espressoHome.resolve("lib");
            if (Files.isDirectory(libs)) {
                env.getLogger(EspressoContext.class).config(() -> "Using espresso libs from language home at " + libs);
                return libs;
            }
        }
        try {
            String resources = env.getInternalResource("espresso-libs").getAbsoluteFile().toString();
            Path libs = Path.of(resources, "lib");
            assert Files.isDirectory(libs);
            env.getLogger(EspressoContext.class).config(() -> "Using espresso libs from resources at " + libs);
            return libs;
        } catch (IOException e) {
            throw EspressoError.shouldNotReachHere(e);
        }
    }

    // Runtime resource ids probed in order when none is explicitly requested.
    private static final String[] KNOWN_ESPRESSO_RUNTIMES = {"jdk25", "openjdk25", "jdk21", "openjdk21", "jdk" + JavaVersion.HOST_VERSION, "openjdk" + JavaVersion.HOST_VERSION};
    private static final Pattern VALID_RESOURCE_ID = Pattern.compile("[0-9a-z\\-]+");

    /**
     * Resolves the guest runtime (java.home) to use, in order: explicit JavaHome option (returns
     * null, the option value is used directly), an explicit or known espresso-runtime-* internal
     * resource, a legacy GraalVM layout around the language home, and finally — on Linux when the
     * host JDK version is supported — the host's own runtime libraries.
     */
    public static Path getEspressoRuntime(TruffleLanguage.Env env) {
        if (env.getOptions().hasBeenSet(EspressoOptions.JavaHome)) {
            if (env.getOptions().hasBeenSet(EspressoOptions.RuntimeResourceId)) {
                env.getLogger(EspressoContext.class).warning("Both java.JavaHome and java.RuntimeResourceId are set. RuntimeResourceId will be ignored.");
            }
            // This option's value will be used, no need to guess
            return null;
        }
        try {
            if (env.getOptions().hasBeenSet(EspressoOptions.RuntimeResourceId)) {
                String runtimeName = env.getOptions().get(EspressoOptions.RuntimeResourceId);
                if (!VALID_RESOURCE_ID.matcher(runtimeName).matches()) {
                    throw EspressoError.fatal("Invalid RuntimeResourceId: " + runtimeName);
                }
                TruffleFile resource = env.getInternalResource("espresso-runtime-" + runtimeName);
                if (resource == null) {
                    throw EspressoError.fatal("Couldn't find: espresso-runtime-" + runtimeName + " internal resource.\n" +
                                    "Did you add the corresponding jar to the class or module path?");
                }
                Path resources = Path.of(resource.getAbsoluteFile().toString());
                assert Files.isDirectory(resources);
                env.getLogger(EspressoContext.class).config(() -> "Using " + runtimeName + " runtime at " + resources);
                return resources;
            }
            for (String runtimeName : KNOWN_ESPRESSO_RUNTIMES) {
                TruffleFile resource = env.getInternalResource("espresso-runtime-" + runtimeName);
                if (resource != null) {
                    Path resources = Path.of(resource.getAbsoluteFile().toString());
                    if (Files.isDirectory(resources)) {
                        env.getLogger(EspressoContext.class).config(() -> "Selected " + runtimeName + " runtime at " + resources);
                        return resources;
                    }
                }
            }
        } catch (IOException e) {
            throw EspressoError.shouldNotReachHere(e);
        }
        // Try to figure out if we are running in a legacy GraalVM or standalone
        Path espressoHome = HomeFinder.getInstance().getLanguageHomes().get(EspressoLanguage.ID);
        if (espressoHome != null && Files.isDirectory(espressoHome)) {
            // ESPRESSO_HOME = GRAALVM_JAVA_HOME/languages/java
            Path graalvmHome = HomeFinder.getInstance().getHomeFolder();
            try {
                if (graalvmHome != null) {
                    Path expectedLanguageHome = graalvmHome.resolve("languages").resolve("java");
                    if (Files.isDirectory(expectedLanguageHome) && Files.isSameFile(espressoHome, expectedLanguageHome)) {
                        env.getLogger(EspressoContext.class).config(() -> "Using graalvm home at " + graalvmHome);
                        return graalvmHome;
                    }
                }
                Path tentativeHome = espressoHome.resolve("..").resolve("..");
                Path expectedReleaseFile = tentativeHome.resolve("release");
                if (Files.isRegularFile(expectedReleaseFile)) {
                    Path normalized = tentativeHome.normalize();
                    env.getLogger(EspressoContext.class).config(() -> "Using graalvm-like home at " + normalized);
                    return normalized;
                }
            } catch (IOException e) {
                env.getLogger(EspressoContext.class).log(Level.WARNING, "Error while probing espresso and graalvm home", e);
            }
        }
        if (OS.getCurrent() == OS.Linux && JavaVersion.HOST_VERSION.compareTo(JavaVersion.latestSupported()) <= 0) {
            if (!EspressoOptions.RUNNING_ON_SVM || (boolean) env.getConfig().getOrDefault("preinit", false)) {
                // we might be able to use the host runtime libraries
                env.getLogger(EspressoContext.class).config("Trying to use the host's runtime libraries");
                return Paths.get(System.getProperty("java.home"));
            }
        }
        throw EspressoError.fatal("Couldn't find suitable runtime libraries for espresso. You can try to\n" +
                        "add a jar with the necessary resources such as org.graalvm.espresso:espresso-runtime-resources-*,\n" +
                        "or set java.JavaHome explicitly.");
    }

    // Returns a scope that suppresses single stepping on the current thread until closed.
    public DisableSingleStepping disableStepping() {
        return new DisableSingleStepping();
    }

    public int getMaxStackTraceDepth() {
        return maxStackTraceDepth;
    }

    /**
     * try-with-resources scope that disables single stepping for the current thread; close()
     * re-enables it only if this scope actually disabled it.
     */
    public final class DisableSingleStepping implements AutoCloseable {

        private final boolean steppingDisabled;

        private DisableSingleStepping() {
            steppingDisabled = getThreadLocalState().disableSingleStepping(false);
        }

        @Override
        public void close() {
            if (steppingDisabled) {
                getThreadLocalState().enableSingleStepping();
            }
        }
    }

    public JImageExtensions getJImageExtensions() {
        return jImageExtensions;
    }

    public GuestFieldOffsetStrategy getGuestFieldOffsetStrategy() {
        return guestFieldOffsetStrategy;
    }

    // Pending-exception accessors delegate to the per-thread state.
    public StaticObject getPendingException() {
        return getThreadLocalState().getPendingExceptionObject();
    }

    public EspressoException getPendingEspressoException() {
        return getThreadLocalState().getPendingException();
    }

    public void clearPendingException() {
        getThreadLocalState().clearPendingException();
    }

    public void setPendingException(EspressoException ex) {
        getThreadLocalState().setPendingException(ex);
    }
}
googleapis/google-cloud-java
37,487
java-dialogflow-cx/grpc-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/GeneratorsGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3beta1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/cloud/dialogflow/cx/v3beta1/generator.proto") @io.grpc.stub.annotations.GrpcGenerated public final class GeneratorsGrpc { private GeneratorsGrpc() {} public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.cx.v3beta1.Generators"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest, com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> getListGeneratorsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListGenerators", requestType = com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest.class, responseType = com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest, com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> getListGeneratorsMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest, com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> getListGeneratorsMethod; if ((getListGeneratorsMethod = GeneratorsGrpc.getListGeneratorsMethod) == null) { synchronized (GeneratorsGrpc.class) { if ((getListGeneratorsMethod = GeneratorsGrpc.getListGeneratorsMethod) == null) { GeneratorsGrpc.getListGeneratorsMethod = getListGeneratorsMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest, com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListGenerators")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse .getDefaultInstance())) .setSchemaDescriptor(new GeneratorsMethodDescriptorSupplier("ListGenerators")) .build(); } } } return getListGeneratorsMethod; } private static volatile io.grpc.MethodDescriptor< 
com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getGetGeneratorMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetGenerator", requestType = com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest.class, responseType = com.google.cloud.dialogflow.cx.v3beta1.Generator.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getGetGeneratorMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getGetGeneratorMethod; if ((getGetGeneratorMethod = GeneratorsGrpc.getGetGeneratorMethod) == null) { synchronized (GeneratorsGrpc.class) { if ((getGetGeneratorMethod = GeneratorsGrpc.getGetGeneratorMethod) == null) { GeneratorsGrpc.getGetGeneratorMethod = getGetGeneratorMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetGenerator")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.Generator .getDefaultInstance())) .setSchemaDescriptor(new GeneratorsMethodDescriptorSupplier("GetGenerator")) .build(); } } } return getGetGeneratorMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getCreateGeneratorMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' 
+ "CreateGenerator", requestType = com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest.class, responseType = com.google.cloud.dialogflow.cx.v3beta1.Generator.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getCreateGeneratorMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getCreateGeneratorMethod; if ((getCreateGeneratorMethod = GeneratorsGrpc.getCreateGeneratorMethod) == null) { synchronized (GeneratorsGrpc.class) { if ((getCreateGeneratorMethod = GeneratorsGrpc.getCreateGeneratorMethod) == null) { GeneratorsGrpc.getCreateGeneratorMethod = getCreateGeneratorMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateGenerator")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.Generator .getDefaultInstance())) .setSchemaDescriptor( new GeneratorsMethodDescriptorSupplier("CreateGenerator")) .build(); } } } return getCreateGeneratorMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getUpdateGeneratorMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateGenerator", requestType = com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest.class, responseType = 
com.google.cloud.dialogflow.cx.v3beta1.Generator.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getUpdateGeneratorMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> getUpdateGeneratorMethod; if ((getUpdateGeneratorMethod = GeneratorsGrpc.getUpdateGeneratorMethod) == null) { synchronized (GeneratorsGrpc.class) { if ((getUpdateGeneratorMethod = GeneratorsGrpc.getUpdateGeneratorMethod) == null) { GeneratorsGrpc.getUpdateGeneratorMethod = getUpdateGeneratorMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateGenerator")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.Generator .getDefaultInstance())) .setSchemaDescriptor( new GeneratorsMethodDescriptorSupplier("UpdateGenerator")) .build(); } } } return getUpdateGeneratorMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest, com.google.protobuf.Empty> getDeleteGeneratorMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteGenerator", requestType = com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< 
com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest, com.google.protobuf.Empty> getDeleteGeneratorMethod() { io.grpc.MethodDescriptor< com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest, com.google.protobuf.Empty> getDeleteGeneratorMethod; if ((getDeleteGeneratorMethod = GeneratorsGrpc.getDeleteGeneratorMethod) == null) { synchronized (GeneratorsGrpc.class) { if ((getDeleteGeneratorMethod = GeneratorsGrpc.getDeleteGeneratorMethod) == null) { GeneratorsGrpc.getDeleteGeneratorMethod = getDeleteGeneratorMethod = io.grpc.MethodDescriptor .<com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteGenerator")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.protobuf.Empty.getDefaultInstance())) .setSchemaDescriptor( new GeneratorsMethodDescriptorSupplier("DeleteGenerator")) .build(); } } } return getDeleteGeneratorMethod; } /** Creates a new async stub that supports all call types for the service */ public static GeneratorsStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<GeneratorsStub> factory = new io.grpc.stub.AbstractStub.StubFactory<GeneratorsStub>() { @java.lang.Override public GeneratorsStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsStub(channel, callOptions); } }; return GeneratorsStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static GeneratorsBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<GeneratorsBlockingV2Stub> factory = new 
io.grpc.stub.AbstractStub.StubFactory<GeneratorsBlockingV2Stub>() { @java.lang.Override public GeneratorsBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsBlockingV2Stub(channel, callOptions); } }; return GeneratorsBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static GeneratorsBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<GeneratorsBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<GeneratorsBlockingStub>() { @java.lang.Override public GeneratorsBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsBlockingStub(channel, callOptions); } }; return GeneratorsBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static GeneratorsFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<GeneratorsFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<GeneratorsFutureStub>() { @java.lang.Override public GeneratorsFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsFutureStub(channel, callOptions); } }; return GeneratorsFutureStub.newStub(factory, channel); } /** * * * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public interface AsyncService { /** * * * <pre> * Returns the list of all generators in the specified agent. 
* </pre> */ default void listGenerators( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListGeneratorsMethod(), responseObserver); } /** * * * <pre> * Retrieves the specified generator. * </pre> */ default void getGenerator( com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGetGeneratorMethod(), responseObserver); } /** * * * <pre> * Creates a generator in the specified agent. * </pre> */ default void createGenerator( com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreateGeneratorMethod(), responseObserver); } /** * * * <pre> * Update the specified generator. * </pre> */ default void updateGenerator( com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdateGeneratorMethod(), responseObserver); } /** * * * <pre> * Deletes the specified generators. * </pre> */ default void deleteGenerator( com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeleteGeneratorMethod(), responseObserver); } } /** * Base class for the server implementation of the service Generators. 
* * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public abstract static class GeneratorsImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return GeneratorsGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Generators. * * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public static final class GeneratorsStub extends io.grpc.stub.AbstractAsyncStub<GeneratorsStub> { private GeneratorsStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected GeneratorsStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsStub(channel, callOptions); } /** * * * <pre> * Returns the list of all generators in the specified agent. * </pre> */ public void listGenerators( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListGeneratorsMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Retrieves the specified generator. * </pre> */ public void getGenerator( com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetGeneratorMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates a generator in the specified agent. 
* </pre> */ public void createGenerator( com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreateGeneratorMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Update the specified generator. * </pre> */ public void updateGenerator( com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdateGeneratorMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes the specified generators. * </pre> */ public void deleteGenerator( com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest request, io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeleteGeneratorMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service Generators. * * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public static final class GeneratorsBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<GeneratorsBlockingV2Stub> { private GeneratorsBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected GeneratorsBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Returns the list of all generators in the specified agent. 
* </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse listGenerators( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListGeneratorsMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves the specified generator. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator getGenerator( com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a generator in the specified agent. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator createGenerator( com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Update the specified generator. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator updateGenerator( com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes the specified generators. * </pre> */ public com.google.protobuf.Empty deleteGenerator( com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteGeneratorMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service Generators. 
* * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public static final class GeneratorsBlockingStub extends io.grpc.stub.AbstractBlockingStub<GeneratorsBlockingStub> { private GeneratorsBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected GeneratorsBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsBlockingStub(channel, callOptions); } /** * * * <pre> * Returns the list of all generators in the specified agent. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse listGenerators( com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListGeneratorsMethod(), getCallOptions(), request); } /** * * * <pre> * Retrieves the specified generator. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator getGenerator( com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a generator in the specified agent. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator createGenerator( com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreateGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Update the specified generator. * </pre> */ public com.google.cloud.dialogflow.cx.v3beta1.Generator updateGenerator( com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdateGeneratorMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes the specified generators. 
* </pre> */ public com.google.protobuf.Empty deleteGenerator( com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeleteGeneratorMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service Generators. * * <pre> * Service for managing * [Generators][google.cloud.dialogflow.cx.v3beta1.Generator] * </pre> */ public static final class GeneratorsFutureStub extends io.grpc.stub.AbstractFutureStub<GeneratorsFutureStub> { private GeneratorsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected GeneratorsFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new GeneratorsFutureStub(channel, callOptions); } /** * * * <pre> * Returns the list of all generators in the specified agent. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse> listGenerators(com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListGeneratorsMethod(), getCallOptions()), request); } /** * * * <pre> * Retrieves the specified generator. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.cx.v3beta1.Generator> getGenerator(com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetGeneratorMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a generator in the specified agent. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.cx.v3beta1.Generator> createGenerator(com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreateGeneratorMethod(), getCallOptions()), request); } /** * * * <pre> * Update the specified generator. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.dialogflow.cx.v3beta1.Generator> updateGenerator(com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdateGeneratorMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes the specified generators. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deleteGenerator(com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeleteGeneratorMethod(), getCallOptions()), request); } } private static final int METHODID_LIST_GENERATORS = 0; private static final int METHODID_GET_GENERATOR = 1; private static final int METHODID_CREATE_GENERATOR = 2; private static final int METHODID_UPDATE_GENERATOR = 3; private static final int METHODID_DELETE_GENERATOR = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { 
switch (methodId) { case METHODID_LIST_GENERATORS: serviceImpl.listGenerators( (com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse>) responseObserver); break; case METHODID_GET_GENERATOR: serviceImpl.getGenerator( (com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator>) responseObserver); break; case METHODID_CREATE_GENERATOR: serviceImpl.createGenerator( (com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator>) responseObserver); break; case METHODID_UPDATE_GENERATOR: serviceImpl.updateGenerator( (com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Generator>) responseObserver); break; case METHODID_DELETE_GENERATOR: serviceImpl.deleteGenerator( (com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest) request, (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getListGeneratorsMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsRequest, com.google.cloud.dialogflow.cx.v3beta1.ListGeneratorsResponse>( service, METHODID_LIST_GENERATORS))) .addMethod( getGetGeneratorMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< 
com.google.cloud.dialogflow.cx.v3beta1.GetGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator>( service, METHODID_GET_GENERATOR))) .addMethod( getCreateGeneratorMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.cx.v3beta1.CreateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator>( service, METHODID_CREATE_GENERATOR))) .addMethod( getUpdateGeneratorMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.cx.v3beta1.UpdateGeneratorRequest, com.google.cloud.dialogflow.cx.v3beta1.Generator>( service, METHODID_UPDATE_GENERATOR))) .addMethod( getDeleteGeneratorMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.dialogflow.cx.v3beta1.DeleteGeneratorRequest, com.google.protobuf.Empty>(service, METHODID_DELETE_GENERATOR))) .build(); } private abstract static class GeneratorsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { GeneratorsBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.GeneratorProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Generators"); } } private static final class GeneratorsFileDescriptorSupplier extends GeneratorsBaseDescriptorSupplier { GeneratorsFileDescriptorSupplier() {} } private static final class GeneratorsMethodDescriptorSupplier extends GeneratorsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; GeneratorsMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return 
getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (GeneratorsGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new GeneratorsFileDescriptorSupplier()) .addMethod(getListGeneratorsMethod()) .addMethod(getGetGeneratorMethod()) .addMethod(getCreateGeneratorMethod()) .addMethod(getUpdateGeneratorMethod()) .addMethod(getDeleteGeneratorMethod()) .build(); } } } return result; } }
apache/seatunnel-web
37,637
seatunnel-web-it/src/test/java/org/apache/seatunnel/app/test/SeatunnelAccessControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.seatunnel.app.test; import org.apache.seatunnel.app.common.AccessControllerTestingImp; import org.apache.seatunnel.app.common.ResourcePermissionData; import org.apache.seatunnel.app.common.Result; import org.apache.seatunnel.app.common.SeaTunnelWebCluster; import org.apache.seatunnel.app.controller.JobConfigControllerWrapper; import org.apache.seatunnel.app.controller.JobControllerWrapper; import org.apache.seatunnel.app.controller.JobDefinitionControllerWrapper; import org.apache.seatunnel.app.controller.JobExecutorControllerWrapper; import org.apache.seatunnel.app.controller.SeatunnelDatasourceControllerWrapper; import org.apache.seatunnel.app.controller.UserControllerWrapper; import org.apache.seatunnel.app.controller.WorkspaceControllerWrapper; import org.apache.seatunnel.app.dal.entity.Workspace; import org.apache.seatunnel.app.domain.request.datasource.DatasourceReq; import org.apache.seatunnel.app.domain.request.job.JobConfig; import org.apache.seatunnel.app.domain.request.job.JobCreateReq; import org.apache.seatunnel.app.domain.request.user.AddUserReq; import org.apache.seatunnel.app.domain.request.user.UpdateUserReq; import 
org.apache.seatunnel.app.domain.request.user.UserLoginReq; import org.apache.seatunnel.app.domain.request.workspace.WorkspaceReq; import org.apache.seatunnel.app.domain.response.PageInfo; import org.apache.seatunnel.app.domain.response.datasource.DatasourceDetailRes; import org.apache.seatunnel.app.domain.response.datasource.DatasourceRes; import org.apache.seatunnel.app.domain.response.job.JobConfigRes; import org.apache.seatunnel.app.domain.response.job.JobDefinitionRes; import org.apache.seatunnel.app.domain.response.job.JobRes; import org.apache.seatunnel.app.domain.response.user.AddUserRes; import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes; import org.apache.seatunnel.app.utils.JobTestingUtils; import org.apache.seatunnel.common.access.AccessType; import org.apache.seatunnel.common.access.ResourceType; import org.apache.seatunnel.common.constants.JobMode; import org.apache.seatunnel.server.common.SeatunnelErrorEnum; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; public class SeatunnelAccessControllerTest { private static final SeaTunnelWebCluster seaTunnelWebCluster = new SeaTunnelWebCluster(); private static WorkspaceControllerWrapper workspaceControllerWrapper; private static UserControllerWrapper userControllerWrapper; private static SeatunnelDatasourceControllerWrapper datasourceControllerWrapper; private static JobDefinitionControllerWrapper jobDefinitionControllerWrapper; private static JobConfigControllerWrapper jobConfigControllerWrapper; private static JobControllerWrapper jobControllerWrapper; private static JobExecutorControllerWrapper jobExecutorControllerWrapper; private static final String uniqueId 
= "_" + System.currentTimeMillis(); @BeforeAll public static void setUp() { seaTunnelWebCluster.start(); workspaceControllerWrapper = new WorkspaceControllerWrapper(); userControllerWrapper = new UserControllerWrapper(); datasourceControllerWrapper = new SeatunnelDatasourceControllerWrapper(); jobDefinitionControllerWrapper = new JobDefinitionControllerWrapper(); jobConfigControllerWrapper = new JobConfigControllerWrapper(); jobControllerWrapper = new JobControllerWrapper(); jobExecutorControllerWrapper = new JobExecutorControllerWrapper(); AccessControllerTestingImp.enableAccessController(); } @Test public void testWorkspaceAccessPermission() { String user1 = "admin"; String workspaceName = "workspace_access_workspace" + uniqueId; Result<Long> createWorkspaceResult = workspaceControllerWrapper.createWorkspace(workspaceName); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), createWorkspaceResult.getCode()); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( null, workspaceName, ResourceType.WORKSPACE, Collections.singletonList(AccessType.CREATE))); createWorkspaceResult = workspaceControllerWrapper.createWorkspace(workspaceName); assertTrue(createWorkspaceResult.isSuccess()); // Handle read operation AccessControllerTestingImp.clearPermission(); Result<List<Workspace>> getWorkspaces = workspaceControllerWrapper.getAllWorkspaces(); assertTrue(getWorkspaces.isSuccess()); assertEquals(0, getWorkspaces.getData().size()); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( null, workspaceName, ResourceType.WORKSPACE, Collections.singletonList(AccessType.READ))); getWorkspaces = workspaceControllerWrapper.getAllWorkspaces(); assertTrue(getWorkspaces.isSuccess()); assertEquals(1, getWorkspaces.getData().size()); String anotherWorkspace = "another_workspace_access" + uniqueId; AccessControllerTestingImp.clearPermission(); AccessControllerTestingImp.resetResourcePermission( user1, new 
ResourcePermissionData( null, anotherWorkspace, ResourceType.WORKSPACE, Collections.singletonList(AccessType.CREATE))); Result<Long> anotherCreateWorkspaceResult = workspaceControllerWrapper.createWorkspace(anotherWorkspace); assertTrue(anotherCreateWorkspaceResult.isSuccess()); getWorkspaces = workspaceControllerWrapper.getAllWorkspaces(); assertTrue(anotherCreateWorkspaceResult.isSuccess()); assertEquals(0, getWorkspaces.getData().size()); AccessControllerTestingImp.clearPermission(); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( null, workspaceName, ResourceType.WORKSPACE, Collections.singletonList(AccessType.READ))); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( null, anotherWorkspace, ResourceType.WORKSPACE, Collections.singletonList(AccessType.READ))); getWorkspaces = workspaceControllerWrapper.getAllWorkspaces(); assertTrue(anotherCreateWorkspaceResult.isSuccess()); assertEquals(2, getWorkspaces.getData().size()); // Handle update operation AccessControllerTestingImp.clearPermission(); WorkspaceReq updateWorkspaceReq = new WorkspaceReq(workspaceName + "_new", "new description"); Result<Boolean> updateResult = workspaceControllerWrapper.updateWorkspace( createWorkspaceResult.getData(), updateWorkspaceReq); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), updateResult.getCode()); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( null, workspaceName, ResourceType.WORKSPACE, Collections.singletonList(AccessType.UPDATE))); updateResult = workspaceControllerWrapper.updateWorkspace( createWorkspaceResult.getData(), updateWorkspaceReq); assertTrue(updateResult.isSuccess(), updateResult.getMsg()); // Handle delete operation AccessControllerTestingImp.clearPermission(); Result<Boolean> deleteResult = workspaceControllerWrapper.deleteWorkspace(createWorkspaceResult.getData()); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), 
deleteResult.getCode()); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( null, updateWorkspaceReq.getWorkspaceName(), ResourceType.WORKSPACE, Collections.singletonList(AccessType.DELETE))); deleteResult = workspaceControllerWrapper.deleteWorkspace(createWorkspaceResult.getData()); assertTrue(deleteResult.isSuccess(), deleteResult.getMsg()); } @Test public void testUserAccessPermission() { String user1 = "user_access_user_1" + uniqueId; String pass = "somePassword"; String workspaceName = "workspace_access_user" + uniqueId; List<AccessType> accessTypes = new ArrayList<>(); accessTypes.add(AccessType.CREATE); createWorkspaceAndUser(workspaceName, user1, pass); login(new UserLoginReq(user1, pass, workspaceName)); // Handle create operation accessTypes.clear(); String newUser = "new_user_access" + uniqueId; Result<AddUserRes> addUserResult = userControllerWrapper.addUser(getAddUserReq(newUser, pass)); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), addUserResult.getCode()); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); // should be successful as user1 has access to create user addUserResult = userControllerWrapper.addUser(getAddUserReq(newUser, pass)); assertTrue(addUserResult.isSuccess()); // Handle read operation Result<PageInfo<UserSimpleInfoRes>> getUsers = userControllerWrapper.listUsers(newUser); assertTrue(getUsers.isSuccess()); assertEquals(0, getUsers.getData().getData().size()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); getUsers = userControllerWrapper.listUsers(newUser); assertTrue(getUsers.isSuccess()); assertEquals(1, getUsers.getData().getData().size()); String anotherUser = "another_user_access" + uniqueId; 
accessTypes.clear(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, anotherUser, ResourceType.USER, accessTypes)); Result<AddUserRes> anotherAddUserResult = userControllerWrapper.addUser(getAddUserReq(anotherUser, pass)); assertTrue(anotherAddUserResult.isSuccess()); Result<PageInfo<UserSimpleInfoRes>> listUsers = userControllerWrapper.listUsers(); assertTrue(anotherAddUserResult.isSuccess()); assertEquals(0, listUsers.getData().getData().size()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, anotherUser, ResourceType.USER, accessTypes)); listUsers = userControllerWrapper.listUsers(); assertTrue(listUsers.isSuccess()); assertEquals(2, listUsers.getData().getData().size()); // Handle update operation UpdateUserReq updateUserReq = new UpdateUserReq(); updateUserReq.setUsername(newUser); updateUserReq.setUserId(addUserResult.getData().getId()); updateUserReq.setPassword("newPassword"); updateUserReq.setStatus((byte) 0); updateUserReq.setType((byte) 0); Result<Void> updateResult = userControllerWrapper.updateUser( String.valueOf(updateUserReq.getUserId()), updateUserReq); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), updateResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); updateResult = userControllerWrapper.updateUser( String.valueOf(updateUserReq.getUserId()), updateUserReq); assertTrue(updateResult.isSuccess(), updateResult.getMsg()); // Handle disable operation AccessControllerTestingImp.clearPermission(); Result<Void> disableResult = 
userControllerWrapper.disableUser(String.valueOf(updateUserReq.getUserId())); assertEquals( SeatunnelErrorEnum.ACCESS_DENIED.getCode(), disableResult.getCode(), disableResult.getMsg()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); disableResult = userControllerWrapper.disableUser(String.valueOf(updateUserReq.getUserId())); assertTrue(disableResult.isSuccess()); // Handle enable operation AccessControllerTestingImp.clearPermission(); Result<Void> enableResult = userControllerWrapper.enableUser(String.valueOf(updateUserReq.getUserId())); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), enableResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); enableResult = userControllerWrapper.enableUser(String.valueOf(updateUserReq.getUserId())); assertTrue(enableResult.isSuccess()); // Handle delete operation AccessControllerTestingImp.clearPermission(); Result<Void> deleteResult = userControllerWrapper.deleteUser(String.valueOf(addUserResult.getData().getId())); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), deleteResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.DELETE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, newUser, ResourceType.USER, accessTypes)); deleteResult = userControllerWrapper.deleteUser(String.valueOf(addUserResult.getData().getId())); assertTrue(deleteResult.isSuccess()); } @Test public void testDatasourceAccessPermission() { String user1 = "user_access_datasource_1" + uniqueId; String user2 = "user_access_datasource_2" + uniqueId; String pass = "somePassword"; String workspaceName = "workspace_access_datasource" + uniqueId; // create workspaces and 
users using admin credentials createWorkspaceAndUser(workspaceName, user1, pass); createUserAndVerify(user2, pass); login(new UserLoginReq(user1, pass, workspaceName)); // Handle create operation String datasourceName1 = "1_datasource_access" + uniqueId; datasourceControllerWrapper.createDatasourceExpectingFailure(datasourceName1); List<AccessType> accessTypes = new ArrayList<>(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); // should be successful as user1 has access to create datasource String datasourceId1 = datasourceControllerWrapper.createFakeSourceDatasource(datasourceName1); Result<DatasourceDetailRes> getDataSource = datasourceControllerWrapper.getDatasource(datasourceId1); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), getDataSource.getCode()); // Handle read operation accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); getDataSource = datasourceControllerWrapper.getDatasource(datasourceId1); assertTrue(getDataSource.isSuccess()); // Handle update operation DatasourceReq req = new DatasourceReq(); req.setDescription(getDataSource.getData().getDescription() + " new description"); Result<Boolean> updateResult = datasourceControllerWrapper.updateDatasource(datasourceId1, req); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), updateResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); updateResult = datasourceControllerWrapper.updateDatasource(datasourceId1, req); assertTrue(updateResult.isSuccess()); // Handle delete operation Result<Boolean> deleteResult = 
datasourceControllerWrapper.deleteDatasource(datasourceId1); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), deleteResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.DELETE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); deleteResult = datasourceControllerWrapper.deleteDatasource(datasourceId1); assertTrue(deleteResult.isSuccess()); // create again to use in list datasource accessTypes.clear(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); datasourceControllerWrapper.createFakeSourceDatasource(datasourceName1); // logout and login with another user userControllerWrapper.logout(); login(new UserLoginReq(user2, pass, workspaceName)); // Handle list operation String datasourceName2 = "2_datasource_access" + uniqueId; accessTypes.clear(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user2, new ResourcePermissionData( workspaceName, datasourceName2, ResourceType.DATASOURCE, accessTypes)); String datasourceId2 = datasourceControllerWrapper.createFakeSourceDatasource(datasourceName2); Result<PageInfo<DatasourceRes>> datasourceList = datasourceControllerWrapper.getDatasourceList( "datasource_access" + uniqueId, "FakeSource", 1, 10); assertTrue(datasourceList.isSuccess()); assertEquals(0, datasourceList.getData().getData().size()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user2, new ResourcePermissionData( workspaceName, datasourceName2, ResourceType.DATASOURCE, accessTypes)); datasourceList = datasourceControllerWrapper.getDatasourceList( "datasource_access" + uniqueId, "FakeSource", 1, 10); assertTrue(datasourceList.isSuccess()); assertEquals(1, 
datasourceList.getData().getData().size()); assertEquals(datasourceId2, datasourceList.getData().getData().get(0).getId()); // Give permission to user2 on datasource created by user1 AccessControllerTestingImp.addResourcePermission( user2, new ResourcePermissionData( workspaceName, datasourceName1, ResourceType.DATASOURCE, accessTypes)); datasourceList = datasourceControllerWrapper.getDatasourceList( "datasource_access" + uniqueId, "FakeSource", 1, 10); assertTrue(datasourceList.isSuccess()); assertEquals(2, datasourceList.getData().getData().size()); } @Test public void testJobAccessPermission() { String user1 = "user_access_job_1" + uniqueId; String user2 = "user_access_job_2" + uniqueId; String pass = "somePassword"; String workspaceName = "workspace_access_job" + uniqueId; // create workspaces and users using admin credentials createWorkspaceAndUser(workspaceName, user1, pass); createUserAndVerify(user2, pass); login(new UserLoginReq(user1, pass, workspaceName)); // Handle create operation String jobName1 = "1_job_access" + uniqueId; jobDefinitionControllerWrapper.createJobExpectingFailure(jobName1); List<AccessType> accessTypes = new ArrayList<>(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); // should be successful as user1 has access to create job Long jobId = jobDefinitionControllerWrapper.createJobDefinition(jobName1); // Handle read operation Result<JobDefinitionRes> getJob = jobDefinitionControllerWrapper.getJobDefinitionById(jobId); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), getJob.getCode()); Result<JobConfigRes> getJobConfig = jobConfigControllerWrapper.getJobConfig(jobId); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), getJobConfig.getCode()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user1, new 
ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); getJob = jobDefinitionControllerWrapper.getJobDefinitionById(jobId); assertTrue(getJob.isSuccess()); getJobConfig = jobConfigControllerWrapper.getJobConfig(jobId); assertTrue(getJobConfig.isSuccess()); // Handle update operation AccessControllerTestingImp.clearPermission(); JobConfig jobConfig = jobConfigControllerWrapper.populateJobConfigObject(jobName1); Result<Void> updateResult = jobConfigControllerWrapper.updateJobConfig(jobId, jobConfig); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), updateResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); updateResult = jobConfigControllerWrapper.updateJobConfig(jobId, jobConfig); assertTrue(updateResult.isSuccess()); // Handle delete operation Result<Void> deleteResult = jobDefinitionControllerWrapper.deleteJobDefinition(jobId); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), deleteResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.DELETE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); deleteResult = jobDefinitionControllerWrapper.deleteJobDefinition(jobId); assertTrue(deleteResult.isSuccess()); // create again to use in list job accessTypes.clear(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); jobDefinitionControllerWrapper.createJobDefinition(jobName1); // logout and login with another user userControllerWrapper.logout(); login(new UserLoginReq(user2, pass, workspaceName)); // Handle list operation String jobName2 = "2_job_access" + uniqueId; accessTypes.clear(); accessTypes.add(AccessType.CREATE); 
AccessControllerTestingImp.resetResourcePermission( user2, new ResourcePermissionData(workspaceName, jobName2, ResourceType.JOB, accessTypes)); Long jobId2 = jobDefinitionControllerWrapper.createJobDefinition(jobName2); Result<PageInfo<JobDefinitionRes>> jobList = jobDefinitionControllerWrapper.getJobDefinition( "job_access" + uniqueId, 1, 10, JobMode.BATCH); assertTrue(jobList.isSuccess()); assertEquals(0, jobList.getData().getData().size()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user2, new ResourcePermissionData(workspaceName, jobName2, ResourceType.JOB, accessTypes)); jobList = jobDefinitionControllerWrapper.getJobDefinition( "job_access" + uniqueId, 1, 10, JobMode.BATCH); assertTrue(jobList.isSuccess()); assertEquals(1, jobList.getData().getData().size()); assertEquals(jobId2, jobList.getData().getData().get(0).getId()); // Give permission to user2 on job created by user1 AccessControllerTestingImp.addResourcePermission( user2, new ResourcePermissionData(workspaceName, jobName1, ResourceType.JOB, accessTypes)); jobList = jobDefinitionControllerWrapper.getJobDefinition( "job_access" + uniqueId, 1, 10, JobMode.BATCH); assertTrue(jobList.isSuccess()); assertEquals(2, jobList.getData().getData().size()); } @Test public void testJobExecutionAccessPermission() { String userName = "jobExec_user_access" + uniqueId; String pass = "somePassword"; String workspaceName = "jobExec_workspace_access" + uniqueId; // create workspaces and users using admin credentials createWorkspaceAndUser(workspaceName, userName, pass); login(new UserLoginReq(userName, pass, workspaceName)); String jobName = "execJob_access" + uniqueId; List<AccessType> accessTypes = new ArrayList<>(); accessTypes.add(AccessType.CREATE); // job update api is called during job creation, accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( userName, new ResourcePermissionData(workspaceName, jobName, 
ResourceType.JOB, accessTypes)); AccessControllerTestingImp.addResourcePermission( userName, new ResourcePermissionData( workspaceName, "source_" + jobName, ResourceType.DATASOURCE, Arrays.asList(AccessType.CREATE, AccessType.READ))); AccessControllerTestingImp.addResourcePermission( userName, new ResourcePermissionData( workspaceName, "console_" + jobName, ResourceType.DATASOURCE, Arrays.asList(AccessType.CREATE, AccessType.READ))); long jobVersionId = JobTestingUtils.createJob(jobName); Result<Long> executionResult = jobExecutorControllerWrapper.jobExecutor(jobVersionId); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), executionResult.getCode()); accessTypes.add(AccessType.EXECUTE); AccessControllerTestingImp.resetResourcePermission( userName, new ResourcePermissionData(workspaceName, jobName, ResourceType.JOB, accessTypes)); AccessControllerTestingImp.addResourcePermission( userName, new ResourcePermissionData( workspaceName, "source_" + jobName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); AccessControllerTestingImp.addResourcePermission( userName, new ResourcePermissionData( workspaceName, "console_" + jobName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); executionResult = jobExecutorControllerWrapper.jobExecutor(jobVersionId); assertTrue(executionResult.isSuccess(), executionResult.getMsg()); } @Test public void testJobAccessPermissionForSingleJobCreateAPI() { String user1 = "user_access_single_job_1" + uniqueId; String user2 = "user_access_single_job_2" + uniqueId; String pass = "somePassword"; String workspaceName = "workspace_access_single_job" + uniqueId; // create workspaces and users using admin credentials createWorkspaceAndUser(workspaceName, user1, pass); createUserAndVerify(user2, pass); login(new UserLoginReq(user1, pass, workspaceName)); // Handle create operation String jobName = "access_single_api" + uniqueId; String fsdSourceName = "fake_source_create" + uniqueId; String csSourceName 
= "console_create" + uniqueId; List<AccessType> accessTypes = new ArrayList<>(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData( workspaceName, fsdSourceName, ResourceType.DATASOURCE, accessTypes)); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, csSourceName, ResourceType.DATASOURCE, accessTypes)); JobCreateReq jobCreateReq = JobTestingUtils.populateJobCreateReqFromFile(jobName, fsdSourceName, csSourceName); Result<Long> jobCreation = jobControllerWrapper.createJob(jobCreateReq); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), jobCreation.getCode()); accessTypes.clear(); accessTypes.add(AccessType.CREATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName, ResourceType.JOB, accessTypes)); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, fsdSourceName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, csSourceName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); jobCreation = jobControllerWrapper.createJob(jobCreateReq); assertTrue(jobCreation.isSuccess()); // Handle read operation Result<JobRes> getJobResponse = jobControllerWrapper.getJob(jobCreation.getData()); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), getJobResponse.getCode()); accessTypes.clear(); accessTypes.add(AccessType.READ); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName, ResourceType.JOB, accessTypes)); getJobResponse = jobControllerWrapper.getJob(jobCreation.getData()); assertTrue(getJobResponse.isSuccess(), getJobResponse.getMsg()); // Handle update operation JobCreateReq jobUpdateReq = 
jobControllerWrapper.convertJobResToJobCreateReq(getJobResponse.getData()); Result<Void> updateResult = jobControllerWrapper.updateJob(jobCreation.getData(), jobUpdateReq); assertEquals(SeatunnelErrorEnum.ACCESS_DENIED.getCode(), updateResult.getCode()); accessTypes.clear(); accessTypes.add(AccessType.UPDATE); AccessControllerTestingImp.resetResourcePermission( user1, new ResourcePermissionData(workspaceName, jobName, ResourceType.JOB, accessTypes)); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, fsdSourceName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); AccessControllerTestingImp.addResourcePermission( user1, new ResourcePermissionData( workspaceName, csSourceName, ResourceType.DATASOURCE, Collections.singletonList(AccessType.READ))); updateResult = jobControllerWrapper.updateJob(jobCreation.getData(), jobUpdateReq); assertTrue(updateResult.isSuccess(), updateResult.getMsg()); } private void createWorkspaceAndUser(String workspaceName, String username, String password) { AccessControllerTestingImp.resetResourcePermission( "admin", new ResourcePermissionData( null, workspaceName, ResourceType.WORKSPACE, Arrays.asList(AccessType.CREATE, AccessType.UPDATE))); workspaceControllerWrapper.createWorkspaceAndVerify(workspaceName); createUserAndVerify(username, password); } private void createUserAndVerify(String username, String password) { AccessControllerTestingImp.addResourcePermission( "admin", new ResourcePermissionData( null, username, ResourceType.USER, Collections.singletonList(AccessType.CREATE))); Result<AddUserRes> result = userControllerWrapper.addUser(getAddUserReq(username, password)); assertTrue(result.isSuccess()); } private static void login(UserLoginReq userLoginReq) { Result<UserSimpleInfoRes> login = userControllerWrapper.login(userLoginReq, null, true); assertTrue(login.isSuccess()); } private AddUserReq getAddUserReq(String user, String pass) { AddUserReq addUserReq = new 
AddUserReq(); addUserReq.setUsername(user); addUserReq.setPassword(pass); addUserReq.setStatus((byte) 0); addUserReq.setType((byte) 0); return addUserReq; } @AfterEach public void cleanup() { userControllerWrapper.logout(); AccessControllerTestingImp.clearPermission(); } @AfterAll public static void tearDown() { AccessControllerTestingImp.disableAccessController(); seaTunnelWebCluster.stop(); } }
apache/derby
34,841
java/org.apache.derby.engine/org/apache/derby/impl/sql/catalog/TabInfoImpl.java
/* Derby - Class org.apache.derby.impl.sql.catalog.TabInfoImpl Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.catalog; import org.apache.derby.iapi.services.io.FormatableBitSet; import org.apache.derby.shared.common.sanity.SanityManager; import org.apache.derby.iapi.services.io.StreamStorable; import org.apache.derby.shared.common.error.StandardException; import org.apache.derby.iapi.sql.dictionary.CatalogRowFactory; import org.apache.derby.iapi.sql.dictionary.ConglomerateDescriptor; import org.apache.derby.iapi.sql.dictionary.IndexRowGenerator; import org.apache.derby.iapi.sql.execute.ExecIndexRow; import org.apache.derby.iapi.sql.execute.ExecRow; import org.apache.derby.iapi.sql.execute.RowChanger; import org.apache.derby.iapi.sql.execute.TupleFilter; import org.apache.derby.iapi.sql.Activation; import org.apache.derby.iapi.store.access.ConglomerateController; import org.apache.derby.iapi.store.access.DynamicCompiledOpenConglomInfo; import org.apache.derby.iapi.store.access.Qualifier; import org.apache.derby.iapi.store.access.ScanController; import org.apache.derby.iapi.store.access.StaticCompiledOpenConglomInfo; import org.apache.derby.iapi.store.access.TransactionController; import org.apache.derby.iapi.types.DataValueDescriptor; import 
org.apache.derby.iapi.types.RowLocation;

import java.util.Properties;

/**
 * A poor mans structure used in DataDictionaryImpl.java.
 * Used to save heapId, name pairs for non core tables.
 *
 * Wraps a CatalogRowFactory plus the heap/index conglomerate numbers for one
 * system catalog, and provides row-level insert/delete/update/lookup access
 * driven through the catalog's indexes.
 */
class TabInfoImpl
{
    /**
     * ROWNOTDUPLICATE is out of range for a row
     * number.  If a return code does not equal
     * this value, then it refers to the row
     * that is a duplicate.
     */
    static final int ROWNOTDUPLICATE = -1;

    // one slot per index declared by the row factory; null when the
    // catalog has no indexes
    private IndexInfoImpl[] indexes;
    // heap conglomerate number; -1 until setHeapConglomerate() is called
    private long heapConglomerate;
    // how many index conglomerates have been assigned so far (see isComplete())
    private int numIndexesSet;
    // true once the heap conglomerate has been assigned
    private boolean heapSet;
    private final CatalogRowFactory crf;

    /**
     * Constructor
     *
     * @param crf the associated CatalogRowFactory
     */
    TabInfoImpl(CatalogRowFactory crf)
    {
        this.heapConglomerate = -1;
        this.crf = crf;

        int numIndexes = crf.getNumIndexes();

        if (numIndexes > 0)
        {
            indexes = new IndexInfoImpl[numIndexes];

            /* Init indexes */
            for (int indexCtr = 0; indexCtr < numIndexes; indexCtr++)
            {
                indexes[indexCtr] = new IndexInfoImpl( indexCtr, crf);
            }
        }
    }

    /**
     * Get the conglomerate for the heap.
     *
     * @return long  The conglomerate for the heap.
     */
    long getHeapConglomerate()
    {
        return heapConglomerate;
    }

    /**
     * Set the heap conglomerate for this.
     *
     * @param heapConglomerate  The new heap conglomerate.
     */
    void setHeapConglomerate(long heapConglomerate)
    {
        this.heapConglomerate = heapConglomerate;
        heapSet = true;
    }

    /**
     * Get the conglomerate for the specified index.
     *
     * @return long  The conglomerate for the specified index.
     */
    long getIndexConglomerate(int indexID)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (indexID >= indexes.length)
            {
                SanityManager.THROWASSERT(
                    "indexID (" + indexID + ") is out of range(0-" +
                    indexes.length + ")");
            }
        }

        return indexes[indexID].getConglomerateNumber();
    }

    /**
     * Set the index conglomerate for the table.
     *
     * @param index             Index number for index for table
     * @param indexConglomerate The conglomerate for that index
     */
    void setIndexConglomerate(int index, long indexConglomerate)
    {
        /* Index names must be set before conglomerates.
         * Also verify that we are not setting the same conglomerate
         * twice.
         */
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes[index] != null,
                "indexes[index] expected to be non-null");
            SanityManager.ASSERT(indexes[index].getConglomerateNumber() == -1,
                "indexes[index] expected to be -1");
        }
        indexes[index].setConglomerateNumber(indexConglomerate);

        /* We are completely initialized when all indexes have
         * their conglomerates initialized
         */
        numIndexesSet++;
    }

    /**
     * Set the index conglomerate for the table.
     *
     * @param cd  The ConglomerateDescriptor for one of the index
     *            for this table.
     */
    void setIndexConglomerate(ConglomerateDescriptor cd)
    {
        int index;
        String indexName = cd.getConglomerateName();

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
        }

        // locate the matching index by name and assign its conglomerate number
        for (index = 0; index < indexes.length; index++)
        {
            /* All index names expected to be set before
             * any conglomerate is set.
             */
            if (SanityManager.DEBUG)
            {
                SanityManager.ASSERT(indexes[index] != null,
                    "indexes[index] expected to be non-null");
                SanityManager.ASSERT(indexes[index].getIndexName() != null,
                    "indexes[index].getIndexName() expected to be non-null");
            }

            /* Do we have a match? */
            if (indexes[index].getIndexName().equals(indexName))
            {
                indexes[index].setConglomerateNumber(cd.getConglomerateNumber());
                break;
            }
        }

        if (SanityManager.DEBUG)
        {
            if (index == indexes.length)
            {
                SanityManager.THROWASSERT("match not found for " + indexName);
            }
        }

        /* We are completely initialized when all indexIds are initialized */
        numIndexesSet++;
    }

    /**
     * Get the table name.
     *
     * @return String  The table name.
     */
    String getTableName()
    {
        return crf.getCatalogName();
    }

    /**
     * Get the index name.
     *
     * @param indexId  Index number for index for table
     *
     * @return String  The index name.
     */
    String getIndexName(int indexId)
    {
        return indexes[indexId].getIndexName();
    }

    /**
     * Get the CatalogRowFactory for this.
     *
     * @return CatalogRowFactory  The CatalogRowFactory for this.
     */
    CatalogRowFactory getCatalogRowFactory()
    {
        return crf;
    }

    /**
     * Is this fully initialized.
     * (i.e., is all conglomerate info initialized)
     *
     * @return boolean  Whether or not this is fully initialized.
     */
    boolean isComplete()
    {
        /* We are complete when heap conglomerate and all
         * index conglomerates are set.
         */
        if (! heapSet)
        {
            return false;
        }
        return (indexes == null ||
                indexes.length == numIndexesSet);
    }

    /**
     * Get the column count for the specified index number.
     *
     * @param indexNumber  The index number.
     *
     * @return int  The column count for the specified index.
     */
    int getIndexColumnCount(int indexNumber)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (!(indexNumber < indexes.length))
            {
                SanityManager.THROWASSERT("indexNumber (" + indexNumber +
                    ") is out of range(0-" + indexes.length + ")");
            }
        }

        return indexes[indexNumber].getColumnCount();
    }

    /**
     * Get the IndexRowGenerator for the specified index number.
     *
     * @param indexNumber  The index number.
     *
     * @return IndexRowGenerator  The IRG for the specified index number.
     */
    IndexRowGenerator getIndexRowGenerator(int indexNumber)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (indexNumber >= indexes.length)
            {
                SanityManager.THROWASSERT(
                    "indexNumber (" + indexNumber + ") is out of range(0-" +
                    indexes.length + ")");
            }
        }
        return indexes[indexNumber].getIndexRowGenerator();
    }

    /**
     * Set the IndexRowGenerator for the specified index number.
     *
     * @param indexNumber  The index number.
     * @param irg          The IndexRowGenerator for the specified index number.
     */
    void setIndexRowGenerator(int indexNumber, IndexRowGenerator irg)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (indexNumber >= indexes.length)
            {
                SanityManager.THROWASSERT(
                    "indexNumber (" + indexNumber + ") is out of range(0-" +
                    indexes.length + ")");
            }
        }

        indexes[indexNumber].setIndexRowGenerator(irg);
    }

    /**
     * Get the number of indexes on this catalog.
     *
     * @return int  The number of indexes on this catalog.
     */
    int getNumberOfIndexes()
    {
        if (indexes == null)
        {
            return 0;
        }
        else
        {
            return indexes.length;
        }
    }

    /**
     * Get the base column position for a column within a catalog
     * given the (0-based) index number for this catalog and the
     * (0-based) column number for the column within the index.
     *
     * @param indexNumber  The index number
     * @param colNumber    The column number within the index
     *
     * @return int  The base column position for the column.
     */
    int getBaseColumnPosition(int indexNumber, int colNumber)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (indexNumber >= indexes.length)
            {
                SanityManager.THROWASSERT("indexNumber (" + indexNumber +
                    ") is out of range(0-" + indexes.length + ")");
            }
        }

        return indexes[indexNumber].getBaseColumnPosition(colNumber);
    }

    /**
     * Return whether or not this index is declared unique
     *
     * @param indexNumber  The index number
     *
     * @return boolean  Whether or not this index is declared unique
     */
    boolean isIndexUnique(int indexNumber)
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(indexes != null,
                "indexes is expected to be non-null");
            if (indexNumber >= indexes.length)
            {
                SanityManager.THROWASSERT("indexNumber (" + indexNumber +
                    ") is out of range(0-" + indexes.length + ")");
            }
        }

        return indexes[indexNumber].isIndexUnique();
    }

    /**
     * Inserts a base row into a catalog and inserts all the corresponding
     * index rows.
     *
     * @param row  row to insert
     * @param tc   transaction
     * @return row number (&gt;= 0) if duplicate row inserted into an index
     *         ROWNOTDUPLICATE otherwise
     *
     * @exception StandardException  Thrown on failure
     */
    int insertRow( ExecRow row, TransactionController tc)
        throws StandardException
    {
        // caller does not need the RowLocation of the inserted row
        RowLocation[] notUsed = new RowLocation[1];

        return insertRowListImpl(new ExecRow[] {row},tc,notUsed);
    }

    /**
     * Inserts a list of base rows into a catalog and inserts all the
     * corresponding index rows.
     *
     * @param rowList  List of rows to insert
     * @param tc       transaction controller
     *
     * @return row number (&gt;= 0) if duplicate row inserted into an index
     *         ROWNOTDUPLICATE otherwise
     *
     * @exception StandardException  Thrown on failure
     */
    int insertRowList(ExecRow[] rowList, TransactionController tc )
        throws StandardException
    {
        // caller does not need the RowLocation of the last inserted row
        RowLocation[] notUsed = new RowLocation[1];

        return insertRowListImpl(rowList,tc,notUsed);
    }

    /**
     * Insert logic to insert a list of rows into a table. This logic has two
     * odd features.
     * <OL>
     * <LI>Returns an indication if any returned row was a duplicate.
     * <LI>Returns the RowLocation of the last row inserted.
     * </OL>
     *
     * @param rowList        the list of rows to insert
     * @param tc             transaction controller
     * @param rowLocationOut on output rowLocationOut[0] is set to the
     *                       last RowLocation inserted.
     * @return row number (&gt;= 0) if duplicate row inserted into an index
     *         ROWNOTDUPLICATE otherwise
     *         (NOTE: if several rows collide, this is the row number of the
     *         LAST duplicate seen, since retCode is overwritten per duplicate)
     */
    private int insertRowListImpl(ExecRow[] rowList, TransactionController tc,
                                  RowLocation[] rowLocationOut)
        throws StandardException
    {
        ConglomerateController heapController;
        RowLocation heapLocation;
        ExecIndexRow indexableRow;
        int insertRetCode;
        int retCode = ROWNOTDUPLICATE;
        int indexCount = crf.getNumIndexes();
        ConglomerateController[] indexControllers =
            new ConglomerateController[ indexCount ];

        // Open the conglomerates
        heapController = tc.openConglomerate(
            getHeapConglomerate(),
            false,
            TransactionController.OPENMODE_FORUPDATE,
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ);

        /* NOTE: Due to the lovely problem of trying to add
         * a new column to syscolumns and an index on that
         * column during upgrade, we have to deal with the
         * issue of the index not existing yet.  So, it's okay
         * if the index doesn't exist yet.  (It will magically
         * get created at a later point during upgrade.)
         */
        for ( int ictr = 0; ictr < indexCount; ictr++ )
        {
            long conglomNumber = getIndexConglomerate(ictr);
            if (conglomNumber > -1)
            {
                indexControllers[ ictr ] = tc.openConglomerate(
                    conglomNumber,
                    false,
                    TransactionController.OPENMODE_FORUPDATE,
                    TransactionController.MODE_RECORD,
                    TransactionController.ISOLATION_REPEATABLE_READ);
            }
        }

        heapLocation = heapController.newRowLocationTemplate();
        // the same template object is refilled by each insertAndFetchLocation,
        // so after the loop it holds the location of the last row inserted
        rowLocationOut[0]=heapLocation;

        // loop through rows on this list, inserting them into system table
        for (int rowNumber = 0; rowNumber < rowList.length; rowNumber++)
        {
            ExecRow row = rowList[rowNumber];
            // insert the base row and get its new location
            heapController.insertAndFetchLocation(row.getRowArray(),
                                                  heapLocation);

            for ( int ictr = 0; ictr < indexCount; ictr++ )
            {
                if (indexControllers[ ictr ] == null)
                {
                    continue;   // index not created yet (upgrade case above)
                }

                // Get an index row based on the base row
                indexableRow = getIndexRowFromHeapRow(
                    getIndexRowGenerator(ictr),
                    heapLocation,
                    row );

                insertRetCode = indexControllers[ ictr ].insert(
                    indexableRow.getRowArray());

                if ( insertRetCode == ConglomerateController.ROWISDUPLICATE )
                {
                    retCode = rowNumber;
                }
            }
        }   // end loop through rows on list

        // Close the open conglomerates
        for ( int ictr = 0; ictr < indexCount; ictr++ )
        {
            if (indexControllers[ ictr ] == null)
            {
                continue;
            }

            indexControllers[ ictr ].close();
        }
        heapController.close();

        return retCode;
    }

    /**
     * Given a key row, delete all matching heap rows and their index
     * rows.
     * <p>
     * LOCKING: row locking if there is a key; otherwise,
     * table locking.
     *
     * @param tc          transaction controller
     * @param key         key to delete by.
     * @param indexNumber Key is appropriate for this index.
     * @return the number of rows deleted. If key is not unique,
     *         this may be more than one.
     * @exception StandardException  Thrown on failure
     */
    int deleteRow( TransactionController tc, ExecIndexRow key,
                   int indexNumber )
        throws StandardException
    {
        // Always row locking
        return deleteRows(tc,
                          key, ScanController.GE,
                          null, null,
                          key, ScanController.GT,
                          indexNumber,
                          true);
    }

    /**
     * Same as {@code deleteRow(tc, key, indexNumber)} but allows the caller
     * to choose whether lock requests wait.
     *
     * @param wait  whether to wait for locks (false uses NOWAIT open mode)
     * @exception StandardException  Thrown on failure
     */
    int deleteRow( TransactionController tc, ExecIndexRow key,
                   int indexNumber, boolean wait)
        throws StandardException
    {
        // Always row locking
        return deleteRows(tc,
                          key, ScanController.GE,
                          null, null,
                          key, ScanController.GT,
                          indexNumber,
                          wait);
    }

    /**
     * Delete the set of rows defined by a scan on an index
     * from the table. Most of the parameters are simply passed
     * to TransactionController.openScan. Please refer to the
     * TransactionController documentation for details.
     * <p>
     * LOCKING: row locking if there is a start and a stop
     * key; otherwise, table locking
     *
     * @param tc          transaction controller
     * @param startKey    key to start the scan.
     * @param startOp     operation to start the scan.
     * @param stopKey     key to stop the scan.
     * @param qualifier   a qualifier for the scan.
     * @param filter      filter on base rows
     * @param stopOp      operation to stop the scan.
     * @param indexNumber Key is appropriate for this index.
     * @return the number of rows deleted.
     * @exception StandardException  Thrown on failure
     * @see TransactionController#openScan
     */
    int deleteRows(TransactionController tc,
                   ExecIndexRow startKey,
                   int startOp,
                   Qualifier[][] qualifier,
                   TupleFilter filter,
                   ExecIndexRow stopKey,
                   int stopOp,
                   int indexNumber)
        throws StandardException
    {
        // waiting variant
        return deleteRows(tc,
                          startKey,
                          startOp,
                          qualifier,
                          filter,
                          stopKey,
                          stopOp,
                          indexNumber,
                          true);
    }

    /**
     * @inheritDoc
     *
     * Worker for the public deleteRows/deleteRow methods; additionally takes
     * {@code wait} which controls whether conglomerate opens use
     * OPENMODE_LOCK_NOWAIT.
     */
    private int deleteRows(TransactionController tc,
                           ExecIndexRow startKey,
                           int startOp,
                           Qualifier[][] qualifier,
                           TupleFilter filter,
                           ExecIndexRow stopKey,
                           int stopOp,
                           int indexNumber,
                           boolean wait)
        throws StandardException
    {
        ConglomerateController heapCC;
        ScanController drivingScan;
        ExecIndexRow drivingIndexRow;
        RowLocation baseRowLocation;
        RowChanger rc;
        ExecRow baseRow = crf.makeEmptyRow();
        int rowsDeleted = 0;
        boolean passedFilter = true;

        rc = getRowChanger( tc, (int[])null,baseRow );

        /*
        ** If we have a start and a stop key, then we are going to
        ** get row locks, otherwise, we are getting table locks.
        ** This may be excessive locking for the case where there
        ** is a start key and no stop key or vice versa.
        */
        int lockMode = ((startKey != null) && (stopKey != null)) ?
            TransactionController.MODE_RECORD :
            TransactionController.MODE_TABLE;

        /*
        ** Don't use level 3 if we have the same start/stop key.
        */
        int isolation =
            ((startKey != null) && (stopKey != null) && (startKey == stopKey)) ?
            TransactionController.ISOLATION_REPEATABLE_READ :
            TransactionController.ISOLATION_SERIALIZABLE;

        // Row level locking
        rc.open(lockMode, wait);

        DataValueDescriptor[] startKeyRow =
            startKey == null ? null : startKey.getRowArray();

        DataValueDescriptor[] stopKeyRow =
            stopKey == null ? null : stopKey.getRowArray();

        /* Open the heap conglomerate */
        heapCC = tc.openConglomerate(
            getHeapConglomerate(),
            false,
            (TransactionController.OPENMODE_FORUPDATE |
                ((wait) ? 0 : TransactionController.OPENMODE_LOCK_NOWAIT)),
            lockMode,
            TransactionController.ISOLATION_REPEATABLE_READ);

        drivingScan = tc.openScan(
            getIndexConglomerate(indexNumber),  // conglomerate to open
            false,                              // don't hold open across commit
            (TransactionController.OPENMODE_FORUPDATE |
                ((wait) ? 0 : TransactionController.OPENMODE_LOCK_NOWAIT)),
            lockMode,
            isolation,
            (FormatableBitSet) null,            // all fields as objects
            startKeyRow,                        // start position - first row
            startOp,                            // startSearchOperation
            qualifier,                          // scanQualifier
            stopKeyRow,                         // stop position - through last row
            stopOp);                            // stopSearchOperation

        // Get an index row based on the base row
        drivingIndexRow = getIndexRowFromHeapRow(
            getIndexRowGenerator( indexNumber ),
            heapCC.newRowLocationTemplate(),
            crf.makeEmptyRow());

        while (drivingScan.fetchNext(drivingIndexRow.getRowArray()))
        {
            // the RowLocation is always the last column of an index row
            baseRowLocation = (RowLocation)
                drivingIndexRow.getColumn(drivingIndexRow.nColumns());

            boolean base_row_exists =
                heapCC.fetch(
                    baseRowLocation, baseRow.getRowArray(),
                    (FormatableBitSet) null);

            if (SanityManager.DEBUG)
            {
                // it can not be possible for heap row to disappear while
                // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                SanityManager.ASSERT(base_row_exists, "base row not found");
            }

            // only delete rows which pass the base-row filter
            if ( filter != null )
            {
                passedFilter = filter.execute( baseRow ).equals( true );
            }
            if ( passedFilter )
            {
                rc.deleteRow( baseRow, baseRowLocation );
                rowsDeleted++;
            }
        }

        heapCC.close();
        drivingScan.close();
        rc.close();

        return rowsDeleted;
    }

    /**
     * Given a key row, return the first matching heap row.
     * <p>
     * LOCKING: shared row locking.
     *
     * @param tc          transaction controller
     * @param key         key to read by.
     * @param indexNumber Key is appropriate for this index.
     * @exception StandardException  Thrown on failure
     */
    ExecRow getRow( TransactionController tc,
                    ExecIndexRow key,
                    int indexNumber )
        throws StandardException
    {
        ConglomerateController heapCC;

        /* Open the heap conglomerate */
        heapCC = tc.openConglomerate(
            getHeapConglomerate(),
            false,
            0,                                  // for read only
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ);

        try
        {
            return getRow( tc, heapCC, key, indexNumber );
        }
        finally
        {
            heapCC.close();
        }
    }

    /**
     * Given an index row and index number return the RowLocation
     * in the heap of the first matching row.
     * Used by the autoincrement code to get the RowLocation in
     * syscolumns given a &lt;tablename, columname&gt; pair.
     *
     * @see DataDictionaryImpl#computeRowLocation(TransactionController, TableDescriptor, String)
     *
     * @param tc          Transaction Controller to use.
     * @param key         Index Row to search in the index.
     * @param indexNumber Identifies the index to use.
     *
     * @exception StandardException  thrown on failure.
     */
    RowLocation getRowLocation(TransactionController tc,
                               ExecIndexRow key,
                               int indexNumber)
        throws StandardException
    {
        ConglomerateController heapCC;
        heapCC = tc.openConglomerate(
            getHeapConglomerate(),
            false,
            0,                                  // for read only
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ);
        try
        {
            RowLocation rl[] = new RowLocation[1];
            // the base row is fetched as a side effect; only the
            // RowLocation written into rl[0] is needed here
            ExecRow notUsed = getRowInternal(tc, heapCC, key, indexNumber, rl);
            return rl[0];
        }
        finally
        {
            heapCC.close();
        }
    }

    /**
     * Given a key row, return the first matching heap row.
     * <p>
     * LOCKING: shared row locking.
     *
     * @param tc          transaction controller
     * @param heapCC      heap to look in
     * @param key         key to read by.
     * @param indexNumber Key is appropriate for this index.
     * @exception StandardException  Thrown on failure
     */
    ExecRow getRow( TransactionController tc,
                    ConglomerateController heapCC,
                    ExecIndexRow key,
                    int indexNumber)
        throws StandardException
    {
        RowLocation rl[] = new RowLocation[1];
        return getRowInternal(tc, heapCC, key, indexNumber, rl);
    }

    /**
     * Worker for getRow/getRowLocation: scans the given index between
     * key (GE) and key (GT), fetches the first matching base row from the
     * heap, and reports its RowLocation through {@code rl[0]}.
     * Returns null when no row matches.
     *
     * @exception StandardException  Thrown on failure
     */
    private ExecRow getRowInternal( TransactionController tc,
                                    ConglomerateController heapCC,
                                    ExecIndexRow key,
                                    int indexNumber,
                                    RowLocation rl[])
        throws StandardException
    {
        ScanController drivingScan;
        ExecIndexRow drivingIndexRow;
        RowLocation baseRowLocation;
        ExecRow baseRow = crf.makeEmptyRow();

        drivingScan = tc.openScan(
            getIndexConglomerate(indexNumber),  // conglomerate to open
            false,                              // don't hold open across commit
            0,                                  // open for read
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ,
            (FormatableBitSet) null,            // all fields as objects
            key.getRowArray(),                  // start position - first row
            ScanController.GE,                  // startSearchOperation
            null,                               // scanQualifier
            key.getRowArray(),                  // stop position - through last row
            ScanController.GT);                 // stopSearchOperation

        // Get an index row based on the base row
        drivingIndexRow = getIndexRowFromHeapRow(
            getIndexRowGenerator( indexNumber ),
            heapCC.newRowLocationTemplate(),
            crf.makeEmptyRow());

        try
        {
            if (drivingScan.fetchNext(drivingIndexRow.getRowArray()))
            {
                // the RowLocation is always the last column of an index row
                rl[0] = baseRowLocation = (RowLocation)
                    drivingIndexRow.getColumn(drivingIndexRow.nColumns());

                boolean base_row_exists =
                    heapCC.fetch(
                        baseRowLocation, baseRow.getRowArray(),
                        (FormatableBitSet) null);

                if (SanityManager.DEBUG)
                {
                    // it can not be possible for heap row to disappear while
                    // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                    SanityManager.ASSERT(base_row_exists, "base row not found");
                }
                return baseRow;
            }
            else
            {
                return null;
            }
        }
        finally
        {
            drivingScan.close();
        }
    }

    /**
     * Updates a base row in a catalog and updates all the corresponding
     * index rows.
     *
     * @param key             key row
     * @param newRow          new version of the row
     * @param indexNumber     index that key operates
     * @param indicesToUpdate array of booleans, one for each index on the
     *                        catalog. if a boolean is true, that means we
     *                        must update the corresponding index because
     *                        changes in the newRow affect it.
     * @param colsToUpdate    array of ints indicating which columns (1 based)
     *                        to update. If null, do all.
     * @param tc              transaction controller
     *
     * @exception StandardException  Thrown on failure
     */
    void updateRow( ExecIndexRow key,
                    ExecRow newRow,
                    int indexNumber,
                    boolean[] indicesToUpdate,
                    int[] colsToUpdate,
                    TransactionController tc)
        throws StandardException
    {
        ExecRow[] newRows = new ExecRow[1];
        newRows[0] = newRow;
        updateRow(key, newRows, indexNumber,
                  indicesToUpdate, colsToUpdate, tc);
    }

    /**
     * Updates a set of base rows in a catalog with the same key on an index
     * and updates all the corresponding index rows.
     *
     * @param key             key row
     * @param newRows         new version of the array of rows
     * @param indexNumber     index that key operates
     * @param indicesToUpdate array of booleans, one for each index on the
     *                        catalog. if a boolean is true, that means we
     *                        must update the corresponding index because
     *                        changes in the newRow affect it.
     * @param colsToUpdate    array of ints indicating which columns (1 based)
     *                        to update. If null, do all.
     * @param tc              transaction controller
     *
     * @exception StandardException  Thrown on failure
     */
    void updateRow( ExecIndexRow key,
                    ExecRow[] newRows,
                    int indexNumber,
                    boolean[] indicesToUpdate,
                    int[] colsToUpdate,
                    TransactionController tc )
        throws StandardException
    {
        ConglomerateController heapCC;
        ScanController drivingScan;
        ExecIndexRow drivingIndexRow;
        RowLocation baseRowLocation;
        ExecRow baseRow = crf.makeEmptyRow();

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(
                indicesToUpdate.length == crf.getNumIndexes(),
                "Wrong number of indices." );
        }

        RowChanger rc = getRowChanger( tc, colsToUpdate,baseRow );

        // Row level locking
        rc.openForUpdate(indicesToUpdate, TransactionController.MODE_RECORD, true);

        /* Open the heap conglomerate */
        heapCC = tc.openConglomerate(
            getHeapConglomerate(),
            false,
            TransactionController.OPENMODE_FORUPDATE,
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ);

        drivingScan = tc.openScan(
            getIndexConglomerate(indexNumber),  // conglomerate to open
            false,                              // don't hold open across commit
            TransactionController.OPENMODE_FORUPDATE,
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_REPEATABLE_READ,
            (FormatableBitSet) null,            // all fields as objects
            key.getRowArray(),                  // start position - first row
            ScanController.GE,                  // startSearchOperation
            null,                               // scanQualifier
            key.getRowArray(),                  // stop position - through last row
            ScanController.GT);                 // stopSearchOperation

        // Get an index row based on the base row
        drivingIndexRow = getIndexRowFromHeapRow(
            getIndexRowGenerator( indexNumber ),
            heapCC.newRowLocationTemplate(),
            crf.makeEmptyRow());

        int rowNum = 0;
        while (drivingScan.fetchNext(drivingIndexRow.getRowArray()))
        {
            // the RowLocation is always the last column of an index row
            baseRowLocation = (RowLocation)
                drivingIndexRow.getColumn(drivingIndexRow.nColumns());

            boolean base_row_exists =
                heapCC.fetch(
                    baseRowLocation, baseRow.getRowArray(),
                    (FormatableBitSet) null);

            if (SanityManager.DEBUG)
            {
                // it can not be possible for heap row to disappear while
                // holding scan cursor on index at ISOLATION_REPEATABLE_READ.
                SanityManager.ASSERT(base_row_exists, "base row not found");
            }

            // rowNum stops advancing at the last element, so if the scan
            // matches more rows than newRows supplies, the final element of
            // newRows is reused for every remaining match
            rc.updateRow(baseRow,
                (rowNum == newRows.length - 1) ?
                    newRows[rowNum] : newRows[rowNum++],
                baseRowLocation );
        }
        rc.finish();
        heapCC.close();
        drivingScan.close();
        rc.close();
    }

    /**
     * Get the Properties associated with creating the heap.
     *
     * @return The Properties associated with creating the heap.
     */
    Properties getCreateHeapProperties()
    {
        return crf.getCreateHeapProperties();
    }

    /**
     * Get the Properties associated with creating the specified index.
     *
     * @param indexNumber  The specified index number.
     *
     * @return The Properties associated with creating the specified index.
     */
    Properties getCreateIndexProperties(int indexNumber)
    {
        return crf.getCreateIndexProperties(indexNumber);
    }

    /**
     * Gets a row changer for this catalog.
     *
     * @param tc          transaction controller
     * @param changedCols the columns to change (1 based), may be null
     * @param baseRow     used to detemine column types at creation time
     *                    only. The row changer does ***Not*** keep a
     *                    referance to this row or change it in any way.
     *
     * @return a row changer for this catalog.
     * @exception StandardException  Thrown on failure
     */
    private RowChanger getRowChanger( TransactionController tc,
                                      int[] changedCols,
                                      ExecRow baseRow)
        throws StandardException
    {
        RowChanger rc;
        int indexCount = crf.getNumIndexes();
        IndexRowGenerator[] irgs = new IndexRowGenerator[ indexCount ];
        long[] cids = new long[ indexCount ];

        if (SanityManager.DEBUG)
        {
            if (changedCols != null)
            {
                for (int i = changedCols.length - 1; i >= 0; i--)
                {
                    SanityManager.ASSERT(changedCols[i] != 0,
                        "Column id is 0, but should be 1 based");
                }
            }
        }

        for ( int ictr = 0; ictr < indexCount; ictr++ )
        {
            irgs[ictr] = getIndexRowGenerator(ictr);
            cids[ictr] = getIndexConglomerate(ictr);
        }

        rc = crf.getExecutionFactory().getRowChanger(getHeapConglomerate(),
                 (StaticCompiledOpenConglomInfo) null,
                 (DynamicCompiledOpenConglomInfo) null,
                 irgs,
                 cids,
                 (StaticCompiledOpenConglomInfo[]) null,
                 (DynamicCompiledOpenConglomInfo[]) null,
                 crf.getHeapColumnCount(),
                 tc,
                 changedCols,
                 getStreamStorableHeapColIds(baseRow),
                 (Activation) null);
        return rc;
    }

    // lazily-computed cache for getStreamStorableHeapColIds(); the column
    // positions depend only on the catalog's row layout, so they are
    // computed once and reused
    private boolean computedStreamStorableHeapColIds = false;
    private int[] streamStorableHeapColIds;

    /**
     * Returns the 0-based positions of the heap columns whose values are
     * StreamStorable, or null when the catalog has none.  The result is
     * cached after the first call.
     *
     * @param baseRow  a template row used only to inspect column types
     * @exception StandardException  Thrown on failure
     */
    private int[] getStreamStorableHeapColIds(ExecRow baseRow)
        throws StandardException
    {
        if (!computedStreamStorableHeapColIds)
        {
            int sshcidLen = 0;
            //
            // Compute the length of streamStorableHeapColIds
            // One entry for each column id.
            DataValueDescriptor[] ra = baseRow.getRowArray();
            for(int ix=0;ix<ra.length;ix++)
                if (ra[ix] instanceof StreamStorable)
                    sshcidLen++;

            //
            // If we have some streamStorableHeapColIds we
            // allocate an array to remember them and fill in
            // the array with the 0 based column ids. If we
            // have none leave streamStorableHeapColIds Null.
            if (sshcidLen > 0)
            {
                streamStorableHeapColIds = new int[sshcidLen];
                int sshcidOffset=0;
                for(int ix=0;ix<ra.length;ix++)
                    if (ra[ix] instanceof StreamStorable)
                        streamStorableHeapColIds[sshcidOffset++] = ix;
            }
            computedStreamStorableHeapColIds = true;
        }
        return streamStorableHeapColIds;
    }

    /**
     * Get an index row based on a row from the heap.
     *
     * @param irg     IndexRowGenerator to use
     * @param rl      RowLocation for heap
     * @param heapRow Row from the heap
     *
     * @return ExecIndexRow  Index row.
     *
     * @exception StandardException  Thrown on error
     */
    private ExecIndexRow getIndexRowFromHeapRow(IndexRowGenerator irg,
                                                RowLocation rl,
                                                ExecRow heapRow)
        throws StandardException
    {
        ExecIndexRow indexRow;

        indexRow = irg.getIndexRowTemplate();
        // Get an index row based on the base row
        irg.getIndexRow(heapRow, rl, indexRow, (FormatableBitSet) null);

        return indexRow;
    }

    /**
     * Debug-only summary of this TabInfoImpl; returns "" in insane builds.
     * (NOTE: "heapCongolomerate" typo is preserved in the output string.)
     */
    public String toString()
    {
        if (SanityManager.DEBUG)
        {
            return "name: " + this.getTableName() +
                "\n\theapCongolomerate: "+heapConglomerate +
                "\n\tnumIndexes: " + ((indexes != null) ? indexes.length : 0) +
                "\n\tnumIndexesSet: " + numIndexesSet +
                "\n\theapSet: " + heapSet +
                "\n";
        }
        else
        {
            return "";
        }
    }
}
apache/solr
37,465
solr/core/src/java/org/apache/solr/search/Grouping.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.lang.reflect.Array; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.CachingCollector; import org.apache.lucene.search.Collector; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.grouping.AllGroupHeadsCollector; import org.apache.lucene.search.grouping.AllGroupsCollector; import 
org.apache.lucene.search.grouping.FirstPassGroupingCollector; import org.apache.lucene.search.grouping.GroupDocs; import org.apache.lucene.search.grouping.SearchGroup; import org.apache.lucene.search.grouping.TermGroupSelector; import org.apache.lucene.search.grouping.TopGroups; import org.apache.lucene.search.grouping.TopGroupsCollector; import org.apache.lucene.search.grouping.ValueSourceGroupSelector; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.mutable.MutableValue; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.FieldType; import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.StrFieldSource; import org.apache.solr.search.grouping.collector.FilterCollector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Basic Solr Grouping infrastructure. Warning NOT thread safe! 
* * @lucene.experimental */ public class Grouping { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private final SolrIndexSearcher searcher; private final QueryResult qr; private final QueryCommand cmd; private final List<Command<?>> commands = new ArrayList<>(); private final boolean main; private final boolean cacheSecondPassSearch; private final int maxDocsPercentageToCache; private Sort groupSort; private Sort withinGroupSort; private int limitDefault; private int docsPerGroupDefault; private int groupOffsetDefault; private Format defaultFormat; private TotalCount defaultTotalCount; private int maxDoc; private boolean needScores; private boolean getDocSet; private boolean getGroupedDocSet; private boolean getDocList; // doclist needed for debugging or highlighting private Query query; private NamedList<Object> grouped = new SimpleOrderedMap<>(); // used for tracking unique docs when we need a doclist private Set<Integer> idSet = new LinkedHashSet<>(); private int maxMatches; // max number of matches from any grouping command private float maxScore = Float.NaN; // max score seen in any doclist private boolean signalCacheWarning = false; // output if one of the grouping commands should be used as the main result. public DocList mainResult; /** * @param cacheSecondPassSearch Whether to cache the documents and scores from the first pass * search for the second pass search. * @param maxDocsPercentageToCache The maximum number of documents in a percentage relative from * maxdoc that is allowed in the cache. When this threshold is met, the cache is not used in * the second pass search. 
*/ public Grouping( SolrIndexSearcher searcher, QueryResult qr, QueryCommand cmd, boolean cacheSecondPassSearch, int maxDocsPercentageToCache, boolean main) { this.searcher = searcher; this.qr = qr; this.cmd = cmd; this.cacheSecondPassSearch = cacheSecondPassSearch; this.maxDocsPercentageToCache = maxDocsPercentageToCache; this.main = main; } public void add(Grouping.Command<?> groupingCommand) { commands.add(groupingCommand); } /** * Adds a field command based on the specified field. If the field is not compatible with {@link * CommandField} it invokes the {@link #addFunctionCommand(String, * org.apache.solr.request.SolrQueryRequest)} method. * * @param field The fieldname to group by. */ public void addFieldCommand(String field, SolrQueryRequest request) throws SyntaxError { SchemaField schemaField = searcher .getSchema() .getField(field); // Throws an exception when field doesn't exist. Bad request. FieldType fieldType = schemaField.getType(); ValueSource valueSource = fieldType.getValueSource(schemaField, null); if (!(valueSource instanceof StrFieldSource)) { addFunctionCommand(field, request); return; } Grouping.CommandField gc = new CommandField(); gc.withinGroupSort = withinGroupSort; gc.groupBy = field; gc.key = field; gc.numGroups = limitDefault; gc.docsPerGroup = docsPerGroupDefault; gc.groupOffset = groupOffsetDefault; gc.offset = cmd.getOffset(); gc.groupSort = groupSort; gc.format = defaultFormat; gc.totalCount = defaultTotalCount; if (main) { gc.main = true; gc.format = Grouping.Format.simple; } if (gc.format == Grouping.Format.simple) { gc.groupOffset = 0; // doesn't make sense } commands.add(gc); } public void addFunctionCommand(String groupByStr, SolrQueryRequest request) throws SyntaxError { ValueSource valueSource = FunctionQParser.parseAsValueSource(groupByStr, request); final Grouping.Command<?> gc; if (valueSource instanceof StrFieldSource) { String field = ((StrFieldSource) valueSource).getField(); CommandField commandField = new 
CommandField(); commandField.groupBy = field; gc = commandField; } else { CommandFunc commandFunc = new CommandFunc(); commandFunc.groupBy = valueSource; gc = commandFunc; } gc.withinGroupSort = withinGroupSort; gc.key = groupByStr; gc.numGroups = limitDefault; gc.docsPerGroup = docsPerGroupDefault; gc.groupOffset = groupOffsetDefault; gc.offset = cmd.getOffset(); gc.groupSort = groupSort; gc.format = defaultFormat; gc.totalCount = defaultTotalCount; if (main) { gc.main = true; gc.format = Grouping.Format.simple; } if (gc.format == Grouping.Format.simple) { gc.groupOffset = 0; // doesn't make sense } commands.add(gc); } public void addQueryCommand(String groupByStr, SolrQueryRequest request) throws SyntaxError { QParser parser = QParser.getParser(groupByStr, request); Query gq = parser.getQuery(); if (gq == null) { // normalize a null query to a query that matches nothing gq = new MatchNoDocsQuery(); } Grouping.CommandQuery gc = new CommandQuery(); gc.query = gq; gc.withinGroupSort = withinGroupSort; gc.key = groupByStr; gc.numGroups = limitDefault; gc.docsPerGroup = docsPerGroupDefault; gc.groupOffset = groupOffsetDefault; // these two params will only be used if this is for the main result set gc.offset = cmd.getOffset(); gc.numGroups = limitDefault; gc.format = defaultFormat; if (main) { gc.main = true; gc.format = Grouping.Format.simple; } if (gc.format == Grouping.Format.simple) { gc.docsPerGroup = gc.numGroups; // doesn't make sense to limit to one gc.groupOffset = gc.offset; } commands.add(gc); } public Grouping setGroupSort(Sort groupSort) { this.groupSort = groupSort; return this; } public Grouping setWithinGroupSort(Sort withinGroupSort) { this.withinGroupSort = withinGroupSort; return this; } public Grouping setLimitDefault(int limitDefault) { this.limitDefault = limitDefault; return this; } public Grouping setDocsPerGroupDefault(int docsPerGroupDefault) { this.docsPerGroupDefault = docsPerGroupDefault; return this; } public Grouping 
setGroupOffsetDefault(int groupOffsetDefault) { this.groupOffsetDefault = groupOffsetDefault; return this; } public Grouping setDefaultFormat(Format defaultFormat) { this.defaultFormat = defaultFormat; return this; } public Grouping setDefaultTotalCount(TotalCount defaultTotalCount) { this.defaultTotalCount = defaultTotalCount; return this; } public Grouping setGetGroupedDocSet(boolean getGroupedDocSet) { this.getGroupedDocSet = getGroupedDocSet; return this; } public List<Command<?>> getCommands() { return commands; } public void execute() throws IOException { if (commands.isEmpty()) { throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Specify at least one field, function or query to group by."); } DocListAndSet out = new DocListAndSet(); qr.setDocListAndSet(out); SolrIndexSearcher.ProcessedFilter pf = searcher.getProcessedFilter(cmd.getFilterList()); final Query searchQuery = QueryUtils.combineQueryAndFilter(QueryUtils.makeQueryable(cmd.getQuery()), pf.filter); maxDoc = searcher.maxDoc(); needScores = (cmd.getFlags() & SolrIndexSearcher.GET_SCORES) != 0; boolean cacheScores = false; // NOTE: Change this when withinGroupSort can be specified per group if (!needScores && !commands.isEmpty()) { Sort withinGroupSort = commands.get(0).withinGroupSort; cacheScores = withinGroupSort == null || withinGroupSort.needsScores(); } else if (needScores) { cacheScores = needScores; } getDocSet = (cmd.getFlags() & SolrIndexSearcher.GET_DOCSET) != 0; getDocList = (cmd.getFlags() & SolrIndexSearcher.GET_DOCLIST) != 0; query = QueryUtils.makeQueryable(cmd.getQuery()); for (Command<?> cmd : commands) { cmd.prepare(); } AllGroupHeadsCollector<?> allGroupHeadsCollector = null; List<Collector> collectors = new ArrayList<>(commands.size()); for (Command<?> cmd : commands) { Collector collector = cmd.createFirstPassCollector(); if (collector != null) { collectors.add(collector); } if (getGroupedDocSet && allGroupHeadsCollector == null) { collectors.add(allGroupHeadsCollector 
= cmd.createAllGroupCollector()); } } DocSetCollector setCollector = null; if (getDocSet && allGroupHeadsCollector == null) { setCollector = new DocSetCollector(maxDoc); collectors.add(setCollector); } Collector allCollectors = MultiCollector.wrap(collectors); CachingCollector cachedCollector = null; if (cacheSecondPassSearch && allCollectors != null) { int maxDocsToCache = (int) Math.round(maxDoc * (maxDocsPercentageToCache / 100.0d)); // Only makes sense to cache if we cache more than zero. // Maybe we should have a minimum and a maximum, that defines the window we would like caching // for. if (maxDocsToCache > 0) { allCollectors = cachedCollector = CachingCollector.create(allCollectors, cacheScores, maxDocsToCache); } } if (pf.postFilter != null) { pf.postFilter.setLastDelegate(allCollectors); allCollectors = pf.postFilter; } if (allCollectors != null) { searchWithTimeLimiter(searchQuery, allCollectors); if (allCollectors instanceof DelegatingCollector) { ((DelegatingCollector) allCollectors).complete(); } } if (getGroupedDocSet && allGroupHeadsCollector != null) { qr.setDocSet(new BitDocSet(allGroupHeadsCollector.retrieveGroupHeads(maxDoc))); } else if (getDocSet) { qr.setDocSet(setCollector.getDocSet()); } collectors.clear(); for (Command<?> cmd : commands) { Collector collector = cmd.createSecondPassCollector(); if (collector != null) collectors.add(collector); } if (!collectors.isEmpty()) { Collector secondPhaseCollectors = MultiCollector.wrap(collectors.toArray(new Collector[0])); if (collectors.size() > 0) { if (cachedCollector != null) { if (cachedCollector.isCached()) { cachedCollector.replay(secondPhaseCollectors); } else { signalCacheWarning = true; log.warn( String.format( Locale.ROOT, "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent", maxDocsPercentageToCache)); log.warn("Please increase cache size or disable group caching."); searchWithTimeLimiter(searchQuery, secondPhaseCollectors); } } else { if 
(pf.postFilter != null) { pf.postFilter.setLastDelegate(secondPhaseCollectors); secondPhaseCollectors = pf.postFilter; } searchWithTimeLimiter(searchQuery, secondPhaseCollectors); } if (secondPhaseCollectors instanceof DelegatingCollector) { ((DelegatingCollector) secondPhaseCollectors).complete(); } } } for (Command<?> cmd : commands) { cmd.finish(); } qr.groupedResults = grouped; if (getDocList) { int sz = idSet.size(); int[] ids = new int[sz]; int idx = 0; for (int val : idSet) { ids[idx++] = val; } qr.setDocList( new DocSlice(0, sz, ids, null, maxMatches, maxScore, TotalHits.Relation.EQUAL_TO)); } } /** * Invokes search with the specified filter and collector. If a time limit has been specified, * wrap the collector in a TimeLimitingCollector */ private void searchWithTimeLimiter(final Query searchQuery, Collector collector) throws IOException { searcher.search(searchQuery, collector); } /** * Returns offset + len if len equals zero or higher. Otherwise returns max. * * @param offset The offset * @param len The number of documents to return * @param max The number of document to return if len &lt; 0 or if offset + len &gt; 0 * @return offset + len if len equals zero or higher. Otherwise returns max */ public static int getMax(int offset, int len, int max) { int v = len < 0 ? max : offset + len; if (v < 0 || v > max) v = max; return v; } /** * Returns whether a cache warning should be send to the client. The value <code>true</code> is * returned when the cache is emptied because the caching limits where met, otherwise <code>false * </code> is returned. * * @return whether a cache warning should be send to the client */ public boolean isSignalCacheWarning() { return signalCacheWarning; } // ===== Inner classes ===== public static enum Format { /** Grouped result. Each group has its own result set. */ grouped, /** Flat result. All documents of all groups are put in one list. */ simple } public static enum TotalCount { /** Computations should be based on groups. 
*/ grouped, /** Computations should be based on plain documents, so not taking grouping into account. */ ungrouped } /** * General group command. A group command is responsible for creating the first and second pass * collectors. A group command is also responsible for creating the response structure. * * <p>Note: Maybe the creating the response structure should be done in something like a * ReponseBuilder??? Warning NOT thread save! */ public abstract class Command<T> { public String key; // the name to use for this group in the response public Sort withinGroupSort; // the sort of the documents *within* a single group. public Sort groupSort; // the sort between groups public int docsPerGroup; // how many docs in each group - from "group.limit" param, default=1 public int groupOffset; // the offset within each group (for paging within each group) public int numGroups; // how many groups - defaults to the "rows" parameter // How many groups should actually be found. Based on groupOffset and numGroups. int actualGroupsToFind; public int offset; // offset into the list of groups public Format format; public boolean main; // use as the main result in simple format (grouped.main=true param) public TotalCount totalCount = TotalCount.ungrouped; TopGroups<T> result; /** * Prepare this <code>Command</code> for execution. * * @throws IOException If I/O related errors occur */ protected abstract void prepare() throws IOException; /** * Returns one or more {@link Collector} instances that are needed to perform the first pass * search. If multiple Collectors are returned then these wrapped in a {@link * org.apache.lucene.search.MultiCollector}. * * @return one or more {@link Collector} instances that are need to perform the first pass * search * @throws IOException If I/O related errors occur */ protected abstract Collector createFirstPassCollector() throws IOException; /** * Returns zero or more {@link Collector} instances that are needed to perform the second pass * search. 
In the case when no {@link Collector} instances are created <code>null</code> is * returned. If multiple Collectors are returned then these wrapped in a {@link * org.apache.lucene.search.MultiCollector}. * * @return zero or more {@link Collector} instances that are needed to perform the second pass * search * @throws IOException If I/O related errors occur */ protected Collector createSecondPassCollector() throws IOException { return null; } /** * Returns a collector that is able to return the most relevant document of all groups. Returns * <code>null</code> if the command doesn't support this type of collector. * * @return a collector that is able to return the most relevant document of all groups. * @throws IOException If I/O related errors occur */ public AllGroupHeadsCollector<?> createAllGroupCollector() throws IOException { return null; } /** * Performs any necessary post actions to prepare the response. * * @throws IOException If I/O related errors occur */ protected abstract void finish() throws IOException; /** * Returns the number of matches for this <code>Command</code>. * * @return the number of matches for this <code>Command</code> */ public abstract int getMatches(); /** * Returns the number of groups found for this <code>Command</code>. If the command doesn't * support counting the groups <code>null</code> is returned. 
* * @return the number of groups found for this <code>Command</code> */ protected Integer getNumberOfGroups() { return null; } protected void populateScoresIfNecessary() throws IOException { if (needScores) { for (GroupDocs<?> groups : result.groups) { if (log.isDebugEnabled()) { log.debug("Group score is 1: {}", groups.maxScore()); } TopFieldCollector.populateScores(groups.scoreDocs(), searcher, query); } } } protected NamedList<Object> commonResponse() { NamedList<Object> groupResult = new SimpleOrderedMap<>(); grouped.add(key, groupResult); // grouped={ key={ int matches = getMatches(); groupResult.add("matches", matches); if (totalCount == TotalCount.grouped) { Integer totalNrOfGroups = getNumberOfGroups(); groupResult.add("ngroups", totalNrOfGroups == null ? Integer.valueOf(0) : totalNrOfGroups); } maxMatches = Math.max(maxMatches, matches); return groupResult; } protected DocList getDocList(GroupDocs<?> groups) { assert groups.totalHits().relation() == TotalHits.Relation.EQUAL_TO; int max = Math.toIntExact(groups.totalHits().value()); int off = groupOffset; int len = docsPerGroup; if (format == Format.simple) { off = offset; len = numGroups; } int docsToCollect = getMax(off, len, max); // TODO: implement a DocList impl that doesn't need to start at offset=0 int docsCollected = Math.min(docsToCollect, groups.scoreDocs().length); int ids[] = new int[docsCollected]; float[] scores = needScores ? 
new float[docsCollected] : null; for (int i = 0; i < ids.length; i++) { ids[i] = groups.scoreDocs()[i].doc; if (scores != null) scores[i] = groups.scoreDocs()[i].score; } float score = groups.maxScore(); maxScore = maxAvoidNaN(score, maxScore); if (log.isDebugEnabled()) { log.debug("Here, score is {} and maxScore is {}", score, maxScore); } DocSlice docs = new DocSlice( off, Math.max(0, ids.length - off), ids, scores, groups.totalHits().value(), score, TotalHits.Relation.EQUAL_TO); if (getDocList) { DocIterator iter = docs.iterator(); while (iter.hasNext()) idSet.add(iter.nextDoc()); } return docs; } protected void addDocList(NamedList<? super DocList> rsp, GroupDocs<?> groups) { rsp.add("doclist", getDocList(groups)); } // Flatten the groups and get up offset + rows documents protected DocList createSimpleResponse() { @SuppressWarnings("unchecked") GroupDocs<T>[] groups = result != null ? result.groups : (GroupDocs<T>[]) Array.newInstance(GroupDocs.class, 0); List<Integer> ids = new ArrayList<>(); List<Float> scores = new ArrayList<>(); int docsToGather = getMax(offset, numGroups, maxDoc); int docsGathered = 0; float maxScore = Float.NaN; outer: for (GroupDocs<T> group : groups) { maxScore = maxAvoidNaN(maxScore, group.maxScore()); for (ScoreDoc scoreDoc : group.scoreDocs()) { if (docsGathered >= docsToGather) { break outer; } ids.add(scoreDoc.doc); scores.add(scoreDoc.score); docsGathered++; } } int len = docsGathered > offset ? 
docsGathered - offset : 0; int[] docs = ids.stream().mapToInt(v -> v).toArray(); float[] docScores = new float[scores.size()]; // Convert scores Float to primitive docScores float for (int i = 0; i < scores.size(); i++) { docScores[i] = scores.get(i); } DocSlice docSlice = new DocSlice( offset, len, docs, docScores, getMatches(), maxScore, TotalHits.Relation.EQUAL_TO); if (getDocList) { for (int i = offset; i < docs.length; i++) { idSet.add(docs[i]); } } return docSlice; } } /** * Differs from {@link Math#max(float, float)} in that if only one side is NaN, we return the * other. */ private float maxAvoidNaN(float valA, float valB) { if (Float.isNaN(valA) || valB > valA) { return valB; } else { return valA; } } /** A group command for grouping on a field. */ public class CommandField extends Command<BytesRef> { public String groupBy; FirstPassGroupingCollector<BytesRef> firstPass; TopGroupsCollector<BytesRef> secondPass; AllGroupsCollector<BytesRef> allGroupsCollector; // If offset falls outside the number of documents a group can provide use this collector // instead of secondPass TotalHitCountCollector fallBackCollector; Collection<SearchGroup<BytesRef>> topGroups; @Override protected void prepare() throws IOException { actualGroupsToFind = getMax(offset, numGroups, maxDoc); } @Override protected Collector createFirstPassCollector() throws IOException { // Ok we don't want groups, but do want a total count if (actualGroupsToFind <= 0) { fallBackCollector = new TotalHitCountCollector(); return fallBackCollector; } groupSort = groupSort == null ? Sort.RELEVANCE : groupSort; firstPass = new FirstPassGroupingCollector<>( new TermGroupSelector(groupBy), searcher.weightSort(groupSort), actualGroupsToFind); return firstPass; } @Override protected Collector createSecondPassCollector() throws IOException { if (actualGroupsToFind <= 0) { allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy)); return totalCount == TotalCount.grouped ? 
allGroupsCollector : null; } topGroups = format == Format.grouped ? firstPass.getTopGroups(offset) : firstPass.getTopGroups(0); if (topGroups == null) { if (totalCount == TotalCount.grouped) { allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy)); fallBackCollector = new TotalHitCountCollector(); return MultiCollector.wrap(allGroupsCollector, fallBackCollector); } else { fallBackCollector = new TotalHitCountCollector(); return fallBackCollector; } } int groupedDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc); groupedDocsToCollect = Math.max(groupedDocsToCollect, 1); Sort withinGroupSort = this.withinGroupSort != null ? this.withinGroupSort : Sort.RELEVANCE; secondPass = new TopGroupsCollector<>( new TermGroupSelector(groupBy), topGroups, groupSort, withinGroupSort, groupedDocsToCollect, needScores); if (totalCount == TotalCount.grouped) { allGroupsCollector = new AllGroupsCollector<>(new TermGroupSelector(groupBy)); return MultiCollector.wrap(secondPass, allGroupsCollector); } else { return secondPass; } } @Override public AllGroupHeadsCollector<?> createAllGroupCollector() throws IOException { Sort sortWithinGroup = withinGroupSort != null ? 
withinGroupSort : Sort.RELEVANCE; return AllGroupHeadsCollector.newCollector(new TermGroupSelector(groupBy), sortWithinGroup); } @Override @SuppressWarnings({"unchecked"}) protected void finish() throws IOException { if (secondPass != null) { result = secondPass.getTopGroups(0); populateScoresIfNecessary(); } if (main) { mainResult = createSimpleResponse(); return; } NamedList<Object> groupResult = commonResponse(); if (format == Format.simple) { groupResult.add("doclist", createSimpleResponse()); return; } List<NamedList<Object>> groupList = new ArrayList<>(); groupResult.add("groups", groupList); // grouped={ key={ groups=[ if (result == null) { return; } // handle case of rows=0 if (numGroups == 0) return; for (GroupDocs<BytesRef> group : result.groups) { NamedList<Object> nl = new SimpleOrderedMap<>(); groupList.add(nl); // grouped={ key={ groups=[ { // To keep the response format compatable with trunk. // In trunk MutableValue can convert an indexed value to its native type. E.g. string to int // The only option I currently see is the use the FieldType for this if (group.groupValue() != null) { SchemaField schemaField = searcher.getSchema().getField(groupBy); FieldType fieldType = schemaField.getType(); // use createFields so that fields having doc values are also supported // TODO: currently, this path is called only for string field, so // should we just use fieldType.toObject(schemaField, group.groupValue) here? 
List<IndexableField> fields = schemaField.createFields(group.groupValue().utf8ToString()); if (fields != null && !fields.isEmpty()) { nl.add("groupValue", fieldType.toObject(fields.get(0))); } else { throw new SolrException( ErrorCode.INVALID_STATE, "Couldn't create schema field for grouping, group value: " + group.groupValue().utf8ToString() + ", field: " + schemaField); } } else { nl.add("groupValue", null); } addDocList(nl, group); } } @Override public int getMatches() { if (result == null && fallBackCollector == null) { return 0; } return result != null ? result.totalHitCount : fallBackCollector.getTotalHits(); } @Override protected Integer getNumberOfGroups() { return allGroupsCollector == null ? null : allGroupsCollector.getGroupCount(); } } /** A group command for grouping on a query. */ // NOTE: doesn't need to be generic. Maybe Command interface --> First / Second pass abstract // impl. public class CommandQuery extends Command<Object> { public Query query; TopDocsCollector<? extends ScoreDoc> topCollector; MaxScoreCollector maxScoreCollector; FilterCollector collector; @Override protected void prepare() throws IOException { actualGroupsToFind = getMax(offset, numGroups, maxDoc); } @Override protected Collector createFirstPassCollector() throws IOException { DocSet groupFilt = searcher.getDocSet(query); int groupDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc); Collector subCollector; if (withinGroupSort == null || withinGroupSort.equals(Sort.RELEVANCE)) { subCollector = topCollector = new TopScoreDocCollectorManager(groupDocsToCollect, Integer.MAX_VALUE) .newCollector(); } else { topCollector = new TopFieldCollectorManager( searcher.weightSort(withinGroupSort), groupDocsToCollect, Integer.MAX_VALUE) .newCollector(); if (needScores) { maxScoreCollector = new MaxScoreCollector(); subCollector = MultiCollector.wrap(topCollector, maxScoreCollector); } else { subCollector = topCollector; } } collector = new FilterCollector(groupFilt, subCollector); 
return collector; } @Override protected void finish() throws IOException { TopDocs topDocs = topCollector.topDocs(); float maxScore; if (withinGroupSort == null || withinGroupSort.equals(Sort.RELEVANCE)) { maxScore = topDocs.scoreDocs.length == 0 ? Float.NaN : topDocs.scoreDocs[0].score; } else if (needScores) { // use top-level query to populate the scores if (log.isDebugEnabled()) { log.debug("Group score is 2: {}", maxScoreCollector.getMaxScore()); } TopFieldCollector.populateScores(topDocs.scoreDocs, searcher, Grouping.this.query); maxScore = maxScoreCollector.getMaxScore(); } else { maxScore = Float.NaN; } GroupDocs<String> groupDocs = new GroupDocs<>( Float.NaN, maxScore, topDocs.totalHits, topDocs.scoreDocs, query.toString(), null); if (main) { mainResult = getDocList(groupDocs); } else { NamedList<Object> rsp = commonResponse(); addDocList(rsp, groupDocs); } } @Override public int getMatches() { return collector.getMatches(); } } /** A command for grouping on a function. */ public class CommandFunc extends Command<MutableValue> { public ValueSource groupBy; Map<Object, Object> context; private ValueSourceGroupSelector newSelector() { return new ValueSourceGroupSelector(groupBy, context); } FirstPassGroupingCollector<MutableValue> firstPass; TopGroupsCollector<MutableValue> secondPass; // If offset falls outside the number of documents a group can provide use this collector // instead of secondPass TotalHitCountCollector fallBackCollector; AllGroupsCollector<MutableValue> allGroupsCollector; Collection<SearchGroup<MutableValue>> topGroups; @Override @SuppressWarnings({"unchecked"}) protected void prepare() throws IOException { context = ValueSource.newContext(searcher); groupBy.createWeight(context, searcher); actualGroupsToFind = getMax(offset, numGroups, maxDoc); } @Override protected Collector createFirstPassCollector() throws IOException { // Ok we don't want groups, but do want a total count if (actualGroupsToFind <= 0) { fallBackCollector = new 
TotalHitCountCollector(); return fallBackCollector; } groupSort = groupSort == null ? Sort.RELEVANCE : groupSort; firstPass = new FirstPassGroupingCollector<>( newSelector(), searcher.weightSort(groupSort), actualGroupsToFind); return firstPass; } @Override protected Collector createSecondPassCollector() throws IOException { if (actualGroupsToFind <= 0) { allGroupsCollector = new AllGroupsCollector<>(newSelector()); return totalCount == TotalCount.grouped ? allGroupsCollector : null; } topGroups = format == Format.grouped ? firstPass.getTopGroups(offset) : firstPass.getTopGroups(0); if (topGroups == null) { if (totalCount == TotalCount.grouped) { allGroupsCollector = new AllGroupsCollector<>(newSelector()); fallBackCollector = new TotalHitCountCollector(); return MultiCollector.wrap(allGroupsCollector, fallBackCollector); } else { fallBackCollector = new TotalHitCountCollector(); return fallBackCollector; } } int groupdDocsToCollect = getMax(groupOffset, docsPerGroup, maxDoc); groupdDocsToCollect = Math.max(groupdDocsToCollect, 1); Sort withinGroupSort = this.withinGroupSort != null ? this.withinGroupSort : Sort.RELEVANCE; secondPass = new TopGroupsCollector<>( newSelector(), topGroups, groupSort, withinGroupSort, groupdDocsToCollect, needScores); if (totalCount == TotalCount.grouped) { allGroupsCollector = new AllGroupsCollector<>(newSelector()); return MultiCollector.wrap(secondPass, allGroupsCollector); } else { return secondPass; } } @Override public AllGroupHeadsCollector<?> createAllGroupCollector() throws IOException { Sort sortWithinGroup = withinGroupSort != null ? 
withinGroupSort : Sort.RELEVANCE; return AllGroupHeadsCollector.newCollector(newSelector(), sortWithinGroup); } @Override protected void finish() throws IOException { if (secondPass != null) { result = secondPass.getTopGroups(0); populateScoresIfNecessary(); } if (main) { mainResult = createSimpleResponse(); return; } NamedList<Object> groupResult = commonResponse(); if (format == Format.simple) { groupResult.add("doclist", createSimpleResponse()); return; } List<NamedList<Object>> groupList = new ArrayList<>(); groupResult.add("groups", groupList); // grouped={ key={ groups=[ if (result == null) { return; } // handle case of rows=0 if (numGroups == 0) return; for (GroupDocs<MutableValue> group : result.groups) { NamedList<Object> nl = new SimpleOrderedMap<>(); groupList.add(nl); // grouped={ key={ groups=[ { nl.add("groupValue", group.groupValue().toObject()); addDocList(nl, group); } } @Override public int getMatches() { if (result == null && fallBackCollector == null) { return 0; } return result != null ? result.totalHitCount : fallBackCollector.getTotalHits(); } @Override protected Integer getNumberOfGroups() { return allGroupsCollector == null ? null : allGroupsCollector.getGroupCount(); } } }
googleapis/google-cloud-java
37,292
java-filestore/proto-google-cloud-filestore-v1/src/main/java/com/google/cloud/filestore/v1/CreateSnapshotRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/filestore/v1/cloud_filestore_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.filestore.v1; /** * * * <pre> * CreateSnapshotRequest creates a snapshot. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.CreateSnapshotRequest} */ public final class CreateSnapshotRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.filestore.v1.CreateSnapshotRequest) CreateSnapshotRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateSnapshotRequest.newBuilder() to construct. 
private CreateSnapshotRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateSnapshotRequest() { parent_ = ""; snapshotId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateSnapshotRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateSnapshotRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.CreateSnapshotRequest.class, com.google.cloud.filestore.v1.CreateSnapshotRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SNAPSHOT_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object snapshotId_ = ""; /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The snapshotId. */ @java.lang.Override public java.lang.String getSnapshotId() { java.lang.Object ref = snapshotId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); snapshotId_ = s; return s; } } /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for snapshotId. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSnapshotIdBytes() { java.lang.Object ref = snapshotId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); snapshotId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int SNAPSHOT_FIELD_NUMBER = 3; private com.google.cloud.filestore.v1.Snapshot snapshot_; /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the snapshot field is set. */ @java.lang.Override public boolean hasSnapshot() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The snapshot. */ @java.lang.Override public com.google.cloud.filestore.v1.Snapshot getSnapshot() { return snapshot_ == null ? com.google.cloud.filestore.v1.Snapshot.getDefaultInstance() : snapshot_; } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.filestore.v1.SnapshotOrBuilder getSnapshotOrBuilder() { return snapshot_ == null ? 
com.google.cloud.filestore.v1.Snapshot.getDefaultInstance() : snapshot_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, snapshotId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getSnapshot()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(snapshotId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, snapshotId_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getSnapshot()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.filestore.v1.CreateSnapshotRequest)) { return super.equals(obj); } com.google.cloud.filestore.v1.CreateSnapshotRequest other = (com.google.cloud.filestore.v1.CreateSnapshotRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getSnapshotId().equals(other.getSnapshotId())) return false; if (hasSnapshot() != other.hasSnapshot()) 
return false; if (hasSnapshot()) { if (!getSnapshot().equals(other.getSnapshot())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + SNAPSHOT_ID_FIELD_NUMBER; hash = (53 * hash) + getSnapshotId().hashCode(); if (hasSnapshot()) { hash = (37 * hash) + SNAPSHOT_FIELD_NUMBER; hash = (53 * hash) + getSnapshot().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(com.google.cloud.filestore.v1.CreateSnapshotRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * CreateSnapshotRequest creates a snapshot. * </pre> * * Protobuf type {@code google.cloud.filestore.v1.CreateSnapshotRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1.CreateSnapshotRequest) com.google.cloud.filestore.v1.CreateSnapshotRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateSnapshotRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateSnapshotRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.filestore.v1.CreateSnapshotRequest.class, com.google.cloud.filestore.v1.CreateSnapshotRequest.Builder.class); } // Construct using com.google.cloud.filestore.v1.CreateSnapshotRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSnapshotFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; parent_ = ""; snapshotId_ = ""; snapshot_ = null; if (snapshotBuilder_ != null) { snapshotBuilder_.dispose(); snapshotBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.filestore.v1.CloudFilestoreServiceProto .internal_static_google_cloud_filestore_v1_CreateSnapshotRequest_descriptor; } @java.lang.Override public com.google.cloud.filestore.v1.CreateSnapshotRequest getDefaultInstanceForType() { return com.google.cloud.filestore.v1.CreateSnapshotRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.filestore.v1.CreateSnapshotRequest build() { com.google.cloud.filestore.v1.CreateSnapshotRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.filestore.v1.CreateSnapshotRequest buildPartial() { com.google.cloud.filestore.v1.CreateSnapshotRequest result = new com.google.cloud.filestore.v1.CreateSnapshotRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.filestore.v1.CreateSnapshotRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.snapshotId_ = snapshotId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.snapshot_ = snapshotBuilder_ == null ? 
snapshot_ : snapshotBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.filestore.v1.CreateSnapshotRequest) { return mergeFrom((com.google.cloud.filestore.v1.CreateSnapshotRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.filestore.v1.CreateSnapshotRequest other) { if (other == com.google.cloud.filestore.v1.CreateSnapshotRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getSnapshotId().isEmpty()) { snapshotId_ = other.snapshotId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasSnapshot()) { mergeSnapshot(other.getSnapshot()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { snapshotId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getSnapshotFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The Filestore Instance to create the snapshots of, in the format * `projects/{project_id}/locations/{location}/instances/{instance_id}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object snapshotId_ = ""; /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The snapshotId. */ public java.lang.String getSnapshotId() { java.lang.Object ref = snapshotId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); snapshotId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for snapshotId. */ public com.google.protobuf.ByteString getSnapshotIdBytes() { java.lang.Object ref = snapshotId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); snapshotId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. 
* </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The snapshotId to set. * @return This builder for chaining. */ public Builder setSnapshotId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } snapshotId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearSnapshotId() { snapshotId_ = getDefaultInstance().getSnapshotId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The ID to use for the snapshot. * The ID must be unique within the specified instance. * * This value must start with a lowercase letter followed by up to 62 * lowercase letters, numbers, or hyphens, and cannot end with a hyphen. * </pre> * * <code>string snapshot_id = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for snapshotId to set. * @return This builder for chaining. */ public Builder setSnapshotIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); snapshotId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.filestore.v1.Snapshot snapshot_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Snapshot, com.google.cloud.filestore.v1.Snapshot.Builder, com.google.cloud.filestore.v1.SnapshotOrBuilder> snapshotBuilder_; /** * * * <pre> * Required. A snapshot resource. 
* </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the snapshot field is set. */ public boolean hasSnapshot() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The snapshot. */ public com.google.cloud.filestore.v1.Snapshot getSnapshot() { if (snapshotBuilder_ == null) { return snapshot_ == null ? com.google.cloud.filestore.v1.Snapshot.getDefaultInstance() : snapshot_; } else { return snapshotBuilder_.getMessage(); } } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSnapshot(com.google.cloud.filestore.v1.Snapshot value) { if (snapshotBuilder_ == null) { if (value == null) { throw new NullPointerException(); } snapshot_ = value; } else { snapshotBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSnapshot(com.google.cloud.filestore.v1.Snapshot.Builder builderForValue) { if (snapshotBuilder_ == null) { snapshot_ = builderForValue.build(); } else { snapshotBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. A snapshot resource. 
* </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSnapshot(com.google.cloud.filestore.v1.Snapshot value) { if (snapshotBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && snapshot_ != null && snapshot_ != com.google.cloud.filestore.v1.Snapshot.getDefaultInstance()) { getSnapshotBuilder().mergeFrom(value); } else { snapshot_ = value; } } else { snapshotBuilder_.mergeFrom(value); } if (snapshot_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSnapshot() { bitField0_ = (bitField0_ & ~0x00000004); snapshot_ = null; if (snapshotBuilder_ != null) { snapshotBuilder_.dispose(); snapshotBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.filestore.v1.Snapshot.Builder getSnapshotBuilder() { bitField0_ |= 0x00000004; onChanged(); return getSnapshotFieldBuilder().getBuilder(); } /** * * * <pre> * Required. A snapshot resource. * </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.filestore.v1.SnapshotOrBuilder getSnapshotOrBuilder() { if (snapshotBuilder_ != null) { return snapshotBuilder_.getMessageOrBuilder(); } else { return snapshot_ == null ? com.google.cloud.filestore.v1.Snapshot.getDefaultInstance() : snapshot_; } } /** * * * <pre> * Required. A snapshot resource. 
* </pre> * * <code> * .google.cloud.filestore.v1.Snapshot snapshot = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Snapshot, com.google.cloud.filestore.v1.Snapshot.Builder, com.google.cloud.filestore.v1.SnapshotOrBuilder> getSnapshotFieldBuilder() { if (snapshotBuilder_ == null) { snapshotBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.filestore.v1.Snapshot, com.google.cloud.filestore.v1.Snapshot.Builder, com.google.cloud.filestore.v1.SnapshotOrBuilder>( getSnapshot(), getParentForChildren(), isClean()); snapshot_ = null; } return snapshotBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1.CreateSnapshotRequest) } // @@protoc_insertion_point(class_scope:google.cloud.filestore.v1.CreateSnapshotRequest) private static final com.google.cloud.filestore.v1.CreateSnapshotRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.filestore.v1.CreateSnapshotRequest(); } public static com.google.cloud.filestore.v1.CreateSnapshotRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateSnapshotRequest> PARSER = new com.google.protobuf.AbstractParser<CreateSnapshotRequest>() { @java.lang.Override public CreateSnapshotRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateSnapshotRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateSnapshotRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.filestore.v1.CreateSnapshotRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
37,180
java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/WarehouseBasedDeliveryTime.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1beta/shippingsettings.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1beta; /** * * * <pre> * Indicates that the delivery time should be calculated per warehouse * (shipping origin location) based on the settings of the selected carrier. * When set, no other transit time related field in `delivery_time` should be * set. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime} */ public final class WarehouseBasedDeliveryTime extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) WarehouseBasedDeliveryTimeOrBuilder { private static final long serialVersionUID = 0L; // Use WarehouseBasedDeliveryTime.newBuilder() to construct. 
private WarehouseBasedDeliveryTime(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private WarehouseBasedDeliveryTime() { carrier_ = ""; carrierService_ = ""; warehouse_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new WarehouseBasedDeliveryTime(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_WarehouseBasedDeliveryTime_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_WarehouseBasedDeliveryTime_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime.class, com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime.Builder.class); } private int bitField0_; public static final int CARRIER_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object carrier_ = ""; /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the carrier field is set. */ @java.lang.Override public boolean hasCarrier() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The carrier. 
*/ @java.lang.Override public java.lang.String getCarrier() { java.lang.Object ref = carrier_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); carrier_ = s; return s; } } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for carrier. */ @java.lang.Override public com.google.protobuf.ByteString getCarrierBytes() { java.lang.Object ref = carrier_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); carrier_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CARRIER_SERVICE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object carrierService_ = ""; /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the carrierService field is set. */ @java.lang.Override public boolean hasCarrierService() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The carrierService. 
*/ @java.lang.Override public java.lang.String getCarrierService() { java.lang.Object ref = carrierService_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); carrierService_ = s; return s; } } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for carrierService. */ @java.lang.Override public com.google.protobuf.ByteString getCarrierServiceBytes() { java.lang.Object ref = carrierService_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); carrierService_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int WAREHOUSE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object warehouse_ = ""; /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the warehouse field is set. */ @java.lang.Override public boolean hasWarehouse() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The warehouse. 
*/ @java.lang.Override public java.lang.String getWarehouse() { java.lang.Object ref = warehouse_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); warehouse_ = s; return s; } } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for warehouse. */ @java.lang.Override public com.google.protobuf.ByteString getWarehouseBytes() { java.lang.Object ref = warehouse_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); warehouse_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, carrier_); } if (((bitField0_ & 0x00000002) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, carrierService_); } if (((bitField0_ & 0x00000004) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, warehouse_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, carrier_); } if (((bitField0_ & 0x00000002) != 0)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, carrierService_); } if (((bitField0_ & 0x00000004) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, warehouse_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime)) { return super.equals(obj); } com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime other = (com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) obj; if (hasCarrier() != other.hasCarrier()) return false; if (hasCarrier()) { if (!getCarrier().equals(other.getCarrier())) return false; } if (hasCarrierService() != other.hasCarrierService()) return false; if (hasCarrierService()) { if (!getCarrierService().equals(other.getCarrierService())) return false; } if (hasWarehouse() != other.hasWarehouse()) return false; if (hasWarehouse()) { if (!getWarehouse().equals(other.getWarehouse())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasCarrier()) { hash = (37 * hash) + CARRIER_FIELD_NUMBER; hash = (53 * hash) + getCarrier().hashCode(); } if (hasCarrierService()) { hash = (37 * hash) + CARRIER_SERVICE_FIELD_NUMBER; hash = (53 * hash) + getCarrierService().hashCode(); } if (hasWarehouse()) { hash = (37 * hash) + WAREHOUSE_FIELD_NUMBER; hash = (53 * hash) + getWarehouse().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( java.nio.ByteBuffer data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Indicates that the delivery time should be calculated per warehouse * (shipping origin location) based on the settings of the selected carrier. 
* When set, no other transit time related field in `delivery_time` should be * set. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTimeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_WarehouseBasedDeliveryTime_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_WarehouseBasedDeliveryTime_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime.class, com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime.Builder .class); } // Construct using // com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; carrier_ = ""; carrierService_ = ""; warehouse_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.accounts.v1beta.ShippingSettingsProto .internal_static_google_shopping_merchant_accounts_v1beta_WarehouseBasedDeliveryTime_descriptor; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime 
getDefaultInstanceForType() { return com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime build() { com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime buildPartial() { com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime result = new com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.carrier_ = carrier_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.carrierService_ = carrierService_; to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.warehouse_ = warehouse_; to_bitField0_ |= 0x00000004; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) { return mergeFrom( (com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime other) { if (other == com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime .getDefaultInstance()) return this; if (other.hasCarrier()) { carrier_ = other.carrier_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCarrierService()) { carrierService_ = other.carrierService_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasWarehouse()) { warehouse_ = other.warehouse_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { carrier_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { carrierService_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { warehouse_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { 
done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object carrier_ = ""; /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the carrier field is set. */ public boolean hasCarrier() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The carrier. */ public java.lang.String getCarrier() { java.lang.Object ref = carrier_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); carrier_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for carrier. */ public com.google.protobuf.ByteString getCarrierBytes() { java.lang.Object ref = carrier_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); carrier_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The carrier to set. * @return This builder for chaining. 
*/ public Builder setCarrier(java.lang.String value) { if (value == null) { throw new NullPointerException(); } carrier_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearCarrier() { carrier_ = getDefaultInstance().getCarrier(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Carrier, such as `"UPS"` or `"Fedex"`. * </pre> * * <code>optional string carrier = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for carrier to set. * @return This builder for chaining. */ public Builder setCarrierBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); carrier_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object carrierService_ = ""; /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the carrierService field is set. */ public boolean hasCarrierService() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The carrierService. 
*/ public java.lang.String getCarrierService() { java.lang.Object ref = carrierService_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); carrierService_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for carrierService. */ public com.google.protobuf.ByteString getCarrierServiceBytes() { java.lang.Object ref = carrierService_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); carrierService_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The carrierService to set. * @return This builder for chaining. */ public Builder setCarrierService(java.lang.String value) { if (value == null) { throw new NullPointerException(); } carrierService_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearCarrierService() { carrierService_ = getDefaultInstance().getCarrierService(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. 
Carrier service, such as `"ground"` or `"2 days"`. The name of * the service must be in the eddSupportedServices list. * </pre> * * <code>optional string carrier_service = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for carrierService to set. * @return This builder for chaining. */ public Builder setCarrierServiceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); carrierService_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object warehouse_ = ""; /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the warehouse field is set. */ public boolean hasWarehouse() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The warehouse. */ public java.lang.String getWarehouse() { java.lang.Object ref = warehouse_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); warehouse_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for warehouse. 
*/ public com.google.protobuf.ByteString getWarehouseBytes() { java.lang.Object ref = warehouse_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); warehouse_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The warehouse to set. * @return This builder for chaining. */ public Builder setWarehouse(java.lang.String value) { if (value == null) { throw new NullPointerException(); } warehouse_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearWarehouse() { warehouse_ = getDefaultInstance().getWarehouse(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. Warehouse name. This should match * [warehouse][ShippingSetting.warehouses.name] * </pre> * * <code>optional string warehouse = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for warehouse to set. * @return This builder for chaining. 
*/ public Builder setWarehouseBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); warehouse_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime) private static final com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime(); } public static com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<WarehouseBasedDeliveryTime> PARSER = new com.google.protobuf.AbstractParser<WarehouseBasedDeliveryTime>() { @java.lang.Override public WarehouseBasedDeliveryTime parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<WarehouseBasedDeliveryTime> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<WarehouseBasedDeliveryTime> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.accounts.v1beta.WarehouseBasedDeliveryTime getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
37,393
jdk/src/share/classes/sun/font/ExtendedTextSourceLabel.java
/* * Copyright (c) 1998, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * * (C) Copyright IBM Corp. 1998-2003 - All Rights Reserved */ package sun.font; import java.awt.Font; import java.awt.Graphics2D; import java.awt.Rectangle; import java.awt.Shape; import java.awt.font.FontRenderContext; import java.awt.font.GlyphJustificationInfo; import java.awt.font.GlyphMetrics; import java.awt.font.LineMetrics; import java.awt.font.TextAttribute; import java.awt.geom.AffineTransform; import java.awt.geom.Point2D; import java.awt.geom.Rectangle2D; import java.util.Map; /** * Default implementation of ExtendedTextLabel. */ // {jbr} I made this class package-private to keep the // Decoration.Label API package-private. 
/* public */ class ExtendedTextSourceLabel extends ExtendedTextLabel implements Decoration.Label { TextSource source; private Decoration decorator; // caches private Font font; private AffineTransform baseTX; private CoreMetrics cm; Rectangle2D lb; Rectangle2D ab; Rectangle2D vb; Rectangle2D ib; StandardGlyphVector gv; float[] charinfo; /** * Create from a TextSource. */ public ExtendedTextSourceLabel(TextSource source, Decoration decorator) { this.source = source; this.decorator = decorator; finishInit(); } /** * Create from a TextSource, optionally using cached data from oldLabel starting at the offset. * If present oldLabel must have been created from a run of text that includes the text used in * the new label. Start in source corresponds to logical character offset in oldLabel. */ public ExtendedTextSourceLabel(TextSource source, ExtendedTextSourceLabel oldLabel, int offset) { // currently no optimization. this.source = source; this.decorator = oldLabel.decorator; finishInit(); } private void finishInit() { font = source.getFont(); Map<TextAttribute, ?> atts = font.getAttributes(); baseTX = AttributeValues.getBaselineTransform(atts); if (baseTX == null){ cm = source.getCoreMetrics(); } else { AffineTransform charTX = AttributeValues.getCharTransform(atts); if (charTX == null) { charTX = new AffineTransform(); } font = font.deriveFont(charTX); LineMetrics lm = font.getLineMetrics(source.getChars(), source.getStart(), source.getStart() + source.getLength(), source.getFRC()); cm = CoreMetrics.get(lm); } } // TextLabel API public Rectangle2D getLogicalBounds() { return getLogicalBounds(0, 0); } public Rectangle2D getLogicalBounds(float x, float y) { if (lb == null) { lb = createLogicalBounds(); } return new Rectangle2D.Float((float)(lb.getX() + x), (float)(lb.getY() + y), (float)lb.getWidth(), (float)lb.getHeight()); } public float getAdvance() { if (lb == null) { lb = createLogicalBounds(); } return (float)lb.getWidth(); } public Rectangle2D getVisualBounds(float 
x, float y) { if (vb == null) { vb = decorator.getVisualBounds(this); } return new Rectangle2D.Float((float)(vb.getX() + x), (float)(vb.getY() + y), (float)vb.getWidth(), (float)vb.getHeight()); } public Rectangle2D getAlignBounds(float x, float y) { if (ab == null) { ab = createAlignBounds(); } return new Rectangle2D.Float((float)(ab.getX() + x), (float)(ab.getY() + y), (float)ab.getWidth(), (float)ab.getHeight()); } public Rectangle2D getItalicBounds(float x, float y) { if (ib == null) { ib = createItalicBounds(); } return new Rectangle2D.Float((float)(ib.getX() + x), (float)(ib.getY() + y), (float)ib.getWidth(), (float)ib.getHeight()); } public Rectangle getPixelBounds(FontRenderContext frc, float x, float y) { return getGV().getPixelBounds(frc, x, y); } public boolean isSimple() { return decorator == Decoration.getPlainDecoration() && baseTX == null; } public AffineTransform getBaselineTransform() { return baseTX; // passing internal object, caller must not modify! } public Shape handleGetOutline(float x, float y) { return getGV().getOutline(x, y); } public Shape getOutline(float x, float y) { return decorator.getOutline(this, x, y); } public void handleDraw(Graphics2D g, float x, float y) { g.drawGlyphVector(getGV(), x, y); } public void draw(Graphics2D g, float x, float y) { decorator.drawTextAndDecorations(this, g, x, y); } /** * The logical bounds extends from the origin of the glyphvector to the * position at which a following glyphvector's origin should be placed. * We always assume glyph vectors are rendered from left to right, so * the origin is always to the left. * <p> On a left-to-right run, combining marks and 'ligatured away' * characters are to the right of their base characters. The charinfo * array will record the character positions for these 'missing' characters * as being at the origin+advance of the base glyph, with zero advance. 
* (This is not necessarily the same as the glyph position, for example, * an umlaut glyph may have a position to the left of this point, it depends * on whether the font was designed so that such glyphs overhang to the left * of their origin, or whether it presumes some kind of kerning to position * the glyphs). Anyway, the left of the bounds is the origin of the first * logical (leftmost) character, and the right is the origin + advance of the * last logical (rightmost) character. * <p> On a right-to-left run, these special characters are to the left * of their base characters. Again, since 'glyph position' has been abstracted * away, we can use the origin of the leftmost character, and the origin + * advance of the rightmost character. * <p> On a mixed run (hindi) we can't rely on the first logical character * being the leftmost character. However we can again rely on the leftmost * character origin and the rightmost character + advance. */ protected Rectangle2D createLogicalBounds() { return getGV().getLogicalBounds(); } public Rectangle2D handleGetVisualBounds() { return getGV().getVisualBounds(); } /** * Like createLogicalBounds except ignore leading and logically trailing white space. * this assumes logically trailing whitespace is also visually trailing. * Whitespace is anything that has a zero visual width, regardless of its advance. * <p> We make the same simplifying assumptions as in createLogicalBounds, namely * that we can rely on the charinfo to shield us from any glyph positioning oddities * in the font that place the glyph for a character at other than the pos + advance * of the character to its left. So we no longer need to skip chars with zero * advance, as their bounds (right and left) are already correct. 
*/ protected Rectangle2D createAlignBounds() { float[] info = getCharinfo(); float al = 0f; float at = -cm.ascent; float aw = 0f; float ah = cm.ascent + cm.descent; if (charinfo == null || charinfo.length == 0) { return new Rectangle2D.Float(al, at, aw, ah); } boolean lineIsLTR = (source.getLayoutFlags() & 0x8) == 0; int rn = info.length - numvals; if (lineIsLTR) { while (rn > 0 && info[rn+visw] == 0) { rn -= numvals; } } if (rn >= 0) { int ln = 0; while (ln < rn && ((info[ln+advx] == 0) || (!lineIsLTR && info[ln+visw] == 0))) { ln += numvals; } al = Math.max(0f, info[ln+posx]); aw = info[rn+posx] + info[rn+advx] - al; } /* boolean lineIsLTR = source.lineIsLTR(); int rn = info.length - numvals; while (rn > 0 && ((info[rn+advx] == 0) || (lineIsLTR && info[rn+visw] == 0))) { rn -= numvals; } if (rn >= 0) { int ln = 0; while (ln < rn && ((info[ln+advx] == 0) || (!lineIsLTR && info[ln+visw] == 0))) { ln += numvals; } al = Math.max(0f, info[ln+posx]); aw = info[rn+posx] + info[rn+advx] - al; } */ return new Rectangle2D.Float(al, at, aw, ah); } public Rectangle2D createItalicBounds() { float ia = cm.italicAngle; Rectangle2D lb = getLogicalBounds(); float l = (float)lb.getMinX(); float t = -cm.ascent; float r = (float)lb.getMaxX(); float b = cm.descent; if (ia != 0) { if (ia > 0) { l -= ia * (b - cm.ssOffset); r -= ia * (t - cm.ssOffset); } else { l -= ia * (t - cm.ssOffset); r -= ia * (b - cm.ssOffset); } } return new Rectangle2D.Float(l, t, r - l, b - t); } private final StandardGlyphVector getGV() { if (gv == null) { gv = createGV(); } return gv; } protected StandardGlyphVector createGV() { FontRenderContext frc = source.getFRC(); int flags = source.getLayoutFlags(); char[] context = source.getChars(); int start = source.getStart(); int length = source.getLength(); GlyphLayout gl = GlyphLayout.get(null); // !!! no custom layout engines gv = gl.layout(font, frc, context, start, length, flags, null); // ??? 
use textsource GlyphLayout.done(gl); return gv; } // ExtendedTextLabel API private static final int posx = 0, posy = 1, advx = 2, advy = 3, visx = 4, visy = 5, visw = 6, vish = 7; private static final int numvals = 8; public int getNumCharacters() { return source.getLength(); } public CoreMetrics getCoreMetrics() { return cm; } public float getCharX(int index) { validate(index); float[] charinfo = getCharinfo(); int idx = l2v(index) * numvals + posx; if (charinfo == null || idx >= charinfo.length) { return 0f; } else { return charinfo[idx]; } } public float getCharY(int index) { validate(index); float[] charinfo = getCharinfo(); int idx = l2v(index) * numvals + posy; if (charinfo == null || idx >= charinfo.length) { return 0f; } else { return charinfo[idx]; } } public float getCharAdvance(int index) { validate(index); float[] charinfo = getCharinfo(); int idx = l2v(index) * numvals + advx; if (charinfo == null || idx >= charinfo.length) { return 0f; } else { return charinfo[idx]; } } public Rectangle2D handleGetCharVisualBounds(int index) { validate(index); float[] charinfo = getCharinfo(); index = l2v(index) * numvals; if (charinfo == null || (index+vish) >= charinfo.length) { return new Rectangle2D.Float(); } return new Rectangle2D.Float( charinfo[index + visx], charinfo[index + visy], charinfo[index + visw], charinfo[index + vish]); } public Rectangle2D getCharVisualBounds(int index, float x, float y) { Rectangle2D bounds = decorator.getCharVisualBounds(this, index); if (x != 0 || y != 0) { bounds.setRect(bounds.getX()+x, bounds.getY()+y, bounds.getWidth(), bounds.getHeight()); } return bounds; } private void validate(int index) { if (index < 0) { throw new IllegalArgumentException("index " + index + " < 0"); } else if (index >= source.getLength()) { throw new IllegalArgumentException("index " + index + " < " + source.getLength()); } } /* public int hitTestChar(float x, float y) { // !!! 
return index of char hit, for swing // result is negative for trailing-edge hits // no italics so no problem at margins. // for now, ignore y since we assume horizontal text // find non-combining char origin to right of x float[] charinfo = getCharinfo(); int n = 0; int e = source.getLength(); while (n < e && charinfo[n + advx] != 0 && charinfo[n + posx] > x) { n += numvals; } float rightx = n < e ? charinfo[n+posx] : charinfo[e - numvals + posx] + charinfo[e - numvals + advx]; // find non-combining char to left of that char n -= numvals; while (n >= 0 && charinfo[n+advx] == 0) { n -= numvals; } float leftx = n >= 0 ? charinfo[n+posx] : 0; float lefta = n >= 0 ? charinfo[n+advx] : 0; n /= numvals; boolean left = true; if (x < leftx + lefta / 2f) { // left of prev char } else if (x < (leftx + lefta + rightx) / 2f) { // right of prev char left = false; } else { // left of follow char n += 1; } if ((source.getLayoutFlags() & 0x1) != 0) { n = getNumCharacters() - 1 - n; left = !left; } return left ? 
n : -n; } */ public int logicalToVisual(int logicalIndex) { validate(logicalIndex); return l2v(logicalIndex); } public int visualToLogical(int visualIndex) { validate(visualIndex); return v2l(visualIndex); } public int getLineBreakIndex(int start, float width) { float[] charinfo = getCharinfo(); int length = source.getLength(); --start; while (width >= 0 && ++start < length) { int cidx = l2v(start) * numvals + advx; if (cidx >= charinfo.length) { break; // layout bailed for some reason } float adv = charinfo[cidx]; width -= adv; } return start; } public float getAdvanceBetween(int start, int limit) { float a = 0f; float[] charinfo = getCharinfo(); --start; while (++start < limit) { int cidx = l2v(start) * numvals + advx; if (cidx >= charinfo.length) { break; // layout bailed for some reason } a += charinfo[cidx]; } return a; } public boolean caretAtOffsetIsValid(int offset) { // REMIND: improve this implementation // Ligature formation can either be done in logical order, // with the ligature glyph logically preceding the null // chars; or in visual order, with the ligature glyph to // the left of the null chars. This method's implementation // must reflect which strategy is used. 
if (offset == 0 || offset == source.getLength()) { return true; } char c = source.getChars()[source.getStart() + offset]; if (c == '\t' || c == '\n' || c == '\r') { // hack return true; } int v = l2v(offset); // If ligatures are always to the left, do this stuff: //if (!(source.getLayoutFlags() & 0x1) == 0) { // v += 1; // if (v == source.getLength()) { // return true; // } //} int idx = v * numvals + advx; float[] charinfo = getCharinfo(); if (charinfo == null || idx >= charinfo.length) { return false; } else { return charinfo[idx] != 0; } } private final float[] getCharinfo() { if (charinfo == null) { charinfo = createCharinfo(); } return charinfo; } /* * This takes the glyph info record obtained from the glyph vector and converts it into a similar record * adjusted to represent character data instead. For economy we don't use glyph info records in this processing. * * Here are some constraints: * - there can be more glyphs than characters (glyph insertion, perhaps based on normalization, has taken place) * - there can not be fewer glyphs than characters (0xffff glyphs are inserted for characters ligaturized away) * - each glyph maps to a single character, when multiple glyphs exist for a character they all map to it, but * no two characters map to the same glyph * - multiple glyphs mapping to the same character need not be in sequence (thai, tamil have split characters) * - glyphs may be arbitrarily reordered (Indic reorders glyphs) * - all glyphs share the same bidi level * - all glyphs share the same horizontal (or vertical) baseline * - combining marks visually follow their base character in the glyph array-- i.e. in an rtl gv they are * to the left of their base character-- and have zero advance. * * The output maps this to character positions, and therefore caret positions, via the following assumptions: * - zero-advance glyphs do not contribute to the advance of their character (i.e. 
position is ignored), conversely * if a glyph is to contribute to the advance of its character it must have a non-zero (float) advance * - no carets can appear between a zero width character and its preceding character, where 'preceding' is * defined logically. * - no carets can appear within a split character * - no carets can appear within a local reordering (i.e. Indic reordering, or non-adjacent split characters) * - all characters lie on the same baseline, and it is either horizontal or vertical * - the charinfo is in uniform ltr or rtl order (visual order), since local reorderings and split characters are removed * * The algorithm works in the following way: * 1) we scan the glyphs ltr or rtl based on the bidi run direction * 2) we can work in place, since we always consume a glyph for each char we write * a) if the line is ltr, we start writing at position 0 until we finish, there may be leftver space * b) if the line is rtl and 1-1, we start writing at position numChars/glyphs - 1 until we finish at 0 * c) otherwise if we don't finish at 0, we have to copy the data down * 3) we consume clusters in the following way: * a) the first element is always consumed * b) subsequent elements are consumed if: * i) their advance is zero * ii) their character index <= the character index of any character seen in this cluster * iii) the minimum character index seen in this cluster isn't adjacent to the previous cluster * c) character data is written as follows for horizontal lines (x/y and w/h are exchanged on vertical lines) * i) the x position is the position of the leftmost glyph whose advance is not zero * ii)the y position is the baseline * iii) the x advance is the distance to the maximum x + adv of all glyphs whose advance is not zero * iv) the y advance is the baseline * v) vis x,y,w,h tightly encloses the vis x,y,w,h of all the glyphs with nonzero w and h * 4) we can make some simple optimizations if we know some things: * a) if the mapping is 1-1, 
unidirectional, and there are no zero-adv glyphs, we just return the glyphinfo * b) if the mapping is 1-1, unidirectional, we just adjust the remaining glyphs to originate at right/left of the base * c) if the mapping is 1-1, we compute the base position and advance as we go, then go back to adjust the remaining glyphs * d) otherwise we keep separate track of the write position as we do (c) since no glyph in the cluster may be in the * position we are writing. * e) most clusters are simply the single base glyph in the same position as its character, so we try to avoid * copying its data unnecessarily. * 5) the glyph vector ought to provide access to these 'global' attributes to enable these optimizations. A single * int with flags set is probably ok, we could also provide accessors for each attribute. This doesn't map to * the GlyphMetrics flags very well, so I won't attempt to keep them similar. It might be useful to add those * in addition to these. * int FLAG_HAS_ZERO_ADVANCE_GLYPHS = 1; // set if there are zero-advance glyphs * int FLAG_HAS_NONUNIFORM_ORDER = 2; // set if some glyphs are rearranged out of character visual order * int FLAG_HAS_SPLIT_CHARACTERS = 4; // set if multiple glyphs per character * int getDescriptionFlags(); // return an int containing the above flags * boolean hasZeroAdvanceGlyphs(); * boolean hasNonuniformOrder(); * boolean hasSplitCharacters(); * The optimized cases in (4) correspond to values 0, 1, 3, and 7 returned by getDescriptionFlags(). 
*/ protected float[] createCharinfo() { StandardGlyphVector gv = getGV(); float[] glyphinfo = null; try { glyphinfo = gv.getGlyphInfo(); } catch (Exception e) { System.out.println(source); } /* if ((gv.getDescriptionFlags() & 0x7) == 0) { return glyphinfo; } */ int numGlyphs = gv.getNumGlyphs(); if (numGlyphs == 0) { return glyphinfo; } int[] indices = gv.getGlyphCharIndices(0, numGlyphs, null); boolean DEBUG = false; if (DEBUG) { System.err.println("number of glyphs: " + numGlyphs); for (int i = 0; i < numGlyphs; ++i) { System.err.println("g: " + i + ", x: " + glyphinfo[i*numvals+posx] + ", a: " + glyphinfo[i*numvals+advx] + ", n: " + indices[i]); } } int minIndex = indices[0]; // smallest index seen this cluster int maxIndex = minIndex; // largest index seen this cluster int nextMin = 0; // expected smallest index for this cluster int cp = 0; // character position int cx = 0; // character index (logical) int gp = 0; // glyph position int gx = 0; // glyph index (visual) int gxlimit = numGlyphs; // limit of gx, when we reach this we're done int pdelta = numvals; // delta for incrementing positions int xdelta = 1; // delta for incrementing indices boolean ltr = (source.getLayoutFlags() & 0x1) == 0; if (!ltr) { minIndex = indices[numGlyphs - 1]; maxIndex = minIndex; nextMin = 0; // still logical cp = glyphinfo.length - numvals; cx = 0; // still logical gp = glyphinfo.length - numvals; gx = numGlyphs - 1; gxlimit = -1; pdelta = -numvals; xdelta = -1; } /* // to support vertical, use 'ixxxx' indices and swap horiz and vertical components if (source.isVertical()) { iposx = posy; iposy = posx; iadvx = advy; iadvy = advx; ivisx = visy; ivisy = visx; ivish = visw; ivisw = vish; } else { // use standard values } */ // use intermediates to reduce array access when we need to float cposl = 0, cposr = 0, cvisl = 0, cvist = 0, cvisr = 0, cvisb = 0; float baseline = 0; // record if we have to copy data even when no cluster boolean mustCopy = false; while (gx != gxlimit) { // 
start of new cluster boolean haveCopy = false; int clusterExtraGlyphs = 0; minIndex = indices[gx]; maxIndex = minIndex; // advance to next glyph gx += xdelta; gp += pdelta; /* while (gx != gxlimit && (glyphinfo[gp + advx] == 0 || minIndex != nextMin || indices[gx] <= maxIndex)) { */ while (gx != gxlimit && ((glyphinfo[gp + advx] == 0) || (minIndex != nextMin) || (indices[gx] <= maxIndex) || (maxIndex - minIndex > clusterExtraGlyphs))) { // initialize base data first time through, using base glyph if (!haveCopy) { int gps = gp - pdelta; cposl = glyphinfo[gps + posx]; cposr = cposl + glyphinfo[gps + advx]; cvisl = glyphinfo[gps + visx]; cvist = glyphinfo[gps + visy]; cvisr = cvisl + glyphinfo[gps + visw]; cvisb = cvist + glyphinfo[gps + vish]; haveCopy = true; } // have an extra glyph in this cluster ++clusterExtraGlyphs; // adjust advance only if new glyph has non-zero advance float radvx = glyphinfo[gp + advx]; if (radvx != 0) { float rposx = glyphinfo[gp + posx]; cposl = Math.min(cposl, rposx); cposr = Math.max(cposr, rposx + radvx); } // adjust visible bounds only if new glyph has non-empty bounds float rvisw = glyphinfo[gp + visw]; if (rvisw != 0) { float rvisx = glyphinfo[gp + visx]; float rvisy = glyphinfo[gp + visy]; cvisl = Math.min(cvisl, rvisx); cvist = Math.min(cvist, rvisy); cvisr = Math.max(cvisr, rvisx + rvisw); cvisb = Math.max(cvisb, rvisy + glyphinfo[gp + vish]); } // adjust min, max index minIndex = Math.min(minIndex, indices[gx]); maxIndex = Math.max(maxIndex, indices[gx]); // get ready to examine next glyph gx += xdelta; gp += pdelta; } // done with cluster, gx and gp are set for next glyph if (DEBUG) { System.out.println("minIndex = " + minIndex + ", maxIndex = " + maxIndex); } nextMin = maxIndex + 1; // do common character adjustments glyphinfo[cp + posy] = baseline; glyphinfo[cp + advy] = 0; if (haveCopy) { // save adjustments to the base character glyphinfo[cp + posx] = cposl; glyphinfo[cp + advx] = cposr - cposl; glyphinfo[cp + visx] = 
cvisl; glyphinfo[cp + visy] = cvist; glyphinfo[cp + visw] = cvisr - cvisl; glyphinfo[cp + vish] = cvisb - cvist; // compare number of chars read with number of glyphs read. // if more glyphs than chars, set mustCopy to true, as we'll always have // to copy the data from here on out. if (maxIndex - minIndex < clusterExtraGlyphs) { mustCopy = true; } // Fix the characters that follow the base character. // New values are all the same. Note we fix the number of characters // we saw, not the number of glyphs we saw. if (minIndex < maxIndex) { if (!ltr) { // if rtl, characters to left of base, else to right. reuse cposr. cposr = cposl; } cvisr -= cvisl; // reuse, convert to deltas. cvisb -= cvist; int iMinIndex = minIndex, icp = cp / 8; while (minIndex < maxIndex) { ++minIndex; cx += xdelta; cp += pdelta; if (cp < 0 || cp >= glyphinfo.length) { if (DEBUG) System.out.println("minIndex = " + iMinIndex + ", maxIndex = " + maxIndex + ", cp = " + icp); } glyphinfo[cp + posx] = cposr; glyphinfo[cp + posy] = baseline; glyphinfo[cp + advx] = 0; glyphinfo[cp + advy] = 0; glyphinfo[cp + visx] = cvisl; glyphinfo[cp + visy] = cvist; glyphinfo[cp + visw] = cvisr; glyphinfo[cp + vish] = cvisb; } } // no longer using this copy haveCopy = false; } else if (mustCopy) { // out of synch, so we have to copy all the time now int gpr = gp - pdelta; glyphinfo[cp + posx] = glyphinfo[gpr + posx]; glyphinfo[cp + advx] = glyphinfo[gpr + advx]; glyphinfo[cp + visx] = glyphinfo[gpr + visx]; glyphinfo[cp + visy] = glyphinfo[gpr + visy]; glyphinfo[cp + visw] = glyphinfo[gpr + visw]; glyphinfo[cp + vish] = glyphinfo[gpr + vish]; } // else glyphinfo is already at the correct character position, and is unchanged, so just leave it // reset for new cluster cp += pdelta; cx += xdelta; } if (mustCopy && !ltr) { // data written to wrong end of array, need to shift down cp -= pdelta; // undo last increment, get start of valid character data in array System.arraycopy(glyphinfo, cp, glyphinfo, 0, 
glyphinfo.length - cp); } if (DEBUG) { char[] chars = source.getChars(); int start = source.getStart(); int length = source.getLength(); System.out.println("char info for " + length + " characters"); for(int i = 0; i < length * numvals;) { System.out.println(" ch: " + Integer.toHexString(chars[start + v2l(i / numvals)]) + " x: " + glyphinfo[i++] + " y: " + glyphinfo[i++] + " xa: " + glyphinfo[i++] + " ya: " + glyphinfo[i++] + " l: " + glyphinfo[i++] + " t: " + glyphinfo[i++] + " w: " + glyphinfo[i++] + " h: " + glyphinfo[i++]); } } return glyphinfo; } /** * Map logical character index to visual character index. * <p> * This ignores hindi reordering. @see createCharinfo */ protected int l2v(int index) { return (source.getLayoutFlags() & 0x1) == 0 ? index : source.getLength() - 1 - index; } /** * Map visual character index to logical character index. * <p> * This ignores hindi reordering. @see createCharinfo */ protected int v2l(int index) { return (source.getLayoutFlags() & 0x1) == 0 ? index : source.getLength() - 1 - index; } public TextLineComponent getSubset(int start, int limit, int dir) { return new ExtendedTextSourceLabel(source.getSubSource(start, limit-start, dir), decorator); } public String toString() { if (true) { return source.toString(source.WITHOUT_CONTEXT); } StringBuffer buf = new StringBuffer(); buf.append(super.toString()); buf.append("[source:"); buf.append(source.toString(source.WITHOUT_CONTEXT)); buf.append(", lb:"); buf.append(lb); buf.append(", ab:"); buf.append(ab); buf.append(", vb:"); buf.append(vb); buf.append(", gv:"); buf.append(gv); buf.append(", ci: "); if (charinfo == null) { buf.append("null"); } else { buf.append(charinfo[0]); for (int i = 1; i < charinfo.length;) { buf.append(i % numvals == 0 ? 
"; " : ", "); buf.append(charinfo[i]); } } buf.append("]"); return buf.toString(); } //public static ExtendedTextLabel create(TextSource source) { // return new ExtendedTextSourceLabel(source); //} public int getNumJustificationInfos() { return getGV().getNumGlyphs(); } public void getJustificationInfos(GlyphJustificationInfo[] infos, int infoStart, int charStart, int charLimit) { // This simple implementation only uses spaces for justification. // Since regular characters aren't justified, we don't need to deal with // special infos for combining marks or ligature substitution glyphs. // added character justification for kanjii only 2/22/98 StandardGlyphVector gv = getGV(); float[] charinfo = getCharinfo(); float size = gv.getFont().getSize2D(); GlyphJustificationInfo nullInfo = new GlyphJustificationInfo(0, false, GlyphJustificationInfo.PRIORITY_NONE, 0, 0, false, GlyphJustificationInfo.PRIORITY_NONE, 0, 0); GlyphJustificationInfo spaceInfo = new GlyphJustificationInfo(size, true, GlyphJustificationInfo.PRIORITY_WHITESPACE, 0, size, true, GlyphJustificationInfo.PRIORITY_WHITESPACE, 0, size / 4f); GlyphJustificationInfo kanjiInfo = new GlyphJustificationInfo(size, true, GlyphJustificationInfo.PRIORITY_INTERCHAR, size, size, false, GlyphJustificationInfo.PRIORITY_NONE, 0, 0); char[] chars = source.getChars(); int offset = source.getStart(); // assume data is 1-1 and either all rtl or all ltr, for now int numGlyphs = gv.getNumGlyphs(); int minGlyph = 0; int maxGlyph = numGlyphs; boolean ltr = (source.getLayoutFlags() & 0x1) == 0; if (charStart != 0 || charLimit != source.getLength()) { if (ltr) { minGlyph = charStart; maxGlyph = charLimit; } else { minGlyph = numGlyphs - charLimit; maxGlyph = numGlyphs - charStart; } } for (int i = 0; i < numGlyphs; ++i) { GlyphJustificationInfo info = null; if (i >= minGlyph && i < maxGlyph) { if (charinfo[i * numvals + advx] == 0) { // combining marks don't justify info = nullInfo; } else { int ci = v2l(i); // 1-1 assumption again 
char c = chars[offset + ci]; if (Character.isWhitespace(c)) { info = spaceInfo; // CJK, Hangul, CJK Compatibility areas } else if (c >= 0x4e00 && (c < 0xa000) || (c >= 0xac00 && c < 0xd7b0) || (c >= 0xf900 && c < 0xfb00)) { info = kanjiInfo; } else { info = nullInfo; } } } infos[infoStart + i] = info; } } public TextLineComponent applyJustificationDeltas(float[] deltas, int deltaStart, boolean[] flags) { // when we justify, we need to adjust the charinfo since spaces // change their advances. preserve the existing charinfo. float[] newCharinfo = (float[])getCharinfo().clone(); // we only push spaces, so never need to rejustify flags[0] = false; // preserve the existing gv. StandardGlyphVector newgv = (StandardGlyphVector)getGV().clone(); float[] newPositions = newgv.getGlyphPositions(null); int numGlyphs = newgv.getNumGlyphs(); /* System.out.println("oldgv: " + getGV() + ", newgv: " + newgv); System.out.println("newpositions: " + newPositions); for (int i = 0; i < newPositions.length; i += 2) { System.out.println("[" + (i/2) + "] " + newPositions[i] + ", " + newPositions[i+1]); } System.out.println("deltas: " + deltas + " start: " + deltaStart); for (int i = deltaStart; i < deltaStart + numGlyphs; i += 2) { System.out.println("[" + (i/2) + "] " + deltas[i] + ", " + deltas[i+1]); } */ char[] chars = source.getChars(); int offset = source.getStart(); // accumulate the deltas to adjust positions and advances. 
// handle whitespace by modifying advance, // handle everything else by modifying position before and after float deltaPos = 0; for (int i = 0; i < numGlyphs; ++i) { if (Character.isWhitespace(chars[offset + v2l(i)])) { newPositions[i*2] += deltaPos; float deltaAdv = deltas[deltaStart + i*2] + deltas[deltaStart + i*2 + 1]; newCharinfo[i * numvals + posx] += deltaPos; newCharinfo[i * numvals + visx] += deltaPos; newCharinfo[i * numvals + advx] += deltaAdv; deltaPos += deltaAdv; } else { deltaPos += deltas[deltaStart + i*2]; newPositions[i*2] += deltaPos; newCharinfo[i * numvals + posx] += deltaPos; newCharinfo[i * numvals + visx] += deltaPos; deltaPos += deltas[deltaStart + i*2 + 1]; } } newPositions[numGlyphs * 2] += deltaPos; newgv.setGlyphPositions(newPositions); /* newPositions = newgv.getGlyphPositions(null); System.out.println(">> newpositions: " + newPositions); for (int i = 0; i < newPositions.length; i += 2) { System.out.println("[" + (i/2) + "] " + newPositions[i] + ", " + newPositions[i+1]); } */ ExtendedTextSourceLabel result = new ExtendedTextSourceLabel(source, decorator); result.gv = newgv; result.charinfo = newCharinfo; return result; } }
apache/xmlgraphics-batik
37,341
batik-svgrasterizer/src/main/java/org/apache/batik/apps/rasterizer/SVGConverter.java
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.batik.apps.rasterizer; import java.awt.Color; import java.awt.geom.Rectangle2D; import java.io.File; import java.io.FileFilter; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.util.HashMap; import java.util.Map; import java.util.List; import java.util.ArrayList; import org.apache.batik.transcoder.Transcoder; import org.apache.batik.transcoder.TranscoderInput; import org.apache.batik.transcoder.TranscoderOutput; import org.apache.batik.transcoder.image.ImageTranscoder; import org.apache.batik.transcoder.image.JPEGTranscoder; import org.apache.batik.transcoder.image.PNGTranscoder; import org.apache.batik.util.ParsedURL; /** * This application can be used to convert SVG images to raster images. * <br> * Possible result raster image formats are PNG, JPEG, TIFF, and PDF. * The Batik Transcoder API is used to execute the conversion. FOP is * needed to be able to transcode to the PDF format<br> * * The source has to be list of files or URL (set by the <code>setSources</code> * method). <br> * * The destination can be:<br><ul> * <li><b>unspecified</b>. 
In that case, only file sources can be converted and
 * a file in the same directory as the source will be created.</li>
 * <li><b>a directory</b>, set by the <code>setDst</code> method. In that
 * case, the output files are created in that destination directory.</li>
 * <li><b>a file</b>. In case there is a <i>single source</i>, the
 * destination can be a single named file (set with the <code>setDst</code>
 * method).</li>
 * </ul>
 *
 * <hr>
 *
 * There are a number of options which control the way the image is
 * converted to the destination format:<br><ul>
 * <li>destinationType: controls the type of conversion which should be done.
 * See the {@link DestinationType} documentation.</li>
 * <li>width/height: they control the desired width and height, in user space,
 * for the output image.</li>
 * <li>maxWidth/maxHeight: control the maximum width and height,
 * in user space, of the output image.</li>
 * <li>area: controls the specific sub-area of the image which should be
 * rendered.</li>
 * <li>backgroundColor: controls the color which is used to fill the
 * background before rendering the image.</li>
 * <li>quality: relevant only for JPEG destinations, this controls the
 * encoding quality.</li>
 * <li>indexed: relevant only for PNG, controls the number of bits
 * used in writing of a palletized files.</li>
 * <li>mediaType: controls the CSS media, or list of media, for which the
 * image should be rendered.</li>
 * <li>alternate: controls the alternate CSS stylesheet to activate,
 * if any.</li>
 * <li>language: controls the user language with which the SVG document
 * should be converted.</li>
 * <li>userStylesheet: defines the user stylesheet to apply to SVG documents
 * in addition to other stylesheets referenced by or embedded in the
 * SVG documents.</li>
 * <li>pixelUnitToMillimeter: defines the size of a pixel in millimeters
 * to use when processing the SVG documents.</li>
 * </ul>
 *
 * @version $Id$
 * @author <a href="mailto:Henri.Ruini@nokia.com">Henri Ruini</a>
 * @author <a href="mailto:vhardy@apache.org">Vincent Hardy</a>
 */
public class SVGConverter {
    //
    // Error codes reported by the SVGConverter
    //

    /** Reported when no source file has been specified. */
    public static final String ERROR_NO_SOURCES_SPECIFIED
        = "SVGConverter.error.no.sources.specified";

    /**
     * Reported when there is more than one valid input source
     * and no output directory has been set and the source is
     * not a file.
     */
    public static final String ERROR_CANNOT_COMPUTE_DESTINATION
        = "SVGConverter.error.cannot.compute.destination";

    /** Reported when the dst is a file and there are multiple sources. */
    public static final String ERROR_CANNOT_USE_DST_FILE
        = "SVGConverter.error.cannot.use.dst.file";

    /**
     * Reported when the <code>Transcoder</code> for the requested
     * <code>destinationType</code> cannot be found.
     */
    public static final String ERROR_CANNOT_ACCESS_TRANSCODER
        = "SVGConverter.error.cannot.access.transcoder";

    /**
     * Reported when the source is found to be the same as the destination.
     * Note that it is not guaranteed that this error condition will always
     * be detected.
     */
    public static final String ERROR_SOURCE_SAME_AS_DESTINATION
        = "SVGConverter.error.source.same.as.destination";

    /** Reported when one of the sources cannot be read. */
    public static final String ERROR_CANNOT_READ_SOURCE
        = "SVGConverter.error.cannot.read.source";

    /** Reported when an error happens while opening a source file. */
    public static final String ERROR_CANNOT_OPEN_SOURCE
        = "SVGConverter.error.cannot.open.source";

    /**
     * Reported if the output is not writeable. This may happen if the
     * output file already exists and does not have write permission.
     */
    public static final String ERROR_OUTPUT_NOT_WRITEABLE
        = "SVGConverter.error.output.not.writeable";

    /** Reported when an error happens while trying to open the output file for writing. */
    public static final String ERROR_CANNOT_OPEN_OUTPUT_FILE
        = "SVGConverter.error.cannot.open.output.file";

    /** Reported when the converter was not able to create the destination directory for the files. */
    public static final String ERROR_UNABLE_TO_CREATE_OUTPUT_DIR
        = "SVGConverter.error.unable.to.create.output.dir";

    /** Reported when an error occurs while converting the source file. */
    public static final String ERROR_WHILE_RASTERIZING_FILE
        = "SVGConverter.error.while.rasterizing.file";

    //
    // Class variables and constants
    //

    /** SVG file extension. */
    protected static final String SVG_EXTENSION = ".svg";

    /** Default quality value. A value of -1 means disabled. */
    protected static final float DEFAULT_QUALITY = -1.0f;

    /** Maximum quality value. */
    protected static final float MAXIMUM_QUALITY = .99F;

    /** Default result type. */
    protected static final DestinationType DEFAULT_RESULT_TYPE = DestinationType.PNG;

    /** Default width. */
    protected static final float DEFAULT_WIDTH = -1;

    /** Default height. */
    protected static final float DEFAULT_HEIGHT = -1;

    /** Result type. */
    protected DestinationType destinationType = DEFAULT_RESULT_TYPE;

    /** Output image height. */
    protected float height = DEFAULT_HEIGHT;

    /** Output image width. */
    protected float width = DEFAULT_WIDTH;

    /** Maximum output image height. */
    protected float maxHeight = DEFAULT_HEIGHT;

    /** Maximum output image width. */
    protected float maxWidth = DEFAULT_WIDTH;

    /** Output image quality. */
    protected float quality = DEFAULT_QUALITY;

    /** Should output image be indexed. */
    protected int indexed = -1;

    /** Output AOI area. */
    protected Rectangle2D area = null;

    /** Language. */
    protected String language = null;

    /** User stylesheet. */
    protected String userStylesheet = null;

    /** Millimeters per pixel. */
    protected float pixelUnitToMillimeter = -1.0f;

    /** Validation flag. */
    protected boolean validate = false;

    /** Execute the 'onload' scripts flag. */
    protected boolean executeOnload = false;

    /** Document time to seek to. */
    protected float snapshotTime = Float.NaN;

    /** Set of allowed script types. */
    protected String allowedScriptTypes = null;

    /**
     * Controls whether scripts can only have the same origin as
     * the document which references them.
     */
    protected boolean constrainScriptOrigin = true;

    /** Controls the KEY_ALLOW_EXTERNAL_RESOURCES transcoding hint. */
    protected boolean allowExternalResources;

    /** Controls whether scripts should be run securely or not. */
    protected boolean securityOff = false;

    /** Source files or URLs. */
    protected List sources = null;

    /**
     * Destination image path. Can be a file (for single source) or
     * a directory.
     */
    protected File dst;

    /** Background color for the output images. */
    protected Color backgroundColor = null;

    /** Media type for which the SVG image should be rendered. */
    protected String mediaType = null;

    /** Default value for the font-family when it is unspecified. */
    protected String defaultFontFamily = null;

    /** Alternate stylesheet which should be applied to the SVG. */
    protected String alternateStylesheet = null;

    /** Contents of <code>fileset</code> elements. */
    protected List files = new ArrayList();

    /**
     * Controls some aspects of the converter's operation, such as
     * whether or not it should proceed in some error situations.
     * See {@link SVGConverterController}.
     */
    protected SVGConverterController controller;

    /**
     * Default constructor: uses a {@link DefaultSVGConverterController}.
     */
    public SVGConverter(){
        this(new DefaultSVGConverterController());
    }

    /**
     * Constructor.
     *
     * @param controller decides whether processing should proceed in
     *        error situations; must not be null.
     */
    public SVGConverter(SVGConverterController controller){
        if (controller == null){
            throw new IllegalArgumentException();
        }

        this.controller = controller;
    }

    //
    // Property get/set methods
    //

    /**
     * Sets the <code>destinationType</code> attribute value.
     * Should not be null.
     */
    public void setDestinationType(DestinationType destinationType) {
        if (destinationType == null){
            throw new IllegalArgumentException();
        }
        this.destinationType = destinationType;
    }

    public DestinationType getDestinationType(){
        return destinationType;
    }

    /**
     * If less than or equal to zero, the height is not
     * constrained on the output image. The height is in
     * user space.
     */
    public void setHeight(float height) {
        this.height = height;
    }

    public float getHeight(){
        return height;
    }

    /**
     * If less than or equal to zero, the width is not
     * constrained on the output image. The width is in
     * user space.
     */
    public void setWidth(float width) {
        this.width = width;
    }

    public float getWidth(){
        return width;
    }

    /**
     * If less than or equal to zero, the maximum height
     * does not have any effect on the output image.
     * The maximum height is in user space.
     */
    public void setMaxHeight(float height) {
        this.maxHeight = height;
    }

    public float getMaxHeight(){
        return maxHeight;
    }

    /**
     * If less than or equal to zero, the maximum width
     * does not have any effect on the output image.
     * The maximum width is in user space.
     */
    public void setMaxWidth(float width) {
        this.maxWidth = width;
    }

    public float getMaxWidth(){
        return maxWidth;
    }

    /**
     * Sets the JPEG encoding quality. The value should be strictly
     * less than 1. If the value is less than zero, then the maximum
     * encoding quality is used.
     */
    public void setQuality(float quality) throws IllegalArgumentException {
        if (quality >= 1){
            throw new IllegalArgumentException();
        }

        this.quality = quality;
    }

    public float getQuality(){
        return quality;
    }

    /**
     * Tells the PNG encoder to reduce the image to 256 colors, so the
     * PNG file is indexed.
     */
    public void setIndexed(int bits) throws IllegalArgumentException {
        this.indexed = bits;
    }

    public int getIndexed(){
        return indexed;
    }

    /**
     * Sets the user language. If the value is null, then the default (see
     * {@link org.apache.batik.bridge.UserAgent#getLanguages})
     * is used.
     */
    public void setLanguage(String language){
        this.language = language;
    }

    public String getLanguage(){
        return language;
    }

    /**
     * Sets the user stylesheet. May be null.
     */
    public void setUserStylesheet(String userStylesheet){
        this.userStylesheet = userStylesheet;
    }

    public String getUserStylesheet(){
        return userStylesheet;
    }

    /**
     * Sets the millimeters per pixel constant. A negative
     * value will cause the default value
     * (see {@link org.apache.batik.bridge.UserAgent#getPixelUnitToMillimeter})
     * to be used.
     */
    public void setPixelUnitToMillimeter(float pixelUnitToMillimeter){
        this.pixelUnitToMillimeter = pixelUnitToMillimeter;
    }

    public float getPixelUnitToMillimeter(){
        return pixelUnitToMillimeter;
    }

    /**
     * Sets the <code>area</code> as a Rectangle. This value can
     * be null in which case the whole image will be rendered. If the
     * area is not null, then only the portion of the image it
     * defines will be rendered.
     */
    public void setArea(Rectangle2D area){
        this.area = area;
    }

    public Rectangle2D getArea(){
        return area;
    }

    /**
     * Sets the list of individual SVG sources. The strings
     * can be either URLs or file names. Note that invalid
     * sources (e.g., read-protected files or invalid URLs)
     * will cause <code>SVGConverterExceptions</code> to be
     * thrown during the transcoding process (see {@link #execute}).
     */
    public void setSources(String[] sources) {
        if (sources == null){
            this.sources = null;
        } else {
            this.sources = new ArrayList();
            // Null entries are silently dropped; an all-null array is
            // equivalent to passing null.
            for (String source : sources) {
                if (source != null) {
                    this.sources.add(source);
                }
            }

            if (this.sources.size() == 0){
                this.sources = null;
            }
        }
    }

    public List getSources(){
        return sources;
    }

    /**
     * When converting a single source, dst can be a file.
     * Otherwise, it should be a directory.
     */
    public void setDst(File dst) {
        this.dst = dst;
    }

    public File getDst(){
        return dst;
    }

    /**
     * Sets the <code>backgroundColor</code> value. This can be
     * null in which case no color will be used to fill the
     * background before rendering this SVG image.
     */
    public void setBackgroundColor(Color backgroundColor){
        this.backgroundColor = backgroundColor;
    }

    public Color getBackgroundColor(){
        return backgroundColor;
    }

    /**
     * Sets the <code>mediaType</code> value. This value controls
     * the CSS media for which the image should be rendered. It
     * can be null, in which case no specific media selectors will
     * apply. If it is not null, it can contain space separated values
     * of the medias for which the image should be rendered. For example,
     * "screen", "print" or "screen projection" are valid values.
     */
    public void setMediaType(String mediaType){
        this.mediaType = mediaType;
    }

    public String getMediaType(){
        return mediaType;
    }

    /**
     * Sets the <code>defaultFontFamily</code> value. This value controls
     * the default value for the font-family CSS property when that
     * property is unspecified.
     */
    public void setDefaultFontFamily(String defaultFontFamily) {
        this.defaultFontFamily = defaultFontFamily;
    }

    public String getDefaultFontFamily() {
        return defaultFontFamily;
    }

    /**
     * Sets the <code>alternateStylesheet</code> value. This value
     * controls the CSS alternate stylesheet to select in the
     * rendered SVG file(s). It may be null, in which case no alternate
     * stylesheet will be selected.
     */
    public void setAlternateStylesheet(String alternateStylesheet){
        this.alternateStylesheet = alternateStylesheet;
    }

    public String getAlternateStylesheet(){
        return alternateStylesheet;
    }

    /**
     * Defines whether or not input sources should be validated in
     * the conversion process.
     */
    public void setValidate(boolean validate){
        this.validate = validate;
    }

    public boolean getValidate(){
        return validate;
    }

    /**
     * Sets whether or not scripts attached to the DOM using 'onload'
     * event attribute must be executed before rasterizing.
     *
     * @param b true means scripts will be executed
     */
    public void setExecuteOnload(boolean b){
        this.executeOnload = b;
    }

    /**
     * Returns true if the scripts attached to the DOM using 'onload'
     * event attribute are going to be executed before rasterizing,
     * false otherwise.
     */
    public boolean getExecuteOnload(){
        return executeOnload;
    }

    /**
     * Sets the document time to seek to before rasterizing.
     *
     * @param t the document time, in seconds
     */
    public void setSnapshotTime(float t) {
        snapshotTime = t;
    }

    /**
     * Returns the document time to seek to before rasterizing.
     */
    public float getSnapshotTime() {
        return snapshotTime;
    }

    /**
     * Sets the set of allowed script types (i.e., the set of possible
     * values for the type attribute in the &lt;script&gt; element),
     * as a comma separated list of allowed values.
     */
    public void setAllowedScriptTypes(String allowedScriptTypes){
        this.allowedScriptTypes = allowedScriptTypes;
    }

    /**
     * Returns the list of allowed script types.
     *
     * @see #setAllowedScriptTypes
     */
    public String getAllowedScriptTypes(){
        return allowedScriptTypes;
    }

    /**
     * Sets whether scripts should only be loaded from the same
     * location as the documents referencing them.
     */
    public void setConstrainScriptOrigin(boolean constrainScriptOrigin){
        this.constrainScriptOrigin = constrainScriptOrigin;
    }

    /**
     * Returns whether scripts can only be loaded from the same
     * origin as the documents referencing them.
     */
    public boolean getConstrainScriptOrigin(){
        return constrainScriptOrigin;
    }

    /**
     * Sets whether or not scripts should be run securely.
     */
    public void setSecurityOff(boolean securityOff){
        this.securityOff = securityOff;
    }

    /**
     * Returns whether or not scripts will be run securely.
     */
    public boolean getSecurityOff(){
        return securityOff;
    }

    /**
     * Returns true if f is a File. <code>f</code> is found to be a file if
     * it exists and is a file. If it does not exist, it is declared
     * to be a file if it has the same extension as the DestinationType.
     */
    protected boolean isFile(File f){
        if (f.exists()){
            return f.isFile();
        }
        // Not on disk yet: consider it a file when it carries the
        // destination type's extension.
        return f.toString().toLowerCase().endsWith(destinationType.getExtension());
    }

    /**
     * Starts the conversion process.
     *
     * @throws SVGConverterException thrown if parameters are not set correctly.
     */
    public void execute() throws SVGConverterException {
        // Compute the set of SVGConverterSource from the source properties
        // (srcDir and srcFile); this throws an exception if there is not
        // at least one src file.
        List sources = computeSources();

        // Compute the destination files from dst. A single source may be
        // written straight to a named destination file; otherwise one
        // output file is derived per source.
        List dstFiles = null;
        if (sources.size() == 1 && dst != null && isFile(dst)){
            dstFiles = new ArrayList();
            dstFiles.add(dst);
        } else {
            dstFiles = computeDstFiles(sources);
        }

        // Now, get the transcoder to use for the operation.
        Transcoder transcoder = destinationType.getTranscoder();
        if (transcoder == null) {
            throw new SVGConverterException(ERROR_CANNOT_ACCESS_TRANSCODER,
                                            new Object[]{destinationType.toString()},
                                            true /* fatal error */);
        }

        // Now, compute the set of transcoding hints to use.
        Map hints = computeTranscodingHints();
        transcoder.setTranscodingHints(hints);

        // Notify listener that task has been computed.
        if (!controller.proceedWithComputedTask(transcoder,
                                                hints,
                                                sources,
                                                dstFiles)){
            return;
        }

        // Convert files one by one.
        for (int i = 0; i < sources.size(); i++) {
            SVGConverterSource currentFile = (SVGConverterSource) sources.get(i);
            File outputFile = (File) dstFiles.get(i);

            createOutputDir(outputFile);
            transcode(currentFile, outputFile, transcoder);
        }
    }

    /**
     * Populates a list with destination file names computed from the names
     * of the files in the sources list and the value of the dst property.
     *
     * @throws SVGConverterException if dst is an existing file while there
     *         are multiple sources, or if no dst is set and a source is
     *         not a file (so no output directory can be derived).
     */
    protected List computeDstFiles(List sources)
        throws SVGConverterException {
        List dstFiles = new ArrayList();
        if (dst != null) {
            if (dst.exists() && dst.isFile()) {
                throw new SVGConverterException(ERROR_CANNOT_USE_DST_FILE);
            }

            //
            // Either dst exists and is a directory, or dst does not
            // exist and we may fail later on in createOutputDir.
            //
            for (Object source : sources) {
                SVGConverterSource src = (SVGConverterSource) source;
                // Generate output filename from input filename.
                File outputName = new File(dst.getPath(),
                                           getDestinationFile(src.getName()));
                dstFiles.add(outputName);
            }
        } else {
            //
            // No destination directory has been specified.
            // Try and create files in the same directory as the
            // sources. This only works if sources are files.
            //
            for (Object source : sources) {
                SVGConverterSource src = (SVGConverterSource) source;
                if (!(src instanceof SVGConverterFileSource)) {
                    throw new SVGConverterException(ERROR_CANNOT_COMPUTE_DESTINATION,
                                                    new Object[]{src});
                }

                // Generate output filename from input filename.
                SVGConverterFileSource fs = (SVGConverterFileSource) src;
                File outputName = new File(fs.getFile().getParent(),
                                           getDestinationFile(src.getName()));
                dstFiles.add(outputName);
            }
        }

        return dstFiles;
    }

    /**
     * Populates a list with the set of SVG sources. Each source string is
     * resolved, in order, as: an existing file; an existing file plus a
     * '#' fragment reference; otherwise a URL.
     *
     * @throws SVGConverterException if no source has been specified.
     */
    protected List computeSources() throws SVGConverterException {
        List sources = new ArrayList();

        // Check that at least one source has been specified.
        if (this.sources == null){
            throw new SVGConverterException(ERROR_NO_SOURCES_SPECIFIED);
        }

        for (Object source : this.sources) {
            String sourceString = (String) source;
            File file = new File(sourceString);
            if (file.exists()) {
                sources.add(new SVGConverterFileSource(file));
            } else {
                String[] fileNRef = getFileNRef(sourceString);
                file = new File(fileNRef[0]);
                if (file.exists()) {
                    sources.add(new SVGConverterFileSource(file, fileNRef[1]));
                } else {
                    sources.add(new SVGConverterURLSource(sourceString));
                }
            }
        }

        return sources;
    }

    /**
     * Splits a source string of the form <code>name#ref</code> around its
     * last '#'. Returns a two element array: the name and the reference
     * (the reference is the empty string when there is none).
     */
    public String[] getFileNRef(String fileName){
        int n = fileName.lastIndexOf('#');
        String[] result = {fileName, ""};
        if (n > -1){
            result[0] = fileName.substring(0, n);
            if (n + 1 < fileName.length()){
                result[1] = fileName.substring(n + 1);
            }
        }

        return result;
    }

    // -----------------------------------------------------------------------
    // Internal methods
    // -----------------------------------------------------------------------

    /**
     * Computes the set of transcoding hints to use for the operation.
     * Only options that differ from their "disabled" defaults are added.
     */
    protected Map computeTranscodingHints(){
        Map map = new HashMap();

        // Set AOI. ----------------------------------------------------------
        if (area != null) {
            map.put(ImageTranscoder.KEY_AOI, area);
        }

        // Set image quality. ------------------------------------------------
        if (quality > 0) {
            map.put(JPEGTranscoder.KEY_QUALITY, this.quality);
        }

        // Set image indexed. ------------------------------------------------
        if (indexed != -1) {
            map.put(PNGTranscoder.KEY_INDEXED, indexed);
        }

        // Set image background color. ---------------------------------------
        if (backgroundColor != null){
            map.put(ImageTranscoder.KEY_BACKGROUND_COLOR, backgroundColor);
        }

        // Set image height and width. ---------------------------------------
        if (height > 0) {
            map.put(ImageTranscoder.KEY_HEIGHT, this.height);
        }
        if (width > 0){
            map.put(ImageTranscoder.KEY_WIDTH, this.width);
        }

        // Set maximum height and width. -------------------------------------
        if (maxHeight > 0) {
            map.put(ImageTranscoder.KEY_MAX_HEIGHT, this.maxHeight);
        }
        if (maxWidth > 0){
            map.put(ImageTranscoder.KEY_MAX_WIDTH, this.maxWidth);
        }

        // Set CSS Media.
        if (mediaType != null){
            map.put(ImageTranscoder.KEY_MEDIA, mediaType);
        }

        // Set default font-family.
        if (defaultFontFamily != null) {
            map.put(ImageTranscoder.KEY_DEFAULT_FONT_FAMILY, defaultFontFamily);
        }

        // Set alternateStylesheet.
        if (alternateStylesheet != null){
            map.put(ImageTranscoder.KEY_ALTERNATE_STYLESHEET, alternateStylesheet);
        }

        // Set user stylesheet. A relative value is resolved against the
        // current working directory; if resolution fails the raw string
        // is passed through unchanged.
        if (userStylesheet != null) {
            String userStylesheetURL;
            try {
                URL userDir = new File(System.getProperty("user.dir")).toURI().toURL();
                userStylesheetURL = new ParsedURL(userDir, userStylesheet).toString();
            } catch (Exception e) {
                userStylesheetURL = userStylesheet;
            }
            map.put(ImageTranscoder.KEY_USER_STYLESHEET_URI, userStylesheetURL);
        }

        // Set the user language.
        if (language != null){
            map.put(ImageTranscoder.KEY_LANGUAGE, language);
        }

        // Set the millimeters per pixel.
        if (pixelUnitToMillimeter > 0){
            map.put(ImageTranscoder.KEY_PIXEL_UNIT_TO_MILLIMETER,
                    pixelUnitToMillimeter);
        }

        // Set validation.
        if (validate){
            map.put(ImageTranscoder.KEY_XML_PARSER_VALIDATING, Boolean.TRUE);
        }

        // Set onload.
        if (executeOnload) {
            map.put(ImageTranscoder.KEY_EXECUTE_ONLOAD, Boolean.TRUE);
        }

        // Set snapshot time.
        if (!Float.isNaN(snapshotTime)) {
            map.put(ImageTranscoder.KEY_SNAPSHOT_TIME, snapshotTime);
        }

        // Set allowed scripts.
        if (allowedScriptTypes != null) {
            map.put(ImageTranscoder.KEY_ALLOWED_SCRIPT_TYPES, allowedScriptTypes);
        }

        // Set constrain script origin.
        if (!constrainScriptOrigin) {
            map.put(ImageTranscoder.KEY_CONSTRAIN_SCRIPT_ORIGIN, Boolean.FALSE);
        }

        if (allowExternalResources) {
            map.put(ImageTranscoder.KEY_ALLOW_EXTERNAL_RESOURCES, Boolean.TRUE);
        }

        return map;
    }

    /**
     * Converts the input image to the result image
     * with the given transcoder. If a failure happens, the
     * controller is notified and decides whether to proceed
     * or not. If it decides to proceed, the converter will
     * continue processing other files. Otherwise, it will
     * throw an exception.
     */
    protected void transcode(SVGConverterSource inputFile,
                             File outputFile,
                             Transcoder transcoder)
        throws SVGConverterException {
        TranscoderInput input = null;
        TranscoderOutput output = null;
        OutputStream outputStream = null;

        if (!controller.proceedWithSourceTranscoding(inputFile, outputFile)){
            return;
        }

        try {
            // Refuse to overwrite the input with its own output.
            if (inputFile.isSameAs(outputFile.getPath())) {
                throw new SVGConverterException(ERROR_SOURCE_SAME_AS_DESTINATION,
                                                true /* fatal error */);
            }

            // Compute transcoder input.
            if (!inputFile.isReadable()) {
                throw new SVGConverterException(ERROR_CANNOT_READ_SOURCE,
                                                new Object[]{inputFile.getName()});
            }

            // Probe the source by opening and closing a stream so that an
            // unreachable source is reported before transcoding starts.
            try {
                InputStream in = inputFile.openStream();
                in.close();
            } catch (IOException ioe) {
                throw new SVGConverterException(ERROR_CANNOT_OPEN_SOURCE,
                                                new Object[]{inputFile.getName(),
                                                             ioe.toString()});
            }

            input = new TranscoderInput(inputFile.getURI());

            // Compute transcoder output.
            if (!isWriteable(outputFile)) {
                throw new SVGConverterException(ERROR_OUTPUT_NOT_WRITEABLE,
                                                new Object[]{outputFile.getName()});
            }
            try {
                outputStream = new FileOutputStream(outputFile);
            } catch (FileNotFoundException fnfe) {
                throw new SVGConverterException(ERROR_CANNOT_OPEN_OUTPUT_FILE,
                                                new Object[]{outputFile.getName()});
            }

            output = new TranscoderOutput(outputStream);
        } catch (SVGConverterException e) {
            boolean proceed = controller.proceedOnSourceTranscodingFailure
                (inputFile, outputFile, e.getErrorCode());
            if (proceed){
                e.printStackTrace();
                return;
            } else {
                throw e;
            }
        }

        // Transcode now.
        boolean success = false;
        try {
            transcoder.transcode(input, output);
            success = true;
        } catch (Exception te) {
            te.printStackTrace();
            try {
                // Best effort only: the transcode itself already failed.
                outputStream.flush();
                outputStream.close();
            } catch (IOException ignored) {
            }

            // Report error to the controller. If controller decides
            // to stop, throw an exception.
            boolean proceed = controller.proceedOnSourceTranscodingFailure
                (inputFile, outputFile, ERROR_WHILE_RASTERIZING_FILE);
            if (!proceed){
                throw new SVGConverterException(ERROR_WHILE_RASTERIZING_FILE,
                                                new Object[]{outputFile.getName(),
                                                             te.getMessage()});
            }
        }

        // Close streams and clean up. If the close fails, the output may
        // be truncated, so success is deliberately not reported.
        try {
            outputStream.flush();
            outputStream.close();
        } catch (IOException ioe) {
            return;
        }

        if (success){
            controller.onSourceTranscodingSuccess(inputFile, outputFile);
        }
    }

    /**
     * Gets the name of the result image file.
     *
     * <p>This method modifies the result filename: it changes the existing
     * suffix to correspond to the result file type, and adds the suffix if
     * the file doesn't have one.</p>
     *
     * @param file result file name as a String object.
     *
     * @return name of the file. The directory of the file is not returned.
     */
    protected String getDestinationFile(String file) {
        String newSuffix = destinationType.getExtension();
        // Location of the first char of the suffix in the name.
        int suffixStart = file.lastIndexOf('.');
        String dest = null;

        if (suffixStart != -1) {
            // Replace existing suffix.
            dest = file.substring(0, suffixStart) + newSuffix;
        } else {
            // Add new suffix.
            dest = file + newSuffix;
        }

        return dest;
    }

    /**
     * Creates directories for output files if needed.
     *
     * @param output output file with path.
     *
     * @throws SVGConverterException output directory doesn't exist and it
     *         can't be created.
     */
    protected void createOutputDir(File output)
        throws SVGConverterException {
        // false if the output directory doesn't exist and it can't be
        // created, true otherwise.
        boolean success = true;

        // Create object from output directory.
        String parentDir = output.getParent();
        if (parentDir != null){
            File outputDir = new File(parentDir);
            if (!outputDir.exists()) {
                // Output directory doesn't exist, so create it.
                success = outputDir.mkdirs();
            } else if (!outputDir.isDirectory()) {
                // A file which has the same name as the output directory
                // exists; mkdirs will fail and the error is reported below.
                success = outputDir.mkdirs();
            }
        }

        if (!success) {
            throw new SVGConverterException(ERROR_UNABLE_TO_CREATE_OUTPUT_DIR);
        }
    }

    /**
     * Checks if the application is allowed to write to the file.
     *
     * @param file file to be checked.
     *
     * @return <code>true</code> if the file is writeable and
     *         <code>false</code> otherwise.
     */
    protected boolean isWriteable(File file) {
        if (file.exists()) {
            // Check the existing file.
            if (!file.canWrite()) {
                return false;
            }
        } else {
            // Check the file that doesn't exist yet. The file is writeable
            // if the creation succeeds; note this leaves an empty file
            // behind as a side effect.
            try {
                file.createNewFile();
            } catch (IOException ioe) {
                return false;
            }
        }
        return true;
    }

    // -----------------------------------------------------------------------
    // Inner classes
    // -----------------------------------------------------------------------

    /**
     * Convenience class to filter SVG files.
     */
    public static class SVGFileFilter implements FileFilter {
        public static final String SVG_EXTENSION = ".svg";

        public boolean accept(File file){
            return file != null
                && file.getName().toLowerCase().endsWith(SVG_EXTENSION);
        }
    }
}
apache/sis
37,066
incubator/src/org.apache.sis.storage.gsf/main/org/apache/sis/storage/gsf/specific/Reson7100.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.storage.gsf.specific; import java.lang.invoke.*; import java.lang.foreign.*; import static java.lang.foreign.ValueLayout.*; import static java.lang.foreign.MemoryLayout.PathElement.*; import org.apache.sis.storage.gsf.GSF; import org.apache.sis.storage.gsf.StructClass; /** * * @author Johann Sorel (Geomatys) */ public final class Reson7100 extends StructClass { public static final GroupLayout LAYOUT = MemoryLayout.structLayout( GSF.C_INT.withName("protocol_version"), GSF.C_INT.withName("device_id"), MemoryLayout.sequenceLayout(16, GSF.C_CHAR).withName("reserved_1"), GSF.C_INT.withName("major_serial_number"), GSF.C_INT.withName("minor_serial_number"), GSF.C_INT.withName("ping_number"), GSF.C_INT.withName("multi_ping_seq"), GSF.C_DOUBLE.withName("frequency"), GSF.C_DOUBLE.withName("sample_rate"), GSF.C_DOUBLE.withName("receiver_bandwdth"), GSF.C_DOUBLE.withName("tx_pulse_width"), GSF.C_INT.withName("tx_pulse_type_id"), GSF.C_INT.withName("tx_pulse_envlp_id"), GSF.C_DOUBLE.withName("tx_pulse_envlp_param"), GSF.C_INT.withName("tx_pulse_reserved"), MemoryLayout.paddingLayout(4), GSF.C_DOUBLE.withName("max_ping_rate"), GSF.C_DOUBLE.withName("ping_period"), 
GSF.C_DOUBLE.withName("range"), GSF.C_DOUBLE.withName("power"), GSF.C_DOUBLE.withName("gain"), GSF.C_INT.withName("control_flags"), GSF.C_INT.withName("projector_id"), GSF.C_DOUBLE.withName("projector_steer_angl_vert"), GSF.C_DOUBLE.withName("projector_steer_angl_horz"), GSF.C_DOUBLE.withName("projector_beam_wdth_vert"), GSF.C_DOUBLE.withName("projector_beam_wdth_horz"), GSF.C_DOUBLE.withName("projector_beam_focal_pt"), GSF.C_INT.withName("projector_beam_weighting_window_type"), GSF.C_INT.withName("projector_beam_weighting_window_param"), GSF.C_INT.withName("transmit_flags"), GSF.C_INT.withName("hydrophone_id"), GSF.C_INT.withName("receiving_beam_weighting_window_type"), GSF.C_INT.withName("receiving_beam_weighting_window_param"), GSF.C_INT.withName("receive_flags"), MemoryLayout.paddingLayout(4), GSF.C_DOUBLE.withName("receive_beam_width"), GSF.C_DOUBLE.withName("range_filt_min"), GSF.C_DOUBLE.withName("range_filt_max"), GSF.C_DOUBLE.withName("depth_filt_min"), GSF.C_DOUBLE.withName("depth_filt_max"), GSF.C_DOUBLE.withName("absorption"), GSF.C_DOUBLE.withName("sound_velocity"), GSF.C_DOUBLE.withName("spreading"), GSF.C_CHAR.withName("raw_data_from_7027"), MemoryLayout.sequenceLayout(15, GSF.C_CHAR).withName("reserved_2"), GSF.C_CHAR.withName("sv_source"), GSF.C_CHAR.withName("layer_comp_flag"), MemoryLayout.sequenceLayout(8, GSF.C_CHAR).withName("reserved_3"), MemoryLayout.paddingLayout(6) ).withName("t_gsfReson7100Specific"); public Reson7100(MemorySegment struct) { super(struct); } @Override protected MemoryLayout getLayout() { return LAYOUT; } private static final OfInt protocol_versionLAYOUT = (OfInt)LAYOUT.select(groupElement("protocol_version")); private static final long protocol_version$OFFSET = 0; /** * Getter for field: * {@snippet lang=c : * unsigned int protocol_version * } */ public int protocol_version() { return struct.get(protocol_versionLAYOUT, protocol_version$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int protocol_version 
* } */ public void protocol_version(int fieldValue) { struct.set(protocol_versionLAYOUT, protocol_version$OFFSET, fieldValue); } private static final OfInt device_idLAYOUT = (OfInt)LAYOUT.select(groupElement("device_id")); private static final long device_id$OFFSET = 4; /** * Getter for field: * {@snippet lang=c : * unsigned int device_id * } */ public int device_id() { return struct.get(device_idLAYOUT, device_id$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int device_id * } */ public void device_id(int fieldValue) { struct.set(device_idLAYOUT, device_id$OFFSET, fieldValue); } private static final SequenceLayout reserved_1LAYOUT = (SequenceLayout)LAYOUT.select(groupElement("reserved_1")); private static final long reserved_1$OFFSET = 8; /** * Getter for field: * {@snippet lang=c : * unsigned char reserved_1[16] * } */ public MemorySegment reserved_1() { return struct.asSlice(reserved_1$OFFSET, reserved_1LAYOUT.byteSize()); } /** * Setter for field: * {@snippet lang=c : * unsigned char reserved_1[16] * } */ public void reserved_1(MemorySegment fieldValue) { MemorySegment.copy(fieldValue, 0L, struct, reserved_1$OFFSET, reserved_1LAYOUT.byteSize()); } private static final VarHandle reserved_1$ELEM_HANDLE = reserved_1LAYOUT.varHandle(sequenceElement()); /** * Indexed getter for field: * {@snippet lang=c : * unsigned char reserved_1[16] * } */ public byte reserved_1(long index0) { return (byte)reserved_1$ELEM_HANDLE.get(struct, 0L, index0); } /** * Indexed setter for field: * {@snippet lang=c : * unsigned char reserved_1[16] * } */ public void reserved_1(long index0, byte fieldValue) { reserved_1$ELEM_HANDLE.set(struct, 0L, index0, fieldValue); } private static final OfInt major_serial_numberLAYOUT = (OfInt)LAYOUT.select(groupElement("major_serial_number")); private static final long major_serial_number$OFFSET = 24; /** * Getter for field: * {@snippet lang=c : * unsigned int major_serial_number * } */ public int major_serial_number() { return 
// NOTE(review): machine-generated FFM accessors (jextract-style) for a native C
// record; each field pairs a ValueLayout selected from LAYOUT via groupElement(...)
// with a hard-coded byte offset, and reads/writes the backing MemorySegment
// `struct`. Field names (ping_number, tx_pulse_*, projector_*, hydrophone_id,
// sound_velocity) suggest a sonar ping header — presumably generated from a
// vendor C header; verify against the generating header. Do not hand-edit these
// accessors: the $OFFSET constants must match the native struct packing exactly,
// so regenerate from the source header if the layout changes.
// (The first statement below is the tail of a getter whose signature precedes
// this chunk.)
struct.get(major_serial_numberLAYOUT, major_serial_number$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int major_serial_number * } */ public void major_serial_number(int fieldValue) { struct.set(major_serial_numberLAYOUT, major_serial_number$OFFSET, fieldValue); } private static final OfInt minor_serial_numberLAYOUT = (OfInt)LAYOUT.select(groupElement("minor_serial_number")); private static final long minor_serial_number$OFFSET = 28; /** * Getter for field: * {@snippet lang=c : * unsigned int minor_serial_number * } */ public int minor_serial_number() { return struct.get(minor_serial_numberLAYOUT, minor_serial_number$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int minor_serial_number * } */ public void minor_serial_number(int fieldValue) { struct.set(minor_serial_numberLAYOUT, minor_serial_number$OFFSET, fieldValue); } private static final OfInt ping_numberLAYOUT = (OfInt)LAYOUT.select(groupElement("ping_number")); private static final long ping_number$OFFSET = 32; /** * Getter for field: * {@snippet lang=c : * unsigned int ping_number * } */ public int ping_number() { return struct.get(ping_numberLAYOUT, ping_number$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int ping_number * } */ public void ping_number(int fieldValue) { struct.set(ping_numberLAYOUT, ping_number$OFFSET, fieldValue); } private static final OfInt multi_ping_seqLAYOUT = (OfInt)LAYOUT.select(groupElement("multi_ping_seq")); private static final long multi_ping_seq$OFFSET = 36; /** * Getter for field: * {@snippet lang=c : * unsigned int multi_ping_seq * } */ public int multi_ping_seq() { return struct.get(multi_ping_seqLAYOUT, multi_ping_seq$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int multi_ping_seq * } */ public void multi_ping_seq(int fieldValue) { struct.set(multi_ping_seqLAYOUT, multi_ping_seq$OFFSET, fieldValue); } private static final OfDouble frequencyLAYOUT = 
(OfDouble)LAYOUT.select(groupElement("frequency")); private static final long frequency$OFFSET = 40; /** * Getter for field: * {@snippet lang=c : * double frequency * } */ public double frequency() { return struct.get(frequencyLAYOUT, frequency$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double frequency * } */ public void frequency(double fieldValue) { struct.set(frequencyLAYOUT, frequency$OFFSET, fieldValue); } private static final OfDouble sample_rateLAYOUT = (OfDouble)LAYOUT.select(groupElement("sample_rate")); private static final long sample_rate$OFFSET = 48; /** * Getter for field: * {@snippet lang=c : * double sample_rate * } */ public double sample_rate() { return struct.get(sample_rateLAYOUT, sample_rate$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double sample_rate * } */ public void sample_rate(double fieldValue) { struct.set(sample_rateLAYOUT, sample_rate$OFFSET, fieldValue); } private static final OfDouble receiver_bandwdthLAYOUT = (OfDouble)LAYOUT.select(groupElement("receiver_bandwdth")); private static final long receiver_bandwdth$OFFSET = 56; /** * Getter for field: * {@snippet lang=c : * double receiver_bandwdth * } */ public double receiver_bandwdth() { return struct.get(receiver_bandwdthLAYOUT, receiver_bandwdth$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double receiver_bandwdth * } */ public void receiver_bandwdth(double fieldValue) { struct.set(receiver_bandwdthLAYOUT, receiver_bandwdth$OFFSET, fieldValue); } private static final OfDouble tx_pulse_widthLAYOUT = (OfDouble)LAYOUT.select(groupElement("tx_pulse_width")); private static final long tx_pulse_width$OFFSET = 64; /** * Getter for field: * {@snippet lang=c : * double tx_pulse_width * } */ public double tx_pulse_width() { return struct.get(tx_pulse_widthLAYOUT, tx_pulse_width$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double tx_pulse_width * } */ public void tx_pulse_width(double fieldValue) { struct.set(tx_pulse_widthLAYOUT, 
tx_pulse_width$OFFSET, fieldValue); } private static final OfInt tx_pulse_type_idLAYOUT = (OfInt)LAYOUT.select(groupElement("tx_pulse_type_id")); private static final long tx_pulse_type_id$OFFSET = 72; /** * Getter for field: * {@snippet lang=c : * unsigned int tx_pulse_type_id * } */ public int tx_pulse_type_id() { return struct.get(tx_pulse_type_idLAYOUT, tx_pulse_type_id$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int tx_pulse_type_id * } */ public void tx_pulse_type_id(int fieldValue) { struct.set(tx_pulse_type_idLAYOUT, tx_pulse_type_id$OFFSET, fieldValue); } private static final OfInt tx_pulse_envlp_idLAYOUT = (OfInt)LAYOUT.select(groupElement("tx_pulse_envlp_id")); private static final long tx_pulse_envlp_id$OFFSET = 76; /** * Getter for field: * {@snippet lang=c : * unsigned int tx_pulse_envlp_id * } */ public int tx_pulse_envlp_id() { return struct.get(tx_pulse_envlp_idLAYOUT, tx_pulse_envlp_id$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int tx_pulse_envlp_id * } */ public void tx_pulse_envlp_id(int fieldValue) { struct.set(tx_pulse_envlp_idLAYOUT, tx_pulse_envlp_id$OFFSET, fieldValue); } private static final OfDouble tx_pulse_envlp_paramLAYOUT = (OfDouble)LAYOUT.select(groupElement("tx_pulse_envlp_param")); private static final long tx_pulse_envlp_param$OFFSET = 80; /** * Getter for field: * {@snippet lang=c : * double tx_pulse_envlp_param * } */ public double tx_pulse_envlp_param() { return struct.get(tx_pulse_envlp_paramLAYOUT, tx_pulse_envlp_param$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double tx_pulse_envlp_param * } */ public void tx_pulse_envlp_param(double fieldValue) { struct.set(tx_pulse_envlp_paramLAYOUT, tx_pulse_envlp_param$OFFSET, fieldValue); } private static final OfInt tx_pulse_reservedLAYOUT = (OfInt)LAYOUT.select(groupElement("tx_pulse_reserved")); private static final long tx_pulse_reserved$OFFSET = 88; /** * Getter for field: * {@snippet lang=c : * unsigned int 
tx_pulse_reserved * } */ public int tx_pulse_reserved() { return struct.get(tx_pulse_reservedLAYOUT, tx_pulse_reserved$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int tx_pulse_reserved * } */ public void tx_pulse_reserved(int fieldValue) { struct.set(tx_pulse_reservedLAYOUT, tx_pulse_reserved$OFFSET, fieldValue); } private static final OfDouble max_ping_rateLAYOUT = (OfDouble)LAYOUT.select(groupElement("max_ping_rate")); private static final long max_ping_rate$OFFSET = 96; /** * Getter for field: * {@snippet lang=c : * double max_ping_rate * } */ public double max_ping_rate() { return struct.get(max_ping_rateLAYOUT, max_ping_rate$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double max_ping_rate * } */ public void max_ping_rate(double fieldValue) { struct.set(max_ping_rateLAYOUT, max_ping_rate$OFFSET, fieldValue); } private static final OfDouble ping_periodLAYOUT = (OfDouble)LAYOUT.select(groupElement("ping_period")); private static final long ping_period$OFFSET = 104; /** * Getter for field: * {@snippet lang=c : * double ping_period * } */ public double ping_period() { return struct.get(ping_periodLAYOUT, ping_period$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double ping_period * } */ public void ping_period(double fieldValue) { struct.set(ping_periodLAYOUT, ping_period$OFFSET, fieldValue); } private static final OfDouble rangeLAYOUT = (OfDouble)LAYOUT.select(groupElement("range")); private static final long range$OFFSET = 112; /** * Getter for field: * {@snippet lang=c : * double range * } */ public double range() { return struct.get(rangeLAYOUT, range$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double range * } */ public void range(double fieldValue) { struct.set(rangeLAYOUT, range$OFFSET, fieldValue); } private static final OfDouble powerLAYOUT = (OfDouble)LAYOUT.select(groupElement("power")); private static final long power$OFFSET = 120; /** * Getter for field: * {@snippet lang=c : * double power 
* } */ public double power() { return struct.get(powerLAYOUT, power$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double power * } */ public void power(double fieldValue) { struct.set(powerLAYOUT, power$OFFSET, fieldValue); } private static final OfDouble gainLAYOUT = (OfDouble)LAYOUT.select(groupElement("gain")); private static final long gain$OFFSET = 128; /** * Getter for field: * {@snippet lang=c : * double gain * } */ public double gain() { return struct.get(gainLAYOUT, gain$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double gain * } */ public void gain(double fieldValue) { struct.set(gainLAYOUT, gain$OFFSET, fieldValue); } private static final OfInt control_flagsLAYOUT = (OfInt)LAYOUT.select(groupElement("control_flags")); private static final long control_flags$OFFSET = 136; /** * Getter for field: * {@snippet lang=c : * unsigned int control_flags * } */ public int control_flags() { return struct.get(control_flagsLAYOUT, control_flags$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int control_flags * } */ public void control_flags(int fieldValue) { struct.set(control_flagsLAYOUT, control_flags$OFFSET, fieldValue); } private static final OfInt projector_idLAYOUT = (OfInt)LAYOUT.select(groupElement("projector_id")); private static final long projector_id$OFFSET = 140; /** * Getter for field: * {@snippet lang=c : * unsigned int projector_id * } */ public int projector_id() { return struct.get(projector_idLAYOUT, projector_id$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int projector_id * } */ public void projector_id(int fieldValue) { struct.set(projector_idLAYOUT, projector_id$OFFSET, fieldValue); } private static final OfDouble projector_steer_angl_vertLAYOUT = (OfDouble)LAYOUT.select(groupElement("projector_steer_angl_vert")); private static final long projector_steer_angl_vert$OFFSET = 144; /** * Getter for field: * {@snippet lang=c : * double projector_steer_angl_vert * } */ public double 
projector_steer_angl_vert() { return struct.get(projector_steer_angl_vertLAYOUT, projector_steer_angl_vert$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double projector_steer_angl_vert * } */ public void projector_steer_angl_vert(double fieldValue) { struct.set(projector_steer_angl_vertLAYOUT, projector_steer_angl_vert$OFFSET, fieldValue); } private static final OfDouble projector_steer_angl_horzLAYOUT = (OfDouble)LAYOUT.select(groupElement("projector_steer_angl_horz")); private static final long projector_steer_angl_horz$OFFSET = 152; /** * Getter for field: * {@snippet lang=c : * double projector_steer_angl_horz * } */ public double projector_steer_angl_horz() { return struct.get(projector_steer_angl_horzLAYOUT, projector_steer_angl_horz$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double projector_steer_angl_horz * } */ public void projector_steer_angl_horz(double fieldValue) { struct.set(projector_steer_angl_horzLAYOUT, projector_steer_angl_horz$OFFSET, fieldValue); } private static final OfDouble projector_beam_wdth_vertLAYOUT = (OfDouble)LAYOUT.select(groupElement("projector_beam_wdth_vert")); private static final long projector_beam_wdth_vert$OFFSET = 160; /** * Getter for field: * {@snippet lang=c : * double projector_beam_wdth_vert * } */ public double projector_beam_wdth_vert() { return struct.get(projector_beam_wdth_vertLAYOUT, projector_beam_wdth_vert$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double projector_beam_wdth_vert * } */ public void projector_beam_wdth_vert(double fieldValue) { struct.set(projector_beam_wdth_vertLAYOUT, projector_beam_wdth_vert$OFFSET, fieldValue); } private static final OfDouble projector_beam_wdth_horzLAYOUT = (OfDouble)LAYOUT.select(groupElement("projector_beam_wdth_horz")); private static final long projector_beam_wdth_horz$OFFSET = 168; /** * Getter for field: * {@snippet lang=c : * double projector_beam_wdth_horz * } */ public double projector_beam_wdth_horz() { return 
struct.get(projector_beam_wdth_horzLAYOUT, projector_beam_wdth_horz$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double projector_beam_wdth_horz * } */ public void projector_beam_wdth_horz(double fieldValue) { struct.set(projector_beam_wdth_horzLAYOUT, projector_beam_wdth_horz$OFFSET, fieldValue); } private static final OfDouble projector_beam_focal_ptLAYOUT = (OfDouble)LAYOUT.select(groupElement("projector_beam_focal_pt")); private static final long projector_beam_focal_pt$OFFSET = 176; /** * Getter for field: * {@snippet lang=c : * double projector_beam_focal_pt * } */ public double projector_beam_focal_pt() { return struct.get(projector_beam_focal_ptLAYOUT, projector_beam_focal_pt$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double projector_beam_focal_pt * } */ public void projector_beam_focal_pt(double fieldValue) { struct.set(projector_beam_focal_ptLAYOUT, projector_beam_focal_pt$OFFSET, fieldValue); } private static final OfInt projector_beam_weighting_window_typeLAYOUT = (OfInt)LAYOUT.select(groupElement("projector_beam_weighting_window_type")); private static final long projector_beam_weighting_window_type$OFFSET = 184; /** * Getter for field: * {@snippet lang=c : * unsigned int projector_beam_weighting_window_type * } */ public int projector_beam_weighting_window_type() { return struct.get(projector_beam_weighting_window_typeLAYOUT, projector_beam_weighting_window_type$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int projector_beam_weighting_window_type * } */ public void projector_beam_weighting_window_type(int fieldValue) { struct.set(projector_beam_weighting_window_typeLAYOUT, projector_beam_weighting_window_type$OFFSET, fieldValue); } private static final OfInt projector_beam_weighting_window_paramLAYOUT = (OfInt)LAYOUT.select(groupElement("projector_beam_weighting_window_param")); private static final long projector_beam_weighting_window_param$OFFSET = 188; /** * Getter for field: * {@snippet lang=c : * 
unsigned int projector_beam_weighting_window_param * } */ public int projector_beam_weighting_window_param() { return struct.get(projector_beam_weighting_window_paramLAYOUT, projector_beam_weighting_window_param$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int projector_beam_weighting_window_param * } */ public void projector_beam_weighting_window_param(int fieldValue) { struct.set(projector_beam_weighting_window_paramLAYOUT, projector_beam_weighting_window_param$OFFSET, fieldValue); } private static final OfInt transmit_flagsLAYOUT = (OfInt)LAYOUT.select(groupElement("transmit_flags")); private static final long transmit_flags$OFFSET = 192; /** * Getter for field: * {@snippet lang=c : * unsigned int transmit_flags * } */ public int transmit_flags() { return struct.get(transmit_flagsLAYOUT, transmit_flags$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int transmit_flags * } */ public void transmit_flags(int fieldValue) { struct.set(transmit_flagsLAYOUT, transmit_flags$OFFSET, fieldValue); } private static final OfInt hydrophone_idLAYOUT = (OfInt)LAYOUT.select(groupElement("hydrophone_id")); private static final long hydrophone_id$OFFSET = 196; /** * Getter for field: * {@snippet lang=c : * unsigned int hydrophone_id * } */ public int hydrophone_id() { return struct.get(hydrophone_idLAYOUT, hydrophone_id$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int hydrophone_id * } */ public void hydrophone_id(int fieldValue) { struct.set(hydrophone_idLAYOUT, hydrophone_id$OFFSET, fieldValue); } private static final OfInt receiving_beam_weighting_window_typeLAYOUT = (OfInt)LAYOUT.select(groupElement("receiving_beam_weighting_window_type")); private static final long receiving_beam_weighting_window_type$OFFSET = 200; /** * Getter for field: * {@snippet lang=c : * unsigned int receiving_beam_weighting_window_type * } */ public int receiving_beam_weighting_window_type() { return 
struct.get(receiving_beam_weighting_window_typeLAYOUT, receiving_beam_weighting_window_type$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int receiving_beam_weighting_window_type * } */ public void receiving_beam_weighting_window_type(int fieldValue) { struct.set(receiving_beam_weighting_window_typeLAYOUT, receiving_beam_weighting_window_type$OFFSET, fieldValue); } private static final OfInt receiving_beam_weighting_window_paramLAYOUT = (OfInt)LAYOUT.select(groupElement("receiving_beam_weighting_window_param")); private static final long receiving_beam_weighting_window_param$OFFSET = 204; /** * Getter for field: * {@snippet lang=c : * unsigned int receiving_beam_weighting_window_param * } */ public int receiving_beam_weighting_window_param() { return struct.get(receiving_beam_weighting_window_paramLAYOUT, receiving_beam_weighting_window_param$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int receiving_beam_weighting_window_param * } */ public void receiving_beam_weighting_window_param(int fieldValue) { struct.set(receiving_beam_weighting_window_paramLAYOUT, receiving_beam_weighting_window_param$OFFSET, fieldValue); } private static final OfInt receive_flagsLAYOUT = (OfInt)LAYOUT.select(groupElement("receive_flags")); private static final long receive_flags$OFFSET = 208; /** * Getter for field: * {@snippet lang=c : * unsigned int receive_flags * } */ public int receive_flags() { return struct.get(receive_flagsLAYOUT, receive_flags$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned int receive_flags * } */ public void receive_flags(int fieldValue) { struct.set(receive_flagsLAYOUT, receive_flags$OFFSET, fieldValue); } private static final OfDouble receive_beam_widthLAYOUT = (OfDouble)LAYOUT.select(groupElement("receive_beam_width")); private static final long receive_beam_width$OFFSET = 216; /** * Getter for field: * {@snippet lang=c : * double receive_beam_width * } */ public double receive_beam_width() { 
return struct.get(receive_beam_widthLAYOUT, receive_beam_width$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double receive_beam_width * } */ public void receive_beam_width(double fieldValue) { struct.set(receive_beam_widthLAYOUT, receive_beam_width$OFFSET, fieldValue); } private static final OfDouble range_filt_minLAYOUT = (OfDouble)LAYOUT.select(groupElement("range_filt_min")); private static final long range_filt_min$OFFSET = 224; /** * Getter for field: * {@snippet lang=c : * double range_filt_min * } */ public double range_filt_min() { return struct.get(range_filt_minLAYOUT, range_filt_min$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double range_filt_min * } */ public void range_filt_min(double fieldValue) { struct.set(range_filt_minLAYOUT, range_filt_min$OFFSET, fieldValue); } private static final OfDouble range_filt_maxLAYOUT = (OfDouble)LAYOUT.select(groupElement("range_filt_max")); private static final long range_filt_max$OFFSET = 232; /** * Getter for field: * {@snippet lang=c : * double range_filt_max * } */ public double range_filt_max() { return struct.get(range_filt_maxLAYOUT, range_filt_max$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double range_filt_max * } */ public void range_filt_max(double fieldValue) { struct.set(range_filt_maxLAYOUT, range_filt_max$OFFSET, fieldValue); } private static final OfDouble depth_filt_minLAYOUT = (OfDouble)LAYOUT.select(groupElement("depth_filt_min")); private static final long depth_filt_min$OFFSET = 240; /** * Getter for field: * {@snippet lang=c : * double depth_filt_min * } */ public double depth_filt_min() { return struct.get(depth_filt_minLAYOUT, depth_filt_min$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double depth_filt_min * } */ public void depth_filt_min(double fieldValue) { struct.set(depth_filt_minLAYOUT, depth_filt_min$OFFSET, fieldValue); } private static final OfDouble depth_filt_maxLAYOUT = (OfDouble)LAYOUT.select(groupElement("depth_filt_max")); 
private static final long depth_filt_max$OFFSET = 248; /** * Getter for field: * {@snippet lang=c : * double depth_filt_max * } */ public double depth_filt_max() { return struct.get(depth_filt_maxLAYOUT, depth_filt_max$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double depth_filt_max * } */ public void depth_filt_max(double fieldValue) { struct.set(depth_filt_maxLAYOUT, depth_filt_max$OFFSET, fieldValue); } private static final OfDouble absorptionLAYOUT = (OfDouble)LAYOUT.select(groupElement("absorption")); private static final long absorption$OFFSET = 256; /** * Getter for field: * {@snippet lang=c : * double absorption * } */ public double absorption() { return struct.get(absorptionLAYOUT, absorption$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double absorption * } */ public void absorption(double fieldValue) { struct.set(absorptionLAYOUT, absorption$OFFSET, fieldValue); } private static final OfDouble sound_velocityLAYOUT = (OfDouble)LAYOUT.select(groupElement("sound_velocity")); private static final long sound_velocity$OFFSET = 264; /** * Getter for field: * {@snippet lang=c : * double sound_velocity * } */ public double sound_velocity() { return struct.get(sound_velocityLAYOUT, sound_velocity$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double sound_velocity * } */ public void sound_velocity(double fieldValue) { struct.set(sound_velocityLAYOUT, sound_velocity$OFFSET, fieldValue); } private static final OfDouble spreadingLAYOUT = (OfDouble)LAYOUT.select(groupElement("spreading")); private static final long spreading$OFFSET = 272; /** * Getter for field: * {@snippet lang=c : * double spreading * } */ public double spreading() { return struct.get(spreadingLAYOUT, spreading$OFFSET); } /** * Setter for field: * {@snippet lang=c : * double spreading * } */ public void spreading(double fieldValue) { struct.set(spreadingLAYOUT, spreading$OFFSET, fieldValue); } private static final OfByte raw_data_from_7027LAYOUT = 
(OfByte)LAYOUT.select(groupElement("raw_data_from_7027")); private static final long raw_data_from_7027$OFFSET = 280; /** * Getter for field: * {@snippet lang=c : * unsigned char raw_data_from_7027 * } */ public byte raw_data_from_7027() { return struct.get(raw_data_from_7027LAYOUT, raw_data_from_7027$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned char raw_data_from_7027 * } */ public void raw_data_from_7027(byte fieldValue) { struct.set(raw_data_from_7027LAYOUT, raw_data_from_7027$OFFSET, fieldValue); } private static final SequenceLayout reserved_2LAYOUT = (SequenceLayout)LAYOUT.select(groupElement("reserved_2")); private static final long reserved_2$OFFSET = 281; /** * Getter for field: * {@snippet lang=c : * char reserved_2[15] * } */ public MemorySegment reserved_2() { return struct.asSlice(reserved_2$OFFSET, reserved_2LAYOUT.byteSize()); } /** * Setter for field: * {@snippet lang=c : * char reserved_2[15] * } */ public void reserved_2(MemorySegment fieldValue) { MemorySegment.copy(fieldValue, 0L, struct, reserved_2$OFFSET, reserved_2LAYOUT.byteSize()); } private static final VarHandle reserved_2$ELEM_HANDLE = reserved_2LAYOUT.varHandle(sequenceElement()); /** * Indexed getter for field: * {@snippet lang=c : * char reserved_2[15] * } */ public byte reserved_2(long index0) { return (byte)reserved_2$ELEM_HANDLE.get(struct, 0L, index0); } /** * Indexed setter for field: * {@snippet lang=c : * char reserved_2[15] * } */ public void reserved_2(long index0, byte fieldValue) { reserved_2$ELEM_HANDLE.set(struct, 0L, index0, fieldValue); } private static final OfByte sv_sourceLAYOUT = (OfByte)LAYOUT.select(groupElement("sv_source")); private static final long sv_source$OFFSET = 296; /** * Getter for field: * {@snippet lang=c : * unsigned char sv_source * } */ public byte sv_source() { return struct.get(sv_sourceLAYOUT, sv_source$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned char sv_source * } */ public void sv_source(byte 
fieldValue) { struct.set(sv_sourceLAYOUT, sv_source$OFFSET, fieldValue); } private static final OfByte layer_comp_flagLAYOUT = (OfByte)LAYOUT.select(groupElement("layer_comp_flag")); private static final long layer_comp_flag$OFFSET = 297; /** * Getter for field: * {@snippet lang=c : * unsigned char layer_comp_flag * } */ public byte layer_comp_flag() { return struct.get(layer_comp_flagLAYOUT, layer_comp_flag$OFFSET); } /** * Setter for field: * {@snippet lang=c : * unsigned char layer_comp_flag * } */ public void layer_comp_flag(byte fieldValue) { struct.set(layer_comp_flagLAYOUT, layer_comp_flag$OFFSET, fieldValue); } private static final SequenceLayout reserved_3LAYOUT = (SequenceLayout)LAYOUT.select(groupElement("reserved_3")); private static final long reserved_3$OFFSET = 298; /** * Getter for field: * {@snippet lang=c : * char reserved_3[8] * } */ public MemorySegment reserved_3() { return struct.asSlice(reserved_3$OFFSET, reserved_3LAYOUT.byteSize()); } /** * Setter for field: * {@snippet lang=c : * char reserved_3[8] * } */ public void reserved_3(MemorySegment fieldValue) { MemorySegment.copy(fieldValue, 0L, struct, reserved_3$OFFSET, reserved_3LAYOUT.byteSize()); } private static final VarHandle reserved_3$ELEM_HANDLE = reserved_3LAYOUT.varHandle(sequenceElement()); /** * Indexed getter for field: * {@snippet lang=c : * char reserved_3[8] * } */ public byte reserved_3(long index0) { return (byte)reserved_3$ELEM_HANDLE.get(struct, 0L, index0); } /** * Indexed setter for field: * {@snippet lang=c : * char reserved_3[8] * } */ public void reserved_3(long index0, byte fieldValue) { reserved_3$ELEM_HANDLE.set(struct, 0L, index0, fieldValue); } }
apache/groovy
37,779
src/main/java/org/codehaus/groovy/classgen/asm/sc/StaticInvocationWriter.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.codehaus.groovy.classgen.asm.sc; import org.codehaus.groovy.ast.ClassHelper; import org.codehaus.groovy.ast.ClassNode; import org.codehaus.groovy.ast.ConstructorNode; import org.codehaus.groovy.ast.EnumConstantClassNode; import org.codehaus.groovy.ast.FieldNode; import org.codehaus.groovy.ast.GroovyCodeVisitor; import org.codehaus.groovy.ast.InnerClassNode; import org.codehaus.groovy.ast.MethodNode; import org.codehaus.groovy.ast.Parameter; import org.codehaus.groovy.ast.decompiled.DecompiledClassNode; import org.codehaus.groovy.ast.expr.ArgumentListExpression; import org.codehaus.groovy.ast.expr.ArrayExpression; import org.codehaus.groovy.ast.expr.AttributeExpression; import org.codehaus.groovy.ast.expr.ClassExpression; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.ConstructorCallExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.ast.expr.ExpressionTransformer; import org.codehaus.groovy.ast.expr.MethodCallExpression; import org.codehaus.groovy.ast.expr.PropertyExpression; import org.codehaus.groovy.ast.expr.SpreadExpression; import org.codehaus.groovy.ast.expr.TupleExpression; import 
org.codehaus.groovy.ast.expr.VariableExpression; import org.codehaus.groovy.ast.stmt.ForStatement; import org.codehaus.groovy.classgen.AsmClassGenerator; import org.codehaus.groovy.classgen.Verifier; import org.codehaus.groovy.classgen.asm.BytecodeHelper; import org.codehaus.groovy.classgen.asm.CallSiteWriter; import org.codehaus.groovy.classgen.asm.CompileStack; import org.codehaus.groovy.classgen.asm.ExpressionAsVariableSlot; import org.codehaus.groovy.classgen.asm.InvocationWriter; import org.codehaus.groovy.classgen.asm.MethodCallerMultiAdapter; import org.codehaus.groovy.classgen.asm.OperandStack; import org.codehaus.groovy.classgen.asm.TypeChooser; import org.codehaus.groovy.classgen.asm.VariableSlotLoader; import org.codehaus.groovy.classgen.asm.WriterController; import org.codehaus.groovy.syntax.SyntaxException; import org.codehaus.groovy.transform.sc.StaticCompilationMetadataKeys; import org.codehaus.groovy.transform.sc.StaticCompilationVisitor; import org.codehaus.groovy.transform.sc.TemporaryVariableExpression; import org.codehaus.groovy.transform.stc.ExtensionMethodNode; import org.codehaus.groovy.transform.stc.StaticTypeCheckingSupport; import org.codehaus.groovy.transform.stc.StaticTypeCheckingVisitor; import org.codehaus.groovy.transform.stc.StaticTypesMarker; import org.objectweb.asm.Label; import org.objectweb.asm.MethodVisitor; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import static org.apache.groovy.ast.tools.ClassNodeUtils.formatTypeName; import static org.apache.groovy.ast.tools.ClassNodeUtils.isSubtype; import static org.apache.groovy.ast.tools.ExpressionUtils.isNullConstant; import static org.apache.groovy.ast.tools.ExpressionUtils.isSuperExpression; import static org.apache.groovy.ast.tools.ExpressionUtils.isThisExpression; import static org.apache.groovy.ast.tools.ExpressionUtils.isThisOrSuper; import 
static org.codehaus.groovy.ast.tools.GeneralUtils.args; import static org.codehaus.groovy.ast.tools.GeneralUtils.attrX; import static org.codehaus.groovy.ast.tools.GeneralUtils.callX; import static org.codehaus.groovy.ast.tools.GeneralUtils.classX; import static org.codehaus.groovy.ast.tools.GeneralUtils.ctorX; import static org.codehaus.groovy.ast.tools.GeneralUtils.inSamePackage; import static org.codehaus.groovy.ast.tools.GeneralUtils.nullX; import static org.codehaus.groovy.ast.tools.GeneralUtils.propX; import static org.codehaus.groovy.ast.tools.GeneralUtils.stmt; import static org.codehaus.groovy.ast.tools.GeneralUtils.varX; import static org.codehaus.groovy.ast.tools.GenericsUtils.makeClassSafe0; import static org.codehaus.groovy.transform.trait.Traits.isTrait; import static org.objectweb.asm.Opcodes.ACONST_NULL; import static org.objectweb.asm.Opcodes.ALOAD; import static org.objectweb.asm.Opcodes.CHECKCAST; import static org.objectweb.asm.Opcodes.GOTO; import static org.objectweb.asm.Opcodes.IFNONNULL; import static org.objectweb.asm.Opcodes.IFNULL; import static org.objectweb.asm.Opcodes.INVOKESTATIC; public class StaticInvocationWriter extends InvocationWriter { private final AtomicInteger labelCounter = new AtomicInteger(); public StaticInvocationWriter(final WriterController wc) { super(wc); } Expression getCurrentCall() { return currentCall; } @Override public void writeInvokeConstructor(final ConstructorCallExpression call) { MethodNode mn = call.getNodeMetaData(StaticTypesMarker.DIRECT_METHOD_CALL_TARGET); if (mn == null) { super.writeInvokeConstructor(call); return; } if (writeAICCall(call)) return; ConstructorNode cn; if (mn instanceof ConstructorNode) { cn = (ConstructorNode) mn; } else { cn = new ConstructorNode(mn.getModifiers(), mn.getParameters(), mn.getExceptions(), mn.getCode()); cn.setDeclaringClass(mn.getDeclaringClass()); } ClassNode declaringClass = cn.getDeclaringClass(); ClassNode enclosingClass = controller.getClassNode(); 
List<Expression> argList = call.getArguments() instanceof TupleExpression ? ((TupleExpression) call.getArguments()).getExpressions() : List.of(call.getArguments()); if (!AsmClassGenerator.isMemberDirectlyAccessible(cn.getModifiers(), declaringClass, enclosingClass)) { MethodNode bridge = null; if (call.getNodeMetaData(StaticTypesMarker.PV_METHODS_ACCESS) != null) { Map<MethodNode, MethodNode> bridgeMethods = declaringClass.getNodeMetaData(StaticCompilationMetadataKeys.PRIVATE_BRIDGE_METHODS); if (bridgeMethods != null) bridge = bridgeMethods.get(cn); } if (bridge instanceof ConstructorNode) { cn = (ConstructorNode) bridge; argList = new ArrayList<>(argList); argList.add(0, nullX()); } else { controller.getSourceUnit().addError(new SyntaxException( "Cannot call private constructor for " + declaringClass.toString(false) + " from class " + enclosingClass.toString(false), call )); } } String ownerDescriptor = prepareConstructorCall(cn); int before = controller.getOperandStack().getStackLength(); loadArguments(argList, cn.getParameters()); finnishConstructorCall(cn, ownerDescriptor, controller.getOperandStack().getStackLength() - before); } @Override public void writeSpecialConstructorCall(final ConstructorCallExpression call) { MethodNode mn = call.getNodeMetaData(StaticTypesMarker.DIRECT_METHOD_CALL_TARGET); if (mn == null) { super.writeSpecialConstructorCall(call); return; } controller.getCompileStack().pushInSpecialConstructorCall(); ConstructorNode cn; if (mn instanceof ConstructorNode) { cn = (ConstructorNode) mn; } else { cn = new ConstructorNode(mn.getModifiers(), mn.getParameters(), mn.getExceptions(), mn.getCode()); cn.setDeclaringClass(mn.getDeclaringClass()); } // load "this" controller.getMethodVisitor().visitVarInsn(ALOAD, 0); String ownerDescriptor = BytecodeHelper.getClassInternalName(cn.getDeclaringClass()); TupleExpression args = makeArgumentList(call.getArguments()); int before = controller.getOperandStack().getStackLength(); 
loadArguments(args.getExpressions(), cn.getParameters()); finnishConstructorCall(cn, ownerDescriptor, controller.getOperandStack().getStackLength() - before); // on a special call, there's no object on stack controller.getOperandStack().remove(1); controller.getCompileStack().pop(); } private Expression thisObjectExpression(final ClassNode source, final ClassNode target) { ClassNode thisType = source; while (ClassHelper.isGeneratedFunction(thisType)) { thisType = thisType.getOuterClass(); } Expression thisExpr; if (isTrait(thisType.getOuterClass())) { thisExpr = varX("thisObject"); // GROOVY-7242, GROOVY-8127 } else if (controller.isStaticContext()) { thisExpr = varX("thisObject", makeClassSafe0(ClassHelper.CLASS_Type, thisType.asGenericsType())); } else { thisExpr = varX("thisObject", thisType); // adjust for multiple levels of nesting while (!isSubtype(target, thisType)) { FieldNode thisZero = thisType.getDeclaredField("this$0"); if (thisZero != null) { thisExpr = attrX(thisExpr, "this$0"); thisExpr.setType(thisZero.getType()); thisType = thisType.getOuterClass(); if (thisType != null) continue; } break; } } return thisExpr; } /** * Attempts to make a direct method call on a bridge method, if it exists. */ @Deprecated protected boolean tryBridgeMethod(final MethodNode target, final Expression receiver, final boolean implicitThis, final TupleExpression args) { return tryBridgeMethod(target, receiver, implicitThis, args, null); } /** * Attempts to make a direct method call on a bridge method, if it exists. 
*/ protected boolean tryBridgeMethod(final MethodNode target, final Expression receiver, final boolean implicitThis, final TupleExpression args, final ClassNode thisClass) { ClassNode lookupClassNode; if (target.isProtected()) { lookupClassNode = controller.getClassNode(); while (lookupClassNode != null && !isSubtype(target.getDeclaringClass(), lookupClassNode)) { lookupClassNode = lookupClassNode.getOuterClass(); } if (lookupClassNode == null) { return false; } } else { lookupClassNode = target.getDeclaringClass().redirect(); } Map<MethodNode, MethodNode> bridges = lookupClassNode.getNodeMetaData(StaticCompilationMetadataKeys.PRIVATE_BRIDGE_METHODS); MethodNode bridge = (bridges == null ? null : bridges.get(target)); if (bridge != null) { Expression newReceiver = receiver; if (implicitThis) { if (!controller.isInGeneratedFunction()) { if (!isSubtype(lookupClassNode, thisClass)) newReceiver = propX(classX(lookupClassNode), "this"); } else if (thisClass != null) { newReceiver = thisObjectExpression(thisClass, lookupClassNode); } } ArgumentListExpression newArguments = args(target.isStatic() ? 
nullX() : newReceiver); for (Expression expression : args.getExpressions()) { newArguments.addExpression(expression); } return writeDirectMethodCall(bridge, implicitThis, newReceiver, newArguments); } return false; } @Override protected boolean writeDirectMethodCall(final MethodNode target, final boolean implicitThis, final Expression receiver, final TupleExpression args) { if (target == null) return false; ClassNode enclosingClass = controller.getClassNode(); ClassNode declaringClass = target.getDeclaringClass(); if (target instanceof ExtensionMethodNode) { ExtensionMethodNode emn = (ExtensionMethodNode) target; MethodVisitor mv = controller.getMethodVisitor(); MethodNode mn = emn.getExtensionMethodNode(); Parameter[] parameters = mn.getParameters(); ClassNode returnType = mn.getReturnType(); List<Expression> argumentList = new ArrayList<>(); if (emn.isStaticExtension()) { argumentList.add(nullX()); } else if (!isThisOrSuper(receiver) || !controller.isInGeneratedFunction()) { argumentList.add(receiver); } else { argumentList.add(thisObjectExpression(enclosingClass, declaringClass)); } argumentList.addAll(args.getExpressions()); loadArguments(argumentList, parameters); String owner = BytecodeHelper.getClassInternalName(mn.getDeclaringClass()); String desc = BytecodeHelper.getMethodDescriptor(returnType, parameters); mv.visitMethodInsn(INVOKESTATIC, owner, target.getName(), desc, false); controller.getOperandStack().remove(argumentList.size()); if (ClassHelper.isPrimitiveVoid(returnType)) { if (currentCall != null && currentCall.getNodeMetaData(AsmClassGenerator.ELIDE_EXPRESSION_VALUE) != null) { return true; // do not load value } returnType = ClassHelper.OBJECT_TYPE; mv.visitInsn(ACONST_NULL); } controller.getOperandStack().push(returnType); return true; } if (target == StaticTypeCheckingVisitor.CLOSURE_CALL_VARGS) { // wrap arguments in an array Expression array = new ArrayExpression(ClassHelper.OBJECT_TYPE, args.getExpressions()); return 
super.writeDirectMethodCall(target, implicitThis, receiver, args(array)); } ClassNode receiverType = receiver == null ? ClassHelper.OBJECT_TYPE : controller.getTypeChooser().resolveType(receiver, controller.getThisType()); if (StaticTypeCheckingSupport.isClassClassNodeWrappingConcreteType(receiverType) && !ClassHelper.isClassType(declaringClass)) { receiverType = receiverType.getGenericsTypes()[0].getType(); // GROOVY-11694 } if (AsmClassGenerator.isMemberDirectlyAccessible(target.getModifiers(), declaringClass, enclosingClass) && !(target.isProtected() && !inSamePackage(declaringClass, enclosingClass) && !isSubtype(receiverType, enclosingClass))) { // GROOVY-7325 boolean isThisImplicit = implicitThis; Expression theReceiver = receiver; if (implicitThis && isThisExpression(receiver) && !isSubtype(declaringClass, enclosingClass)) { if (target.isStatic()) { theReceiver = classX(declaringClass); } else if (!controller.isInGeneratedFunction()) { theReceiver = propX(classX(declaringClass), "this"); } else { theReceiver = thisObjectExpression(enclosingClass, declaringClass); } if (!(theReceiver instanceof VariableExpression)) isThisImplicit = false; theReceiver.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, declaringClass); } if (theReceiver != null && !isSuperExpression(receiver) && !isClassWithSuper(receiver) && currentCall.getNodeMetaData(StaticTypesMarker.IMPLICIT_RECEIVER) == null) { // call CHECKCAST in order to avoid calls to castToType (aka dynamic behaviour) theReceiver = new CheckcastReceiverExpression(theReceiver, target); } return super.writeDirectMethodCall(target, isThisImplicit, theReceiver, args); } else if (tryBridgeMethod(target, receiver, implicitThis, args, enclosingClass)) { return true; } else { writeMethodAccessError(target, receiver != null ? 
receiver : args); return false; } } private void writeMethodAccessError(final MethodNode target, final Expression origin) { var descriptor = new java.util.StringJoiner(", ", target.getName() + "(", ")"); for (Parameter parameter : target.getParameters()) { descriptor.add(formatTypeName(parameter.getOriginType())); } String message = "Cannot access method: " + descriptor + " of class: " + formatTypeName(target.getDeclaringClass()); controller.getSourceUnit().addError(new SyntaxException(message, origin)); } private boolean isClassWithSuper(Expression receiver) { if (receiver instanceof PropertyExpression) { PropertyExpression pexp = (PropertyExpression) receiver; return pexp.getObjectExpression() instanceof ClassExpression && "super".equals(pexp.getPropertyAsString()); } return false; } protected static boolean isPrivateBridgeMethodsCallAllowed(final ClassNode receiver, final ClassNode caller) { if (receiver == null) return false; if (receiver.redirect() == caller) return true; if (isPrivateBridgeMethodsCallAllowed(receiver.getOuterClass(), caller)) return true; if (caller.getOuterClass() != null && isPrivateBridgeMethodsCallAllowed(receiver, caller.getOuterClass())) return true; return false; } @Override protected void loadArguments(final List<Expression> argumentList, final Parameter[] parameters) { final int nArgs = argumentList.size(), nPrms = parameters.length; if (nPrms == 0) return; ClassNode classNode = controller.getClassNode(); TypeChooser typeChooser = controller.getTypeChooser(); ClassNode lastArgType = nArgs == 0 ? 
null : typeChooser.resolveType(argumentList.get(nArgs - 1), classNode); ClassNode lastPrmType = parameters[nPrms - 1].getType(); // target is variadic and args are too many or one short or just enough with array compatibility if (lastPrmType.isArray() && (nArgs > nPrms || nArgs == nPrms - 1 || (nArgs == nPrms && !lastArgType.isArray() && (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(lastArgType, lastPrmType.getComponentType()) || ClassHelper.isGStringType(lastArgType) && ClassHelper.isStringType(lastPrmType.getComponentType()))) )) { OperandStack operandStack = controller.getOperandStack(); // first arguments/parameters as usual for (int i = 0; i < nPrms - 1; i += 1) { visitArgument(argumentList.get(i), parameters[i].getType()); } // wrap remaining arguments in an array for last parameter boolean spread = false; List<Expression> lastArgs = new ArrayList<>(); for (int i = nPrms - 1; i < nArgs; i += 1) { Expression arg = argumentList.get(i); lastArgs.add(arg); spread = spread || arg instanceof SpreadExpression; } if (spread) { // GROOVY-10597 controller.getAcg().despreadList(lastArgs, true); operandStack.push(ClassHelper.OBJECT_TYPE.makeArray()); controller.getInvocationWriter().coerce(operandStack.getTopOperand(), lastPrmType); } else { controller.getAcg().visitArrayExpression(new ArrayExpression(lastPrmType.getComponentType(), lastArgs)); } // adjust operand stack if (nArgs == nPrms - 1) { operandStack.remove(1); } else { for (int n = lastArgs.size(); n > 1; n -= 1) operandStack.push(ClassHelper.OBJECT_TYPE); } } else if (nArgs == nPrms) { for (int i = 0; i < nArgs; i += 1) { visitArgument(argumentList.get(i), parameters[i].getType()); } } else { // call with default arguments Expression[] arguments = new Expression[nPrms]; for (int i = 0, j = 0; i < nPrms; i += 1) { Parameter p = parameters[i]; ClassNode pType = p.getType(); Expression a = (j < nArgs ? argumentList.get(j) : null); ClassNode aType = (a == null ? 
null : typeChooser.resolveType(a, classNode)); Expression expression = getInitialExpression(p); // default argument if (expression != null && !isCompatibleArgumentType(aType, pType)) { arguments[i] = expression; } else if (a != null) { arguments[i] = a; j += 1; } else { String errorMessage = "Binding failed for arguments [" + argumentList.stream().map(arg -> typeChooser.resolveType(arg, classNode).toString(false)).collect(Collectors.joining(", ")) + "] and parameters [" + Arrays.stream(parameters).map(prm -> prm.getType().toString(false)).collect(Collectors.joining(", ")) + "]"; controller.getSourceUnit().addFatalError(errorMessage, currentCall); } } for (int i = 0; i < nArgs; i += 1) { visitArgument(arguments[i], parameters[i].getType()); } } } private static Expression getInitialExpression(final Parameter parameter) { Expression initialExpression = parameter.getNodeMetaData(StaticTypesMarker.INITIAL_EXPRESSION); if (initialExpression == null && parameter.hasInitialExpression()) { initialExpression = parameter.getInitialExpression(); } if (initialExpression == null && parameter.getNodeMetaData(Verifier.INITIAL_EXPRESSION) != null) { initialExpression = parameter.getNodeMetaData(Verifier.INITIAL_EXPRESSION); } return initialExpression; } private static boolean isCompatibleArgumentType(final ClassNode argumentType, final ClassNode parameterType) { if (argumentType == null) return false; if (ClassHelper.getWrapper(argumentType).equals(ClassHelper.getWrapper(parameterType))) return true; if (parameterType.isInterface()) return argumentType.implementsInterface(parameterType); if (parameterType.isArray() && argumentType.isArray()) return isCompatibleArgumentType(argumentType.getComponentType(), parameterType.getComponentType()); return ClassHelper.getWrapper(argumentType).isDerivedFrom(ClassHelper.getWrapper(parameterType)); } private void visitArgument(final Expression argument, final ClassNode parameterType) { argument.visit(controller.getAcg()); if 
(!isNullConstant(argument)) { controller.getOperandStack().doGroovyCast(parameterType); } } @Override public void makeCall(final Expression origin, final Expression receiver, final Expression message, final Expression arguments, final MethodCallerMultiAdapter adapter, final boolean safe, final boolean spreadSafe, final boolean implicitThis) { if (origin.getNodeMetaData(StaticTypesMarker.DYNAMIC_RESOLUTION) != null) { StaticTypesWriterController staticController = (StaticTypesWriterController) controller; if (origin instanceof MethodCallExpression) { ((MethodCallExpression) origin).setMethodTarget(null); } InvocationWriter dynamicInvocationWriter = staticController.getRegularInvocationWriter(); dynamicInvocationWriter.makeCall(origin, receiver, message, arguments, adapter, safe, spreadSafe, implicitThis); return; } if (implicitThis && tryImplicitReceiver(origin, message, arguments, adapter, safe, spreadSafe)) { return; } // if call is spread safe, replace it with a for in loop if (spreadSafe && (origin instanceof MethodCallExpression || (origin instanceof PropertyExpression && !controller.getCompileStack().isLHS()))) { // receiver expressions with side-effects should not be re-visited; avoid by using a temporary variable Expression tmpReceiver = receiver; if (!(receiver instanceof VariableExpression || receiver instanceof ConstantExpression)) { tmpReceiver = new TemporaryVariableExpression(receiver); } Label nonNull = new Label(); Label allDone = new Label(); MethodVisitor mv = controller.getMethodVisitor(); OperandStack operandStack = controller.getOperandStack(); boolean produceResultList = origin.getNodeMetaData(AsmClassGenerator.ELIDE_EXPRESSION_VALUE) == null; // if (receiver == null) tmpReceiver.visit(controller.getAcg()); mv.visitJumpInsn(IFNONNULL, nonNull); operandStack.remove(1); // result is null if (produceResultList) mv.visitInsn(ACONST_NULL); mv.visitJumpInsn(GOTO, allDone); // else mv.visitLabel(nonNull); ClassNode resultType = 
origin.getNodeMetaData(StaticTypesMarker.INFERRED_TYPE); ClassNode valuesType = origin.getNodeMetaData(StaticCompilationMetadataKeys.COMPONENT_TYPE); if (valuesType == null) valuesType = StaticTypeCheckingVisitor.inferLoopElementType(resultType); // def result = new ArrayList() ConstructorCallExpression cce = ctorX(StaticCompilationVisitor.ARRAYLIST_CLASSNODE); cce.putNodeMetaData(StaticTypesMarker.DIRECT_METHOD_CALL_TARGET, StaticCompilationVisitor.ARRAYLIST_CONSTRUCTOR); var result = new TemporaryVariableExpression(cce); if (produceResultList) result.visit(controller.getAcg()); ClassNode elementType = StaticTypeCheckingVisitor.inferLoopElementType(controller.getTypeChooser().resolveType(receiver, controller.getClassNode())); Parameter element = new Parameter(elementType, "for$it$" + labelCounter.incrementAndGet()); Expression nextValue; if (origin instanceof MethodCallExpression) { MethodCallExpression oldMCE = (MethodCallExpression) origin; MethodCallExpression newMCE = callX( varX(element), oldMCE.getMethod(), oldMCE.getArguments() ); newMCE.setGenericsTypes(oldMCE.getGenericsTypes()); newMCE.setImplicitThis(false); MethodNode target = oldMCE.getMethodTarget(); newMCE.setMethodTarget(target); if (target == null || !target.isVoidMethod()) newMCE.setNodeMetaData(StaticTypesMarker.INFERRED_TYPE, valuesType); newMCE.setSafe(true); nextValue = newMCE; } else { PropertyExpression oldPE = (PropertyExpression) origin; PropertyExpression newPE = origin instanceof AttributeExpression ? 
new AttributeExpression(varX(element), oldPE.getProperty(), true) : new PropertyExpression(varX(element), oldPE.getProperty(), true); newPE.setImplicitThis(false); newPE.setNodeMetaData(StaticTypesMarker.INFERRED_TYPE, valuesType); nextValue = newPE; } MethodCallExpression addNextValue = callX(result, "add", /*castX(valuesType, */nextValue/*)*/); addNextValue.setImplicitThis(false); addNextValue.setMethodTarget(StaticCompilationVisitor.ARRAYLIST_ADD_METHOD); // for (element in receiver) result.add(element?.method(arguments)); var stmt = new ForStatement( element, tmpReceiver, stmt(produceResultList ? addNextValue : nextValue) ); stmt.visit(controller.getAcg()); if (produceResultList) { result.remove(controller); } // end of if/else mv.visitLabel(allDone); if (tmpReceiver instanceof TemporaryVariableExpression) { ((TemporaryVariableExpression) tmpReceiver).remove(controller); } } else if (safe && origin instanceof MethodCallExpression) { CompileStack compileStack = controller.getCompileStack(); OperandStack operandStack = controller.getOperandStack(); MethodVisitor mv = controller.getMethodVisitor(); int counter = labelCounter.incrementAndGet(); // (receiver != null) ? 
receiver.name(args) : null Label ifnull = compileStack.createLocalLabel("ifnull_" + counter); Label nonull = compileStack.createLocalLabel("nonull_" + counter); Label theEnd = compileStack.createLocalLabel("ending_" + counter); var slot = new ExpressionAsVariableSlot(controller, receiver); slot.visit(controller.getAcg()); operandStack.box(); mv.visitJumpInsn(IFNULL, ifnull); operandStack.remove(1); // receiver consumed mv.visitLabel(nonull); var newMCE = (MethodCallExpression) origin.transformExpression((expression) -> expression); newMCE.setObjectExpression(new VariableSlotLoader(slot.getType(), slot.getIndex(), operandStack)); newMCE.getObjectExpression().setSourcePosition(((MethodCallExpression) origin).getObjectExpression()); newMCE.setSafe(false); int osl = operandStack.getStackLength(); newMCE.visit(controller.getAcg()); compileStack.removeVar(slot.getIndex()); if (operandStack.getStackLength() > osl) { operandStack.box(); // non-void method mv.visitJumpInsn(GOTO, theEnd); mv.visitLabel(ifnull); mv.visitInsn(ACONST_NULL); mv.visitLabel(theEnd); } else { mv.visitLabel(ifnull); } } else { if (origin instanceof AttributeExpression && (adapter == AsmClassGenerator.getField || adapter == AsmClassGenerator.getGroovyObjectField)) { CallSiteWriter callSiteWriter = controller.getCallSiteWriter(); String fieldName = ((AttributeExpression) origin).getPropertyAsString(); if (fieldName != null && callSiteWriter instanceof StaticTypesCallSiteWriter) { ClassNode receiverType = controller.getTypeChooser().resolveType(receiver, controller.getClassNode()); if (((StaticTypesCallSiteWriter) callSiteWriter).makeGetField(receiver, receiverType, fieldName, safe, false)) { return; } } } super.makeCall(origin, receiver, message, arguments, adapter, safe, spreadSafe, implicitThis); } } private boolean tryImplicitReceiver(final Expression origin, final Expression message, final Expression arguments, final MethodCallerMultiAdapter adapter, final boolean safe, final boolean spreadSafe) { 
Object implicitReceiver = origin.getNodeMetaData(StaticTypesMarker.IMPLICIT_RECEIVER); if (implicitReceiver == null && origin instanceof MethodCallExpression) { implicitReceiver = ((MethodCallExpression) origin).getObjectExpression().getNodeMetaData(StaticTypesMarker.IMPLICIT_RECEIVER); } if (implicitReceiver != null) { String[] path = ((String) implicitReceiver).split("\\."); // GROOVY-6021 PropertyExpression pexp = propX(varX("this", ClassHelper.CLOSURE_TYPE), path[0]); pexp.setImplicitThis(true); for (int i = 1, n = path.length; i < n; i += 1) { pexp.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, ClassHelper.CLOSURE_TYPE); pexp = propX(pexp, path[i]); } pexp.putNodeMetaData(StaticTypesMarker.IMPLICIT_RECEIVER, implicitReceiver); origin.removeNodeMetaData(StaticTypesMarker.IMPLICIT_RECEIVER); if (origin instanceof PropertyExpression) { PropertyExpression rewritten = propX(pexp, ((PropertyExpression) origin).getProperty(), ((PropertyExpression) origin).isSafe()); rewritten.setSpreadSafe(((PropertyExpression) origin).isSpreadSafe()); rewritten.visit(controller.getAcg()); rewritten.putNodeMetaData(StaticTypesMarker.INFERRED_TYPE, origin.getNodeMetaData(StaticTypesMarker.INFERRED_TYPE)); } else { makeCall(origin, pexp, message, arguments, adapter, safe, spreadSafe, false); } return true; } return false; } private class CheckcastReceiverExpression extends Expression { private final Expression receiver; private final MethodNode target; public CheckcastReceiverExpression(final Expression receiver, final MethodNode target) { this.receiver = receiver; this.target = target; setType(null); } @Override public Expression transformExpression(final ExpressionTransformer transformer) { return this; } @Override public void visit(final GroovyCodeVisitor visitor) { receiver.visit(visitor); if (visitor instanceof AsmClassGenerator) { ClassNode topOperand = controller.getOperandStack().getTopOperand(); ClassNode type = getType(); if (ClassHelper.isGStringType(topOperand) && 
ClassHelper.isStringType(type)) { // perform regular type conversion controller.getOperandStack().doGroovyCast(type); return; } if (ClassHelper.isPrimitiveType(topOperand) && !ClassHelper.isPrimitiveType(type)) { controller.getOperandStack().box(); } else if (!ClassHelper.isPrimitiveType(topOperand) && ClassHelper.isPrimitiveType(type)) { controller.getOperandStack().doGroovyCast(type); } if (StaticTypeCheckingSupport.implementsInterfaceOrIsSubclassOf(topOperand, type)) return; controller.getMethodVisitor().visitTypeInsn(CHECKCAST, type.isArray() ? BytecodeHelper.getTypeDescription(type) : BytecodeHelper.getClassInternalName(type.getName())); controller.getOperandStack().replace(type); } } @Override public ClassNode getType() { ClassNode type = super.getType(); if (type == null) { if (target instanceof ExtensionMethodNode) { type = ((ExtensionMethodNode) target).getExtensionMethodNode().getDeclaringClass(); } else { type = controller.getTypeChooser().resolveType(receiver, controller.getClassNode()); if (ClassHelper.isPrimitiveType(type)) { type = ClassHelper.getWrapper(type); } ClassNode declaringClass = target.getDeclaringClass(); Class<?> typeClass = type.getClass(); if (typeClass != ClassNode.class && typeClass != InnerClassNode.class && typeClass != DecompiledClassNode.class && typeClass != EnumConstantClassNode.class) { type = declaringClass; // ex: LUB type } if (ClassHelper.isObjectType(declaringClass)) { // checkcast not necessary because Object never evolves // and it prevents a potential ClassCastException if the // delegate of a closure is changed in an SC closure type = ClassHelper.OBJECT_TYPE; } else if (ClassHelper.isObjectType(type)) { // can happen for compiler rewritten code, where type information is missing type = declaringClass; } } setType(type); } return type; } } @Override protected boolean makeCachedCall(final Expression origin, final ClassExpression sender, final Expression receiver, final Expression message, final Expression arguments, 
final MethodCallerMultiAdapter adapter, final boolean safe, final boolean spreadSafe, final boolean implicitThis, final boolean containsSpreadExpression) { return false; } }
apache/pig
37,269
test/org/apache/pig/test/TestPigServer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.net.URL; import java.net.URLClassLoader; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Random; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.pig.ExecType; import org.apache.pig.PigConfiguration; import org.apache.pig.PigServer; import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil; import org.apache.pig.data.DataType; import org.apache.pig.data.Tuple; import 
org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.util.JarManager;
import org.apache.pig.impl.util.PropertiesUtil;
import org.apache.pig.impl.util.Utils;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import com.google.common.io.Files;

/**
 * End-to-end tests for {@link PigServer}: jar registration (local paths,
 * globs, remote HDFS paths, classpath resources), {@code describe}/schema
 * output for the relational operators, property-file loading, temp-dir
 * handling and XML explain output. All tests share one
 * {@link MiniGenericCluster} built in {@link #oneTimeSetup()}.
 */
public class TestPigServer {
    private static Properties properties;
    private static MiniGenericCluster cluster;

    // Per-test scratch directory; also pushed onto the system classpath so
    // resource-based jar lookups can locate files placed inside it.
    private File tempDir;

    @Before
    public void setUp() throws Exception{
        tempDir = Files.createTempDir();
        tempDir.deleteOnExit();
        registerNewResource(tempDir.getAbsolutePath());
    }

    @After
    public void tearDown() throws Exception{
        // Several tests create files inside tempDir (e.g. pig.properties,
        // nested temp paths). File.delete() silently returns false on a
        // non-empty directory, which used to leak the scratch dir; delete
        // the tree bottom-up instead.
        deleteRecursively(tempDir);
    }

    // Best-effort recursive delete of a file or directory tree.
    private static void deleteRecursively(File file) {
        if (file == null) {
            return;
        }
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children) {
                deleteRecursively(child);
            }
        }
        file.delete();
    }

    @BeforeClass
    public static void oneTimeSetup() {
        cluster = MiniGenericCluster.buildCluster();
        properties = cluster.getProperties();
    }

    @AfterClass
    public static void oneTimeTearDown() throws Exception {
        cluster.shutDown();
    }

    private final static String FILE_SEPARATOR = System.getProperty("file.separator");

    // make sure that name is included or not (depending on flag "included")
    // in the given list of stings
    private static void verifyStringContained(List<URL> list, String name, boolean included) {
        int count = 0;
        for (URL url : list) {
            if (url.toString().contains(name)) {
                if (!included) {
                    fail("Included is false, but url [" + url + "] contains name [" + name + "]");
                }
                assertEquals("Too many urls contain name: " + name, 1, ++count);
            }
        }
        if (included) {
            assertEquals("Number of urls that contain name [" + name + "] != 1", 1, count);
        }
    }

    // creates an empty jar file on the cluster's local raw file system
    private static void createFakeJarFile(String location, String name)
            throws IOException {
        createFakeJarFile(location, name,
                FileSystem.getLocal(cluster.getConfiguration()).getRaw());
    }

    // creates an empty jar file on the given file system
    private static void createFakeJarFile(String location, String name,
            FileSystem fs) throws IOException {
        System.err.println("Location: " + location + " name: " + name);
        Path dir = new Path(location);
        fs.mkdirs(dir);
        assertTrue(fs.createNewFile(new Path(dir, name)));
    }

    // dynamically add more resources to the system class loader
    private static void registerNewResource(String file) throws Exception {
        URL urlToAdd = new File(file).toURI().toURL();
        ClassLoader sysLoader = ClassLoader.getSystemClassLoader();
        // Find the class loader below bootstrap class loader
        // It is either the system class loader (first invocation), or the new URLClassLoader added below
        while (sysLoader.getParent().getParent() != null) {
            sysLoader = sysLoader.getParent();
        }
        // Check if this class loader is instance of URLClassLoader
        // On Java 8 and before it is, add resources via addURL method
        // On Java 11 and after it isn't, add a new URLClassLoader with the new resources above it
        if (sysLoader instanceof URLClassLoader) {
            Method addMethod = URLClassLoader.class.getDeclaredMethod("addURL",
                    new Class[]{URL.class});
            addMethod.setAccessible(true);
            addMethod.invoke(sysLoader, new Object[]{urlToAdd});
        } else {
            // NOTE(review): mutating ClassLoader.parent via reflection relies on
            // JDK internals and may require --add-opens on newer JVMs.
            Field parent = ClassLoader.class.getDeclaredField("parent");
            parent.setAccessible(true);
            ClassLoader urlClassLoader = new URLClassLoader(new URL[]{urlToAdd},
                    ClassLoader.getSystemClassLoader().getParent());
            parent.set(ClassLoader.getSystemClassLoader(), urlClassLoader);
        }
    }

    /**
     * The jar file to register is not present
     */
    @Test
    public void testRegisterJarFileNotPresent() throws Throwable {
        // resister a jar file that does not exist
        String jarName = "BadFileNameTestJarNotPresent.jar";

        // jar name is not present to start with
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        verifyStringContained(pig.getPigContext().extraJars, jarName, false);

        boolean raisedException = false;
        try {
            pig.registerJar(jarName);
        } catch (IOException e) {
            raisedException = true;
        }
        assertTrue("registerJar on jarName [" + jarName + "] should have raised an exception",
                raisedException);
        verifyStringContained(pig.getPigContext().extraJars, jarName, false);
    }

    /**
     * Jar file to register is not present in the system resources
     * in this case name of jar file is relative to current working dir
     */
    @Test
    public void testRegisterJarLocalDir() throws Throwable {
        String dir1 = "test1_register_jar_local";
        String dir2 = "test2_register_jar_local";
        String jarLocation = dir1 + FILE_SEPARATOR + dir2 + FILE_SEPARATOR;
        String jarName = "TestRegisterJarLocal.jar";

        createFakeJarFile(jarLocation, jarName);

        PigServer pig = new PigServer(cluster.getExecType(), properties);
        verifyStringContained(pig.getPigContext().extraJars, jarName, false);

        pig.registerJar(jarLocation + jarName);
        verifyStringContained(pig.getPigContext().extraJars, jarName, true);

        // clean-up
        assertTrue((new File(jarLocation + jarName)).delete());
        (new File(dir1 + FILE_SEPARATOR + dir2)).delete();
        (new File(dir1)).delete();
    }

    /**
     * Jar file is located via system resources
     * Test verifies that even with multiple resources matching,
     * only one of them is registered.
     */
    @Test
    public void testRegisterJarFromResources () throws Throwable {
        String dir = "test_register_jar_res_dir";
        String subDir1 = "test_register_jar_res_sub_dir1";
        String subDir2 = "test_register_jar_res_sub_dir2";
        String jarName = "TestRegisterJarFromRes.jar";
        String jarLocation1 = dir + FILE_SEPARATOR + subDir1 + FILE_SEPARATOR;
        String jarLocation2 = dir + FILE_SEPARATOR + subDir2 + FILE_SEPARATOR;

        createFakeJarFile(jarLocation1, jarName);
        createFakeJarFile(jarLocation2, jarName);

        PigServer pig = new PigServer(cluster.getExecType(), properties);
        verifyStringContained(pig.getPigContext().extraJars, jarName, false);

        registerNewResource(jarLocation1);
        registerNewResource(jarLocation2);

        pig.registerJar(jarName);
        verifyStringContained(pig.getPigContext().extraJars, jarName, true);

        // clean-up
        assertTrue((new File(jarLocation1 + jarName)).delete());
        assertTrue((new File(jarLocation2 + jarName)).delete());
        (new File(jarLocation1)).delete();
        (new File(jarLocation2)).delete();
        (new File(dir)).delete();
    }

    /**
     * Use a resource inside a jar file.
     * Verify that the containing jar file is registered correctly.
     * @throws Exception
     */
    @Test
    public void testRegisterJarResourceInJar() throws Throwable {
        String dir = "test_register_jar_res_in_jar";
        String subDir = "sub_dir";
        String jarName = "TestRegisterJarNonEmpty.jar";
        String className = "TestRegisterJar";
        String javaSrc = "package " + subDir + "; class " + className + " { }";

        // create dirs
        (new File(dir + FILE_SEPARATOR + subDir)).mkdirs();

        // generate java file
        FileOutputStream outStream = new FileOutputStream(
                new File(dir + FILE_SEPARATOR + subDir + FILE_SEPARATOR + className + ".java"));
        OutputStreamWriter outWriter = new OutputStreamWriter(outStream);
        outWriter.write(javaSrc);
        outWriter.close();

        // compile
        int status;
        status = Util.executeJavaCommand("javac " + dir + FILE_SEPARATOR + subDir +
                FILE_SEPARATOR + className + ".java");
        assertEquals(0, status);

        // remove src file
        (new File(dir + FILE_SEPARATOR + subDir + FILE_SEPARATOR + className + ".java")).delete();

        // generate jar file
        status = Util.executeJavaCommand("jar -cf " + dir + FILE_SEPARATOR + jarName + " " +
                "-C " + dir + " " + subDir);
        assertEquals(0, status);

        // remove class file and sub_dir
        (new File(dir + FILE_SEPARATOR + subDir + FILE_SEPARATOR + className + ".class")).delete();
        (new File(dir + FILE_SEPARATOR + subDir)).delete();

        // register resource
        registerNewResource(dir + FILE_SEPARATOR + jarName);

        // load the specific resource
        boolean exceptionRaised = false;
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        try {
            pig.registerJar("sub_dir/TestRegisterJar.class");
        } catch (IOException e) {
            exceptionRaised = true;
        }

        // verify proper jar file is located
        assertFalse(exceptionRaised);
        verifyStringContained(pig.getPigContext().extraJars, jarName, true);

        // clean up Jar file and test dir
        (new File(dir + FILE_SEPARATOR + jarName)).delete();
        (new File(dir)).delete();
    }

    @Test
    public void testRegisterJarGlobbingRelative() throws Throwable {
        String dir = "test1_register_jar_globbing_relative";
        String jarLocation = dir + FILE_SEPARATOR;
        String jar1Name = "TestRegisterJarGlobbing1.jar";
        String jar2Name = "TestRegisterJarGlobbing2.jar";

        createFakeJarFile(jarLocation, jar1Name);
        createFakeJarFile(jarLocation, jar2Name);

        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerJar(jarLocation + "TestRegisterJarGlobbing*.jar");
        verifyStringContained(pig.getPigContext().extraJars, jar1Name, true);
        verifyStringContained(pig.getPigContext().extraJars, jar2Name, true);

        // clean-up
        assertTrue((new File(jarLocation + jar1Name)).delete());
        assertTrue((new File(jarLocation + jar2Name)).delete());
        (new File(dir)).delete();
    }

    @Test
    public void testRegisterJarGlobbingAbsolute() throws Throwable {
        String dir = "test1_register_jar_globbing_absolute";
        String jarLocation = dir + FILE_SEPARATOR;
        String jar1Name = "TestRegisterJarGlobbing1.jar";
        String jar2Name = "TestRegisterJarGlobbing2.jar";

        createFakeJarFile(jarLocation, jar1Name);
        createFakeJarFile(jarLocation, jar2Name);

        String currentDir = System.getProperty("user.dir");
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerJar(new File(currentDir, dir) + FILE_SEPARATOR + "TestRegisterJarGlobbing*.jar");
        verifyStringContained(pig.getPigContext().extraJars, jar1Name, true);
        verifyStringContained(pig.getPigContext().extraJars, jar2Name, true);

        // clean-up
        assertTrue((new File(jarLocation + jar1Name)).delete());
        assertTrue((new File(jarLocation + jar2Name)).delete());
        (new File(dir)).delete();
    }

    @Test
    public void testRegisterRemoteGlobbingJar() throws Throwable {
        String dir = "test1_register_remote_jar_globbing";
        String jarLocation = dir + FILE_SEPARATOR;
        String jar1Name = "TestRegisterRemoteJarGlobbing1.jar";
        String jar2Name = "TestRegisterRemoteJarGlobbing2.jar";

        FileSystem fs = cluster.getFileSystem();
        createFakeJarFile(jarLocation, jar1Name, fs);
        createFakeJarFile(jarLocation, jar2Name, fs);

        // find the absolute path for the directory so that it does not
        // depend on configuration
        String absPath = fs.getFileStatus(new Path(jarLocation)).getPath().toString();

        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerJar(absPath + FILE_SEPARATOR + "TestRegister{Remote}Jar*.jar");

        verifyStringContained(pig.getPigContext().extraJars, jar1Name, true);
        verifyStringContained(pig.getPigContext().extraJars, jar2Name, true);

        // clean-up
        assertTrue(fs.delete(new Path(jarLocation), true));
    }

    @Test
    public void testDescribeLoad() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        Schema dumpedSchema = pig.dumpSchema("a");
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: float,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeFilter() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = filter a by field1 > 10;");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: float,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeDistinct() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = distinct a ;");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: float,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeSort() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = order a by * desc;");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: float,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeLimit() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = limit a 10;");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: float,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeForeach() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = foreach a generate field1 + 10;");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = new Schema(new Schema.FieldSchema(null, DataType.INTEGER));
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeForeachFail() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = foreach a generate field1 + 10;");
        try {
            pig.dumpSchema("c");
            fail("Error expected");
        } catch (Exception e) {
            assertTrue(e.getMessage().contains("Unable to describe schema for alias c"));
        }
    }

    @Test
    public void testDescribeForeachNoSchema() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' ;");
        pig.registerQuery("b = foreach a generate *;");
        Schema dumpedSchema = pig.dumpSchema("b");
        assertNull(dumpedSchema);
    }

    @Test
    public void testDescribeCogroup() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = load 'b' as (field4, field5: double, field6: chararray );");
        pig.registerQuery("c = cogroup a by field1, b by field4;");
        Schema dumpedSchema = pig.dumpSchema("c");
        Schema expectedSchema = Utils.getSchemaFromString(
                "group:int,a:{(field1:int,field2:float,field3:chararray)},b:{(field4:bytearray,field5:double,field6:chararray)}");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeCross() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = load 'b' as (field4, field5: double, field6: chararray );");
        pig.registerQuery("c = cross a, b;");
        Schema dumpedSchema = pig.dumpSchema("c");
        Schema expectedSchema = Utils.getSchemaFromString(
                "a::field1: int,a::field2: float,a::field3: chararray,b::field4: bytearray,b::field5: double,b::field6: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeJoin() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = load 'b' as (field4, field5: double, field6: chararray );");
        pig.registerQuery("c = join a by field1, b by field4;");
        Schema dumpedSchema = pig.dumpSchema("c");
        Schema expectedSchema = Utils.getSchemaFromString(
                "a::field1: int,a::field2: float,a::field3: chararray,b::field4: bytearray,b::field5: double,b::field6: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeUnion() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: float, field3: chararray );");
        pig.registerQuery("b = load 'b' as (field4, field5: double, field6: chararray );");
        pig.registerQuery("c = union a, b;");
        Schema dumpedSchema = pig.dumpSchema("c");
        // int/bytearray and float/double merge to int and double respectively
        Schema expectedSchema = Utils.getSchemaFromString("field1: int,field2: double,field3: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    @Test
    public void testDescribeTuple2Elem() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int, field2: int, field3: int );");
        pig.registerQuery("b = foreach a generate field1, (field2, field3);");
        Schema dumpedSchema = pig.dumpSchema("b");
        assertTrue(dumpedSchema.getField(0).type == DataType.INTEGER);
        assertTrue(dumpedSchema.getField(1).type == DataType.TUPLE);
    }

    @Test
    public void testDescribeComplex() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (site: chararray, count: int, itemCounts: bag { itemCountsTuple: tuple (type: chararray, typeCount: int, f: float, m: map[]) } ) ;");
        pig.registerQuery("b = foreach a generate site, count, FLATTEN(itemCounts);");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString(
                "site: chararray,count: int," +
                "itemCounts::type: chararray,itemCounts::typeCount: int," +
                "itemCounts::f: float,itemCounts::m: map[ ]");
        assertEquals(expectedSchema, dumpedSchema);
    }

    // PIG-5243
    @Test
    public void testDescribeAsClause() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (field1: int);");
        pig.registerQuery("b = FOREACH a generate field1 as (new_field:chararray);");
        Schema dumpedSchema = pig.dumpSchema("b");
        Schema expectedSchema = Utils.getSchemaFromString("new_field: chararray");
        assertEquals(expectedSchema, dumpedSchema);
    }

    // Shared driver for the PIG-3581 scalar tests: when useScalar is true an
    // inner alias C shadows the outer scalar C inside the nested foreach.
    private void registerScalarScript(boolean useScalar, String expectedSchemaStr) throws IOException {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("A = load 'adata' AS (a: int, b: int);");
        //scalar
        pig.registerQuery("C = FOREACH A GENERATE *;");
        String overrideScalar = useScalar ? "C = FILTER A BY b % 2 == 0; " : "";
        pig.registerQuery("B = FOREACH (GROUP A BY a) { " +
                overrideScalar +
                "D = FILTER A BY b % 2 == 1;" +
                "GENERATE group AS a, A.b AS every, C.b AS even, D.b AS odd;" +
                "};");
        Schema dumpedSchema = pig.dumpSchema("B");
        Schema expectedSchema = Utils.getSchemaFromString(expectedSchemaStr);
        assertEquals(expectedSchema, dumpedSchema);
    }

    // PIG-3581
    @Test
    public void testScalarPrecedence() throws Throwable {
        registerScalarScript(true, "a: int,every: {(b: int)},even: {(b: int)},odd: {(b: int)}");
    }

    // PIG-3581
    @Test
    public void testScalarResolution() throws Throwable {
        registerScalarScript(false, "a: int,every: {(b: int)},even: int,odd: {(b: int)}");
    }

    @Test
    public void testExplainXmlComplex() throws Throwable {
        // TODO: Explain XML output is not supported in non-MR mode. Remove the
        // following condition once it's implemented in Tez.
        String execType = cluster.getExecType().toString().toLowerCase();
        Assume.assumeTrue("Skip this test for TEZ",
                Util.isMapredExecType(cluster.getExecType()) ||
                Util.isSparkExecType(cluster.getExecType()));
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a' as (site: chararray, count: int, itemCounts: bag { itemCountsTuple: tuple (type: chararray, typeCount: int, f: float, m: map[]) } ) ;");
        pig.registerQuery("b = foreach a generate site, count, FLATTEN(itemCounts);");
        pig.registerQuery("c = group b by site;");
        pig.registerQuery("d = foreach c generate FLATTEN($1);");
        pig.registerQuery("e = group d by $2;");

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);
        pig.explain("e", "xml", true, false, ps, ps, null, null);

        ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(bais);

        //Verify Logical and Physical Plans aren't supported.
        NodeList logicalPlan = doc.getElementsByTagName("logicalPlan");
        assertEquals(1, logicalPlan.getLength());
        assertTrue(logicalPlan.item(0).getTextContent().contains("Not Supported"));
        NodeList physicalPlan = doc.getElementsByTagName("physicalPlan");
        assertEquals(1, physicalPlan.getLength());
        assertTrue(physicalPlan.item(0).getTextContent().contains("Not Supported"));

        if (execType.equals(ExecType.MAPREDUCE.name().toLowerCase())){
            verifyExplainXmlComplexMR(doc);
        } else if (execType.equals(MiniGenericCluster.EXECTYPE_SPARK)){
            verifyExplainXmlComplexSpark(doc);
        }
    }

    // Spark-specific assertions for testExplainXmlComplex
    private void verifyExplainXmlComplexSpark(Document doc) {
        NodeList stores = doc.getElementsByTagName("POStore");
        assertEquals(1, stores.getLength());

        NodeList groups = doc.getElementsByTagName("POJoinGroupSpark");
        assertEquals(2, groups.getLength());

        Node innerGroup = groups.item(1);
        NodeList groupChildren = innerGroup.getChildNodes();

        int foreachCount = 0;
        int castCount = 0;
        int loadCount = 0;
        for (int i = 0; i < groupChildren.getLength(); i++) {
            Node node = groupChildren.item(i);
            if (node.getNodeName().equals("POForEach")){
                ++foreachCount;
                NodeList foreachNodes = node.getChildNodes();
                for (int j = 0; j < foreachNodes.getLength(); j++) {
                    Node innerNode = foreachNodes.item(j);
                    if (innerNode.getNodeName().equals("alias")){
                        assertEquals("b", innerNode.getTextContent());
                    } else if (innerNode.getNodeName().equals("POCast")){
                        ++castCount;
                    } else if (innerNode.getNodeName().equals("POLoad")) {
                        ++loadCount;
                    }
                }
            }
        }
        assertEquals(1, foreachCount);
        assertEquals(3, castCount);
        assertEquals(1, loadCount);
    }

    // MapReduce-specific assertions for testExplainXmlComplex
    private void verifyExplainXmlComplexMR(Document doc) {
        //Verify we have two loads and one is temporary
        NodeList loads = doc.getElementsByTagName("POLoad");
        assertEquals(2, loads.getLength());

        boolean sawTempLoad = false;
        boolean sawNonTempLoad = false;
        for (int i = 0; i < loads.getLength(); i++) {
            Boolean isTempLoad = null;
            boolean hasAlias = false;

            Node poLoad = loads.item(i);
            NodeList children = poLoad.getChildNodes();
            for (int j = 0; j < children.getLength(); j++) {
                Node child = children.item(j);
                if (child.getNodeName().equals("alias")) {
                    hasAlias = true;
                }
                if (child.getNodeName().equals("isTmpLoad")) {
                    if (child.getTextContent().equals("false")) {
                        isTempLoad = false;
                    } else if (child.getTextContent().equals("true")) {
                        isTempLoad = true;
                    }
                }
            }

            if (isTempLoad == null) {
                fail("POLoad elements should have isTmpLoad child node.");
            } else if (isTempLoad && hasAlias) {
                fail("Temp loads should not have aliases");
            } else if (!isTempLoad && !hasAlias) {
                fail("Non temporary loads should be associated with alias.");
            }

            sawTempLoad = sawTempLoad || isTempLoad;
            sawNonTempLoad = sawNonTempLoad || !isTempLoad;
        }
        assertTrue(sawTempLoad && sawNonTempLoad);
    }

    @Test
    public void testRegisterRemoteScript() throws Throwable {
        String scriptName = "script.py";
        File scriptFile = File.createTempFile("tmp", "");
        PrintWriter pw = new PrintWriter(new FileWriter(scriptFile));
        pw.println("@outputSchema(\"word:chararray\")\ndef helloworld():\n return 'Hello, World'");
        pw.close();

        FileSystem fs = cluster.getFileSystem();
        fs.copyFromLocalFile(new Path(scriptFile.getAbsolutePath()), new Path(scriptName));

        // find the absolute path for the directory so that it does not
        // depend on configuration
        String absPath = fs.getFileStatus(new Path(scriptName)).getPath().toString();

        Util.createInputFile(cluster, "testRegisterRemoteScript_input", new String[]{"1", "2"});
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerCode(absPath, "jython", "pig");
        pig.registerQuery("a = load 'testRegisterRemoteScript_input';");
        pig.registerQuery("b = foreach a generate pig.helloworld($0);");
        Iterator<Tuple> iter = pig.openIterator("b");

        assertTrue(iter.hasNext());
        Tuple t = iter.next();
        assertTrue(t.size() > 0);
        assertEquals("Hello, World", t.get(0));

        assertTrue(iter.hasNext());
        t = iter.next();
        assertTrue(t.size() > 0);
        assertEquals("Hello, World", t.get(0));

        assertFalse(iter.hasNext());
    }

    @Test
    public void testPigProperties() throws Throwable {
        File propertyFile = new File(tempDir, "pig.properties");
        propertyFile.deleteOnExit();
        TestPigServer.registerNewResource(propertyFile.getAbsolutePath());
        File cliPropertyFile = new File("commandLine_pig.properties");
        cliPropertyFile.deleteOnExit();
        TestPigServer.registerNewResource(cliPropertyFile.getAbsolutePath());

        Properties properties = PropertiesUtil.loadDefaultProperties();
        assertEquals("40000000", properties.getProperty("pig.spill.gc.activation.size"));
        assertNull(properties.getProperty("test123"));

        PrintWriter out = new PrintWriter(new FileWriter(propertyFile));
        out.println("test123=properties");
        out.close();

        properties = PropertiesUtil.loadDefaultProperties();
        assertEquals("properties", properties.getProperty("test123"));

        out = new PrintWriter(new FileWriter(cliPropertyFile));
        out.println("test123=cli_properties");
        out.close();

        properties = PropertiesUtil.loadDefaultProperties();
        PropertiesUtil.loadPropertiesFromFile(properties, "commandLine_pig.properties");
        assertEquals("cli_properties", properties.getProperty("test123"));

        propertyFile.delete();
        cliPropertyFile.delete();
    }

    @Test
    public void testPigTempDir() throws Throwable {
        Properties properties = PropertiesUtil.loadDefaultProperties();
        File pigTempDir = new File(tempDir, FILE_SEPARATOR + "tmp" + FILE_SEPARATOR + "test");
        properties.put("pig.temp.dir", pigTempDir.getPath());
        PigContext pigContext = new PigContext(ExecType.LOCAL, properties);
        pigContext.connect();
        FileLocalizer.setInitialized(false);

        String tempPath = FileLocalizer.getTemporaryPath(pigContext).toString();
        Path path = new Path(tempPath);
        assertTrue(tempPath.startsWith(pigTempDir.toURI().toString()));

        FileSystem fs = FileSystem.get(path.toUri(),
                ConfigurationUtil.toConfiguration(pigContext.getProperties()));
        FileStatus status = fs.getFileStatus(path.getParent());
        // Temporary root dir should have 700 as permission
        assertEquals("rwx------", status.getPermission().toString());
        pigTempDir.delete();
        FileLocalizer.setInitialized(false);
    }

    @Test
    public void testUniquePigTempDir() throws Throwable {
        Properties properties = PropertiesUtil.loadDefaultProperties();
        File pigTempDir = new File(tempDir, FILE_SEPARATOR + "tmp" + FILE_SEPARATOR + "test");
        properties.put("pig.temp.dir", pigTempDir.getPath());
        PigContext pigContext = new PigContext(ExecType.LOCAL, properties);
        pigContext.connect();
        FileLocalizer.setInitialized(false);

        // Re-seeding with the same value must still yield distinct temp paths
        Random r = new Random(5);
        FileLocalizer.setR(r);
        String tempPath1 = FileLocalizer.getTemporaryPath(pigContext).toString();

        FileLocalizer.setInitialized(false);
        r = new Random(5);
        FileLocalizer.setR(r);
        String tempPath2 = FileLocalizer.getTemporaryPath(pigContext).toString();

        assertFalse(tempPath1.toString().equals(tempPath2.toString()));

        // cleanup
        pigTempDir.delete();
        FileLocalizer.setInitialized(false);
    }

    @Test
    public void testDescribeForEachFlatten() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.registerQuery("a = load 'a';");
        pig.registerQuery("b = group a by $0;");
        pig.registerQuery("c = foreach b generate flatten(a);");
        Schema s = pig.dumpSchema("c");
        assertNull(s);
    }

    @Test // PIG-2059
    public void test1() throws Throwable {
        PigServer pig = new PigServer(cluster.getExecType(), properties);
        pig.setValidateEachStatement(true);
        pig.registerQuery("A = load 'x' as (u, v);");
        try {
            pig.registerQuery("B = foreach A generate $2;");
            fail("Query is supposed to fail.");
        } catch(FrontendException ex) {
            String msg = "Out of bound access. " +
                    "Trying to access non-existent column: 2";
            Util.checkMessageInException(ex, msg);
        }
    }

    @Test
    public void testDefaultPigProperties() throws Throwable {
        //Test with PigServer
        PigServer pigServer = new PigServer(cluster.getExecType());
        Properties properties = pigServer.getPigContext().getProperties();

        assertEquals("999", properties.getProperty("pig.exec.reducers.max"));
        assertEquals("true", properties.getProperty("aggregate.warning"));
        assertEquals("true", properties.getProperty(PigConfiguration.PIG_OPT_MULTIQUERY));
        assertEquals("false", properties.getProperty("stop.on.failure"));

        //Test with properties file
        File propertyFile = new File(tempDir, "pig.properties");

        properties = PropertiesUtil.loadDefaultProperties();

        assertEquals("999", properties.getProperty("pig.exec.reducers.max"));
        assertEquals("true", properties.getProperty("aggregate.warning"));
        assertEquals("true", properties.getProperty(PigConfiguration.PIG_OPT_MULTIQUERY));
        assertEquals("false", properties.getProperty("stop.on.failure"));

        PrintWriter out = new PrintWriter(new FileWriter(propertyFile));
        out.println("aggregate.warning=false");
        out.println("opt.multiquery=false");
        out.println("stop.on.failure=true");
        out.close();

        properties = PropertiesUtil.loadDefaultProperties();
        assertEquals("false", properties.getProperty("aggregate.warning"));
        assertEquals("false", properties.getProperty(PigConfiguration.PIG_OPT_MULTIQUERY));
        assertEquals("true", properties.getProperty("stop.on.failure"));

        propertyFile.delete();
    }

    @Test // See PIG-4109
    public void testRegisterJarRemoteScript() throws Throwable {
        if (Util.WINDOWS) {
            properties.setProperty("pig.jars.relative.to.dfs", "true");
            String jarName = JarManager.findContainingJar(org.codehaus.jackson.JsonParser.class);
            PigServer pig = new PigServer(cluster.getExecType(), properties);
            pig.registerJar(jarName);
        }
    }
}
apache/hadoop
37,214
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Keiron Liddle, Aftex Software * <keiron@aftexsw.com> to whom the Ant project is very grateful for his * great code. */ package org.apache.hadoop.io.compress.bzip2; import java.io.BufferedInputStream; import java.io.InputStream; import java.io.IOException; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.io.compress.SplittableCompressionCodec.READ_MODE; /** * An input stream that decompresses from the BZip2 format (without the file * header chars) to be read as any other stream. * * <p> * The decompression requires large amounts of memory. Thus you should call the * {@link #close() close()} method as soon as possible, to force * <code>CBZip2InputStream</code> to release the allocated memory. See * {@link CBZip2OutputStream CBZip2OutputStream} for information about memory * usage. * </p> * * <p> * <code>CBZip2InputStream</code> reads bytes from the compressed source stream via * the single byte {@link java.io.InputStream#read() read()} method exclusively. * Thus you should consider to use a buffered source stream. * </p> * * <p> * This Ant code was enhanced so that it can de-compress blocks of bzip2 data. 
* Current position in the stream is an important statistic for Hadoop. For * example in LineRecordReader, we solely depend on the current position in the * stream to know about the progress. The notion of position becomes complicated * for compressed files. The Hadoop splitting is done in terms of compressed * file. But a compressed file deflates to a large amount of data. So we have * handled this problem in the following way. * * On object creation time, we find the next block start delimiter. Once such a * marker is found, the stream stops there (we discard any read compressed data * in this process) and the position is reported as the beginning of the block * start delimiter. At this point we are ready for actual reading * (i.e. decompression) of data. * * The subsequent read calls give out data. The position is updated when the * caller of this class has read off the current block + 1 bytes. In between the * block reading, position is not updated. (We can only update the position on * block boundaries). * </p> * * <p> * Instances of this class are not threadsafe. * </p> */ public class CBZip2InputStream extends InputStream implements BZip2Constants { public static final long BLOCK_DELIMITER = 0X314159265359L;// start of block public static final long EOS_DELIMITER = 0X177245385090L;// end of bzip2 stream private static final int DELIMITER_BIT_LENGTH = 48; READ_MODE readMode = READ_MODE.CONTINUOUS; // The variable records the current advertised position of the stream. private long reportedBytesReadFromCompressedStream = 0L; // The following variable keep record of compressed bytes read. private long bytesReadFromCompressedStream = 0L; private boolean lazyInitialization = false; private byte array[] = new byte[1]; /** * Index of the last char in the block, so the block size == last + 1. */ private int last; /** * Index in zptr[] of original string after sorting. */ private int origPtr; /** * always: in the range 0 .. 9. 
The current block size is 100000 * this * number. */ private int blockSize100k; private boolean blockRandomised = false; private long bsBuff; private long bsLive; private final CRC crc = new CRC(); private int nInUse; private BufferedInputStream in; private int currentChar = -1; /** * A state machine to keep track of current state of the de-coder * */ public enum STATE { EOF, START_BLOCK_STATE, RAND_PART_A_STATE, RAND_PART_B_STATE, RAND_PART_C_STATE, NO_RAND_PART_A_STATE, NO_RAND_PART_B_STATE, NO_RAND_PART_C_STATE, NO_PROCESS_STATE }; private STATE currentState = STATE.START_BLOCK_STATE; private int storedBlockCRC, storedCombinedCRC; private int computedBlockCRC, computedCombinedCRC; private boolean skipResult = false;// used by skipToNextMarker private boolean skipDecompression = false; // Variables used by setup* methods exclusively private int su_count; private int su_ch2; private int su_chPrev; private int su_i2; private int su_j2; private int su_rNToGo; private int su_rTPos; private int su_tPos; private char su_z; /** * All memory intensive stuff. This field is initialized by initBlock(). */ private CBZip2InputStream.Data data; /** * This method reports the processed bytes so far. Please note that this * statistic is only updated on block boundaries and only when the stream is * initiated in BYBLOCK mode. * @return ProcessedByteCount. */ public long getProcessedByteCount() { return reportedBytesReadFromCompressedStream; } /** * This method keeps track of raw processed compressed * bytes. * * @param count count is the number of bytes to be * added to raw processed bytes */ protected void updateProcessedByteCount(int count) { this.bytesReadFromCompressedStream += count; } /** * This method is called by the client of this * class in case there are any corrections in * the stream position. One common example is * when client of this code removes starting BZ * characters from the compressed stream. 
* * @param count count bytes are added to the reported bytes * */ public void updateReportedByteCount(int count) { this.reportedBytesReadFromCompressedStream += count; this.updateProcessedByteCount(count); } /** * This method reads a Byte from the compressed stream. Whenever we need to * read from the underlying compressed stream, this method should be called * instead of directly calling the read method of the underlying compressed * stream. This method does important record keeping to have the statistic * that how many bytes have been read off the compressed stream. */ private int readAByte(InputStream inStream) throws IOException { int read = inStream.read(); if (read >= 0) { this.updateProcessedByteCount(1); } return read; } /** * This method tries to find the marker (passed to it as the first parameter) * in the stream. It can find bit patterns of length &lt;= 63 bits. * Specifically this method is used in CBZip2InputStream to find the end of * block (EOB) delimiter in the stream, starting from the current position * of the stream. If marker is found, the stream position will be at the * byte containing the starting bit of the marker. * @param marker The bit pattern to be found in the stream * @param markerBitLength No of bits in the marker * @return true if the marker was found otherwise false * @throws IOException raised on errors performing I/O. 
* @throws IllegalArgumentException if marketBitLength is greater than 63 */ public boolean skipToNextMarker(long marker, int markerBitLength) throws IOException, IllegalArgumentException { try { if (markerBitLength > 63) { throw new IllegalArgumentException( "skipToNextMarker can not find patterns greater than 63 bits"); } // pick next marketBitLength bits in the stream long bytes = 0; bytes = this.bsR(markerBitLength); if (bytes == -1) { this.reportedBytesReadFromCompressedStream = this.bytesReadFromCompressedStream; return false; } while (true) { if (bytes == marker) { // Report the byte position where the marker starts long markerBytesRead = (markerBitLength + this.bsLive + 7) / 8; this.reportedBytesReadFromCompressedStream = this.bytesReadFromCompressedStream - markerBytesRead; return true; } else { bytes = bytes << 1; bytes = bytes & ((1L << markerBitLength) - 1); int oneBit = (int) this.bsR(1); if (oneBit != -1) { bytes = bytes | oneBit; } else { this.reportedBytesReadFromCompressedStream = this.bytesReadFromCompressedStream; return false; } } } } catch (IOException ex) { this.reportedBytesReadFromCompressedStream = this.bytesReadFromCompressedStream; return false; } } protected void reportCRCError() throws IOException { throw new IOException("crc error"); } private void makeMaps() { final boolean[] inUse = this.data.inUse; final byte[] seqToUnseq = this.data.seqToUnseq; int nInUseShadow = 0; for (int i = 0; i < 256; i++) { if (inUse[i]) seqToUnseq[nInUseShadow++] = (byte) i; } this.nInUse = nInUseShadow; } /** * Constructs a new CBZip2InputStream which decompresses bytes read from the * specified stream. * * <p> * Although BZip2 headers are marked with the magic <code>"Bz"</code> this * constructor expects the next byte in the stream to be the first one after * the magic. Thus callers have to skip the first two bytes. Otherwise this * constructor will throw an exception. * </p> * @param in in. * @param readMode READ_MODE. 
* @throws IOException * if the stream content is malformed or an I/O error occurs. * @throws NullPointerException * if <code>in == null</code> */ public CBZip2InputStream(final InputStream in, READ_MODE readMode) throws IOException { this(in, readMode, false); } private CBZip2InputStream(final InputStream in, READ_MODE readMode, boolean skipDecompression) throws IOException { super(); int blockSize = 0X39;// i.e 9 this.blockSize100k = blockSize - '0'; this.in = new BufferedInputStream(in, 1024 * 9);// >1 MB buffer this.readMode = readMode; this.skipDecompression = skipDecompression; if (readMode == READ_MODE.CONTINUOUS) { currentState = STATE.START_BLOCK_STATE; lazyInitialization = (in.available() == 0)?true:false; if(!lazyInitialization){ init(); } } else if (readMode == READ_MODE.BYBLOCK) { this.currentState = STATE.NO_PROCESS_STATE; skipResult = skipToNextBlockMarker(); if(!skipDecompression){ changeStateToProcessABlock(); } } } /** * Skips bytes in the stream until the start marker of a block is reached * or end of stream is reached. Used for testing purposes to identify the * start offsets of blocks. */ @VisibleForTesting boolean skipToNextBlockMarker() throws IOException { return skipToNextMarker( CBZip2InputStream.BLOCK_DELIMITER, DELIMITER_BIT_LENGTH); } /** * Returns the number of bytes between the current stream position * and the immediate next BZip2 block marker. * * @param in * The InputStream * * @return long Number of bytes between current stream position and the * next BZip2 block start marker. * @throws IOException raised on errors performing I/O. 
* */ public static long numberOfBytesTillNextMarker(final InputStream in) throws IOException{ CBZip2InputStream anObject = new CBZip2InputStream(in, READ_MODE.BYBLOCK, true); return anObject.getProcessedByteCount(); } public CBZip2InputStream(final InputStream in) throws IOException { this(in, READ_MODE.CONTINUOUS); } private void changeStateToProcessABlock() throws IOException { if (skipResult == true) { initBlock(); setupBlock(); } else { this.currentState = STATE.EOF; } } @Override public int read() throws IOException { if (this.in != null) { int result = this.read(array, 0, 1); int value = 0XFF & array[0]; return (result > 0 ? value : result); } else { throw new IOException("stream closed"); } } /** * In CONTINOUS reading mode, this read method starts from the * start of the compressed stream and end at the end of file by * emitting un-compressed data. In this mode stream positioning * is not announced and should be ignored. * * In BYBLOCK reading mode, this read method informs about the end * of a BZip2 block by returning EOB. At this event, the compressed * stream position is also announced. This announcement tells that * how much of the compressed stream has been de-compressed and read * out of this class. In between EOB events, the stream position is * not updated. * * * @throws IOException * if the stream content is malformed or an I/O error occurs. * * @return int The return value greater than 0 are the bytes read. 
A value * of -1 means end of stream while -2 represents end of block */ @Override public int read(final byte[] dest, final int offs, final int len) throws IOException { if (offs < 0) { throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); } if (len < 0) { throw new IndexOutOfBoundsException("len(" + len + ") < 0."); } if (offs + len > dest.length) { throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + len + ") > dest.length(" + dest.length + ")."); } if (this.in == null) { throw new IOException("stream closed"); } if(lazyInitialization){ this.init(); this.lazyInitialization = false; } if(skipDecompression){ changeStateToProcessABlock(); skipDecompression = false; } final int hi = offs + len; int destOffs = offs; int b = 0; for (; ((destOffs < hi) && ((b = read0())) >= 0);) { dest[destOffs++] = (byte) b; } int result = destOffs - offs; if (result == 0) { //report 'end of block' or 'end of stream' result = b; skipResult = skipToNextBlockMarker(); changeStateToProcessABlock(); } return result; } private int read0() throws IOException { final int retChar = this.currentChar; switch (this.currentState) { case EOF: return END_OF_STREAM;// return -1 case NO_PROCESS_STATE: return END_OF_BLOCK;// return -2 case START_BLOCK_STATE: throw new IllegalStateException(); case RAND_PART_A_STATE: throw new IllegalStateException(); case RAND_PART_B_STATE: setupRandPartB(); break; case RAND_PART_C_STATE: setupRandPartC(); break; case NO_RAND_PART_A_STATE: throw new IllegalStateException(); case NO_RAND_PART_B_STATE: setupNoRandPartB(); break; case NO_RAND_PART_C_STATE: setupNoRandPartC(); break; default: throw new IllegalStateException(); } return retChar; } private void init() throws IOException { int magic2 = this.readAByte(in); if (magic2 != 'h') { throw new IOException("Stream is not BZip2 formatted: expected 'h'" + " as first byte but got '" + (char) magic2 + "'"); } int blockSize = this.readAByte(in); if ((blockSize < '1') || (blockSize > '9')) { throw new 
IOException("Stream is not BZip2 formatted: illegal " + "blocksize " + (char) blockSize); } this.blockSize100k = blockSize - '0'; initBlock(); setupBlock(); } private void initBlock() throws IOException { if (this.readMode == READ_MODE.BYBLOCK) { // this.checkBlockIntegrity(); this.storedBlockCRC = bsGetInt(); this.blockRandomised = bsR(1) == 1; /** * Allocate data here instead in constructor, so we do not allocate * it if the input file is empty. */ if (this.data == null) { this.data = new Data(this.blockSize100k); } // currBlockNo++; getAndMoveToFrontDecode(); this.crc.initialiseCRC(); this.currentState = STATE.START_BLOCK_STATE; return; } char magic0 = bsGetUByte(); char magic1 = bsGetUByte(); char magic2 = bsGetUByte(); char magic3 = bsGetUByte(); char magic4 = bsGetUByte(); char magic5 = bsGetUByte(); if (magic0 == 0x17 && magic1 == 0x72 && magic2 == 0x45 && magic3 == 0x38 && magic4 == 0x50 && magic5 == 0x90) { complete(); // end of file } else if (magic0 != 0x31 || // '1' magic1 != 0x41 || // ')' magic2 != 0x59 || // 'Y' magic3 != 0x26 || // '&' magic4 != 0x53 || // 'S' magic5 != 0x59 // 'Y' ) { this.currentState = STATE.EOF; throw new IOException("bad block header"); } else { this.storedBlockCRC = bsGetInt(); this.blockRandomised = bsR(1) == 1; /** * Allocate data here instead in constructor, so we do not allocate * it if the input file is empty. */ if (this.data == null) { this.data = new Data(this.blockSize100k); } // currBlockNo++; getAndMoveToFrontDecode(); this.crc.initialiseCRC(); this.currentState = STATE.START_BLOCK_STATE; } } private void endBlock() throws IOException { this.computedBlockCRC = this.crc.getFinalCRC(); // A bad CRC is considered a fatal error. 
if (this.storedBlockCRC != this.computedBlockCRC) { // make next blocks readable without error // (repair feature, not yet documented, not tested) this.computedCombinedCRC = (this.storedCombinedCRC << 1) | (this.storedCombinedCRC >>> 31); this.computedCombinedCRC ^= this.storedBlockCRC; reportCRCError(); } this.computedCombinedCRC = (this.computedCombinedCRC << 1) | (this.computedCombinedCRC >>> 31); this.computedCombinedCRC ^= this.computedBlockCRC; } private void complete() throws IOException { this.storedCombinedCRC = bsGetInt(); this.currentState = STATE.EOF; this.data = null; if (this.storedCombinedCRC != this.computedCombinedCRC) { reportCRCError(); } } @Override public void close() throws IOException { InputStream inShadow = this.in; if (inShadow != null) { try { if (inShadow != System.in) { inShadow.close(); } } finally { this.data = null; this.in = null; } } } private long bsR(final long n) throws IOException { long bsLiveShadow = this.bsLive; long bsBuffShadow = this.bsBuff; if (bsLiveShadow < n) { final InputStream inShadow = this.in; do { int thech = readAByte(inShadow); if (thech < 0) { throw new IOException("unexpected end of stream"); } bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; } while (bsLiveShadow < n); this.bsBuff = bsBuffShadow; } this.bsLive = bsLiveShadow - n; return (bsBuffShadow >> (bsLiveShadow - n)) & ((1L << n) - 1); } private boolean bsGetBit() throws IOException { long bsLiveShadow = this.bsLive; long bsBuffShadow = this.bsBuff; if (bsLiveShadow < 1) { int thech = this.readAByte(in); if (thech < 0) { throw new IOException("unexpected end of stream"); } bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; this.bsBuff = bsBuffShadow; } this.bsLive = bsLiveShadow - 1; return ((bsBuffShadow >> (bsLiveShadow - 1)) & 1) != 0; } private char bsGetUByte() throws IOException { return (char) bsR(8); } private int bsGetInt() throws IOException { return (int) ((((((bsR(8) << 8) | bsR(8)) << 8) | bsR(8)) << 8) | 
bsR(8)); } /** * Called by createHuffmanDecodingTables() exclusively. */ private static void hbCreateDecodeTables(final int[] limit, final int[] base, final int[] perm, final char[] length, final int minLen, final int maxLen, final int alphaSize) { for (int i = minLen, pp = 0; i <= maxLen; i++) { for (int j = 0; j < alphaSize; j++) { if (length[j] == i) { perm[pp++] = j; } } } for (int i = MAX_CODE_LEN; --i > 0;) { base[i] = 0; limit[i] = 0; } for (int i = 0; i < alphaSize; i++) { base[length[i] + 1]++; } for (int i = 1, b = base[0]; i < MAX_CODE_LEN; i++) { b += base[i]; base[i] = b; } for (int i = minLen, vec = 0, b = base[i]; i <= maxLen; i++) { final int nb = base[i + 1]; vec += nb - b; b = nb; limit[i] = vec - 1; vec <<= 1; } for (int i = minLen + 1; i <= maxLen; i++) { base[i] = ((limit[i - 1] + 1) << 1) - base[i]; } } private void recvDecodingTables() throws IOException { final Data dataShadow = this.data; final boolean[] inUse = dataShadow.inUse; final byte[] pos = dataShadow.recvDecodingTables_pos; final byte[] selector = dataShadow.selector; final byte[] selectorMtf = dataShadow.selectorMtf; int inUse16 = 0; /* Receive the mapping table */ for (int i = 0; i < 16; i++) { if (bsGetBit()) { inUse16 |= 1 << i; } } for (int i = 256; --i >= 0;) { inUse[i] = false; } for (int i = 0; i < 16; i++) { if ((inUse16 & (1 << i)) != 0) { final int i16 = i << 4; for (int j = 0; j < 16; j++) { if (bsGetBit()) { inUse[i16 + j] = true; } } } } makeMaps(); final int alphaSize = this.nInUse + 2; /* Now the selectors */ final int nGroups = (int) bsR(3); final int nSelectors = (int) bsR(15); for (int i = 0; i < nSelectors; i++) { int j = 0; while (bsGetBit()) { j++; } selectorMtf[i] = (byte) j; } /* Undo the MTF values for the selectors. 
*/ for (int v = nGroups; --v >= 0;) { pos[v] = (byte) v; } for (int i = 0; i < nSelectors; i++) { int v = selectorMtf[i] & 0xff; final byte tmp = pos[v]; while (v > 0) { // nearly all times v is zero, 4 in most other cases pos[v] = pos[v - 1]; v--; } pos[0] = tmp; selector[i] = tmp; } final char[][] len = dataShadow.temp_charArray2d; /* Now the coding tables */ for (int t = 0; t < nGroups; t++) { int curr = (int) bsR(5); final char[] len_t = len[t]; for (int i = 0; i < alphaSize; i++) { while (bsGetBit()) { curr += bsGetBit() ? -1 : 1; } len_t[i] = (char) curr; } } // finally create the Huffman tables createHuffmanDecodingTables(alphaSize, nGroups); } /** * Called by recvDecodingTables() exclusively. */ private void createHuffmanDecodingTables(final int alphaSize, final int nGroups) { final Data dataShadow = this.data; final char[][] len = dataShadow.temp_charArray2d; final int[] minLens = dataShadow.minLens; final int[][] limit = dataShadow.limit; final int[][] base = dataShadow.base; final int[][] perm = dataShadow.perm; for (int t = 0; t < nGroups; t++) { int minLen = 32; int maxLen = 0; final char[] len_t = len[t]; for (int i = alphaSize; --i >= 0;) { final char lent = len_t[i]; if (lent > maxLen) { maxLen = lent; } if (lent < minLen) { minLen = lent; } } hbCreateDecodeTables(limit[t], base[t], perm[t], len[t], minLen, maxLen, alphaSize); minLens[t] = minLen; } } private void getAndMoveToFrontDecode() throws IOException { this.origPtr = (int) bsR(24); recvDecodingTables(); final InputStream inShadow = this.in; final Data dataShadow = this.data; final byte[] ll8 = dataShadow.ll8; final int[] unzftab = dataShadow.unzftab; final byte[] selector = dataShadow.selector; final byte[] seqToUnseq = dataShadow.seqToUnseq; final char[] yy = dataShadow.getAndMoveToFrontDecode_yy; final int[] minLens = dataShadow.minLens; final int[][] limit = dataShadow.limit; final int[][] base = dataShadow.base; final int[][] perm = dataShadow.perm; final int limitLast = 
this.blockSize100k * 100000; /* * Setting up the unzftab entries here is not strictly necessary, but it * does save having to do it later in a separate pass, and so saves a * block's worth of cache misses. */ for (int i = 256; --i >= 0;) { yy[i] = (char) i; unzftab[i] = 0; } int groupNo = 0; int groupPos = G_SIZE - 1; final int eob = this.nInUse + 1; int nextSym = getAndMoveToFrontDecode0(0); int bsBuffShadow = (int) this.bsBuff; int bsLiveShadow = (int) this.bsLive; int lastShadow = -1; int zt = selector[groupNo] & 0xff; int[] base_zt = base[zt]; int[] limit_zt = limit[zt]; int[] perm_zt = perm[zt]; int minLens_zt = minLens[zt]; while (nextSym != eob) { if ((nextSym == RUNA) || (nextSym == RUNB)) { int s = -1; for (int n = 1; true; n <<= 1) { if (nextSym == RUNA) { s += n; } else if (nextSym == RUNB) { s += n << 1; } else { break; } if (groupPos == 0) { groupPos = G_SIZE - 1; zt = selector[++groupNo] & 0xff; base_zt = base[zt]; limit_zt = limit[zt]; perm_zt = perm[zt]; minLens_zt = minLens[zt]; } else { groupPos--; } int zn = minLens_zt; while (bsLiveShadow < zn) { final int thech = readAByte(inShadow); if (thech >= 0) { bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; continue; } else { throw new IOException("unexpected end of stream"); } } long zvec = (bsBuffShadow >> (bsLiveShadow - zn)) & ((1 << zn) - 1); bsLiveShadow -= zn; while (zvec > limit_zt[zn]) { zn++; while (bsLiveShadow < 1) { final int thech = readAByte(inShadow); if (thech >= 0) { bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; continue; } else { throw new IOException( "unexpected end of stream"); } } bsLiveShadow--; zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1); } nextSym = perm_zt[(int) (zvec - base_zt[zn])]; } final byte ch = seqToUnseq[yy[0]]; unzftab[ch & 0xff] += s + 1; while (s-- >= 0) { ll8[++lastShadow] = ch; } if (lastShadow >= limitLast) { throw new IOException("block overrun"); } } else { if (++lastShadow >= limitLast) { throw new 
IOException("block overrun"); } final char tmp = yy[nextSym - 1]; unzftab[seqToUnseq[tmp] & 0xff]++; ll8[lastShadow] = seqToUnseq[tmp]; /* * This loop is hammered during decompression, hence avoid * native method call overhead of System.arraycopy for very * small ranges to copy. */ if (nextSym <= 16) { for (int j = nextSym - 1; j > 0;) { yy[j] = yy[--j]; } } else { System.arraycopy(yy, 0, yy, 1, nextSym - 1); } yy[0] = tmp; if (groupPos == 0) { groupPos = G_SIZE - 1; zt = selector[++groupNo] & 0xff; base_zt = base[zt]; limit_zt = limit[zt]; perm_zt = perm[zt]; minLens_zt = minLens[zt]; } else { groupPos--; } int zn = minLens_zt; while (bsLiveShadow < zn) { final int thech = readAByte(inShadow); if (thech >= 0) { bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; continue; } else { throw new IOException("unexpected end of stream"); } } int zvec = (bsBuffShadow >> (bsLiveShadow - zn)) & ((1 << zn) - 1); bsLiveShadow -= zn; while (zvec > limit_zt[zn]) { zn++; while (bsLiveShadow < 1) { final int thech = readAByte(inShadow); if (thech >= 0) { bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; continue; } else { throw new IOException("unexpected end of stream"); } } bsLiveShadow--; zvec = ((zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1)); } nextSym = perm_zt[zvec - base_zt[zn]]; } } this.last = lastShadow; this.bsLive = bsLiveShadow; this.bsBuff = bsBuffShadow; } private int getAndMoveToFrontDecode0(final int groupNo) throws IOException { final InputStream inShadow = this.in; final Data dataShadow = this.data; final int zt = dataShadow.selector[groupNo] & 0xff; final int[] limit_zt = dataShadow.limit[zt]; int zn = dataShadow.minLens[zt]; int zvec = (int) bsR(zn); int bsLiveShadow = (int) this.bsLive; int bsBuffShadow = (int) this.bsBuff; while (zvec > limit_zt[zn]) { zn++; while (bsLiveShadow < 1) { final int thech = readAByte(inShadow); if (thech >= 0) { bsBuffShadow = (bsBuffShadow << 8) | thech; bsLiveShadow += 8; continue; } else { throw 
new IOException("unexpected end of stream"); } } bsLiveShadow--; zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1); } this.bsLive = bsLiveShadow; this.bsBuff = bsBuffShadow; return dataShadow.perm[zt][zvec - dataShadow.base[zt][zn]]; } private void setupBlock() throws IOException { if (this.data == null) { return; } final int[] cftab = this.data.cftab; final int[] tt = this.data.initTT(this.last + 1); final byte[] ll8 = this.data.ll8; cftab[0] = 0; System.arraycopy(this.data.unzftab, 0, cftab, 1, 256); for (int i = 1, c = cftab[0]; i <= 256; i++) { c += cftab[i]; cftab[i] = c; } for (int i = 0, lastShadow = this.last; i <= lastShadow; i++) { tt[cftab[ll8[i] & 0xff]++] = i; } if ((this.origPtr < 0) || (this.origPtr >= tt.length)) { throw new IOException("stream corrupted"); } this.su_tPos = tt[this.origPtr]; this.su_count = 0; this.su_i2 = 0; this.su_ch2 = 256; /* not a char and not EOF */ if (this.blockRandomised) { this.su_rNToGo = 0; this.su_rTPos = 0; setupRandPartA(); } else { setupNoRandPartA(); } } private void setupRandPartA() throws IOException { if (this.su_i2 <= this.last) { this.su_chPrev = this.su_ch2; int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff; this.su_tPos = this.data.tt[this.su_tPos]; if (this.su_rNToGo == 0) { this.su_rNToGo = BZip2Constants.rNums[this.su_rTPos] - 1; if (++this.su_rTPos == 512) { this.su_rTPos = 0; } } else { this.su_rNToGo--; } this.su_ch2 = su_ch2Shadow ^= (this.su_rNToGo == 1) ? 
1 : 0; this.su_i2++; this.currentChar = su_ch2Shadow; this.currentState = STATE.RAND_PART_B_STATE; this.crc.updateCRC(su_ch2Shadow); } else { endBlock(); if (readMode == READ_MODE.CONTINUOUS) { initBlock(); setupBlock(); } else if (readMode == READ_MODE.BYBLOCK) { this.currentState = STATE.NO_PROCESS_STATE; } } } private void setupNoRandPartA() throws IOException { if (this.su_i2 <= this.last) { this.su_chPrev = this.su_ch2; int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff; this.su_ch2 = su_ch2Shadow; this.su_tPos = this.data.tt[this.su_tPos]; this.su_i2++; this.currentChar = su_ch2Shadow; this.currentState = STATE.NO_RAND_PART_B_STATE; this.crc.updateCRC(su_ch2Shadow); } else { this.currentState = STATE.NO_RAND_PART_A_STATE; endBlock(); if (readMode == READ_MODE.CONTINUOUS) { initBlock(); setupBlock(); } else if (readMode == READ_MODE.BYBLOCK) { this.currentState = STATE.NO_PROCESS_STATE; } } } private void setupRandPartB() throws IOException { if (this.su_ch2 != this.su_chPrev) { this.currentState = STATE.RAND_PART_A_STATE; this.su_count = 1; setupRandPartA(); } else if (++this.su_count >= 4) { this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff); this.su_tPos = this.data.tt[this.su_tPos]; if (this.su_rNToGo == 0) { this.su_rNToGo = BZip2Constants.rNums[this.su_rTPos] - 1; if (++this.su_rTPos == 512) { this.su_rTPos = 0; } } else { this.su_rNToGo--; } this.su_j2 = 0; this.currentState = STATE.RAND_PART_C_STATE; if (this.su_rNToGo == 1) { this.su_z ^= 1; } setupRandPartC(); } else { this.currentState = STATE.RAND_PART_A_STATE; setupRandPartA(); } } private void setupRandPartC() throws IOException { if (this.su_j2 < this.su_z) { this.currentChar = this.su_ch2; this.crc.updateCRC(this.su_ch2); this.su_j2++; } else { this.currentState = STATE.RAND_PART_A_STATE; this.su_i2++; this.su_count = 0; setupRandPartA(); } } private void setupNoRandPartB() throws IOException { if (this.su_ch2 != this.su_chPrev) { this.su_count = 1; setupNoRandPartA(); } else if 
(++this.su_count >= 4) { this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff); this.su_tPos = this.data.tt[this.su_tPos]; this.su_j2 = 0; setupNoRandPartC(); } else { setupNoRandPartA(); } } private void setupNoRandPartC() throws IOException { if (this.su_j2 < this.su_z) { int su_ch2Shadow = this.su_ch2; this.currentChar = su_ch2Shadow; this.crc.updateCRC(su_ch2Shadow); this.su_j2++; this.currentState = STATE.NO_RAND_PART_C_STATE; } else { this.su_i2++; this.su_count = 0; setupNoRandPartA(); } } private static final class Data extends Object { // (with blockSize 900k) final boolean[] inUse = new boolean[256]; // 256 byte final byte[] seqToUnseq = new byte[256]; // 256 byte final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte /** * Freq table collected to save a pass over the data during * decompression. */ final int[] unzftab = new int[256]; // 1024 byte final int[][] limit = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte final int[][] base = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte final int[][] perm = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte final int[] minLens = new int[N_GROUPS]; // 24 byte final int[] cftab = new int[257]; // 1028 byte final char[] getAndMoveToFrontDecode_yy = new char[256]; // 512 byte final char[][] temp_charArray2d = new char[N_GROUPS][MAX_ALPHA_SIZE]; // 3096 // byte final byte[] recvDecodingTables_pos = new byte[N_GROUPS]; // 6 byte // --------------- // 60798 byte int[] tt; // 3600000 byte byte[] ll8; // 900000 byte // --------------- // 4560782 byte // =============== Data(int blockSize100k) { super(); this.ll8 = new byte[blockSize100k * BZip2Constants.baseBlockSize]; } /** * Initializes the {@link #tt} array. * * This method is called when the required length of the array is known. * I don't initialize it at construction time to avoid unnecessary * memory allocation when compressing small files. 
*/ final int[] initTT(int length) { int[] ttShadow = this.tt; // tt.length should always be >= length, but theoretically // it can happen, if the compressor mixed small and large // blocks. Normally only the last block will be smaller // than others. if ((ttShadow == null) || (ttShadow.length < length)) { this.tt = ttShadow = new int[length]; } return ttShadow; } } }
googleapis/google-cloud-java
37,297
java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ImportSuggestionDenyListEntriesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1/import_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1; /** * * * <pre> * Response message for * [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse} */ public final class ImportSuggestionDenyListEntriesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) ImportSuggestionDenyListEntriesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ImportSuggestionDenyListEntriesResponse.newBuilder() to construct. 
private ImportSuggestionDenyListEntriesResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportSuggestionDenyListEntriesResponse() { errorSamples_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportSuggestionDenyListEntriesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1_ImportSuggestionDenyListEntriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1_ImportSuggestionDenyListEntriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse.class, com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse.Builder .class); } public static final int ERROR_SAMPLES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.rpc.Status> errorSamples_; /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ @java.lang.Override public java.util.List<com.google.rpc.Status> getErrorSamplesList() { return errorSamples_; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorSamplesOrBuilderList() { return errorSamples_; } /** * * * <pre> * A sample of errors encountered while processing the request. 
* </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ @java.lang.Override public int getErrorSamplesCount() { return errorSamples_.size(); } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ @java.lang.Override public com.google.rpc.Status getErrorSamples(int index) { return errorSamples_.get(index); } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ @java.lang.Override public com.google.rpc.StatusOrBuilder getErrorSamplesOrBuilder(int index) { return errorSamples_.get(index); } public static final int IMPORTED_ENTRIES_COUNT_FIELD_NUMBER = 2; private long importedEntriesCount_ = 0L; /** * * * <pre> * Count of deny list entries successfully imported. * </pre> * * <code>int64 imported_entries_count = 2;</code> * * @return The importedEntriesCount. */ @java.lang.Override public long getImportedEntriesCount() { return importedEntriesCount_; } public static final int FAILED_ENTRIES_COUNT_FIELD_NUMBER = 3; private long failedEntriesCount_ = 0L; /** * * * <pre> * Count of deny list entries that failed to be imported. * </pre> * * <code>int64 failed_entries_count = 3;</code> * * @return The failedEntriesCount. 
*/ @java.lang.Override public long getFailedEntriesCount() { return failedEntriesCount_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < errorSamples_.size(); i++) { output.writeMessage(1, errorSamples_.get(i)); } if (importedEntriesCount_ != 0L) { output.writeInt64(2, importedEntriesCount_); } if (failedEntriesCount_ != 0L) { output.writeInt64(3, failedEntriesCount_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < errorSamples_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, errorSamples_.get(i)); } if (importedEntriesCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, importedEntriesCount_); } if (failedEntriesCount_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, failedEntriesCount_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse other = (com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) obj; if (!getErrorSamplesList().equals(other.getErrorSamplesList())) return false; if (getImportedEntriesCount() != other.getImportedEntriesCount()) return false; if (getFailedEntriesCount() != other.getFailedEntriesCount()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getErrorSamplesCount() > 0) { hash = (37 * hash) + ERROR_SAMPLES_FIELD_NUMBER; hash = (53 * hash) + getErrorSamplesList().hashCode(); } hash = (37 * hash) + IMPORTED_ENTRIES_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getImportedEntriesCount()); hash = (37 * hash) + FAILED_ENTRIES_COUNT_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailedEntriesCount()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for * [CompletionService.ImportSuggestionDenyListEntries][google.cloud.discoveryengine.v1.CompletionService.ImportSuggestionDenyListEntries] * method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1_ImportSuggestionDenyListEntriesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1_ImportSuggestionDenyListEntriesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( 
com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse.class, com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse.Builder .class); } // Construct using // com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (errorSamplesBuilder_ == null) { errorSamples_ = java.util.Collections.emptyList(); } else { errorSamples_ = null; errorSamplesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); importedEntriesCount_ = 0L; failedEntriesCount_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1.ImportConfigProto .internal_static_google_cloud_discoveryengine_v1_ImportSuggestionDenyListEntriesResponse_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse build() { com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse buildPartial() { com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse result = new com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( 
com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse result) { if (errorSamplesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { errorSamples_ = java.util.Collections.unmodifiableList(errorSamples_); bitField0_ = (bitField0_ & ~0x00000001); } result.errorSamples_ = errorSamples_; } else { result.errorSamples_ = errorSamplesBuilder_.build(); } } private void buildPartial0( com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.importedEntriesCount_ = importedEntriesCount_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.failedEntriesCount_ = failedEntriesCount_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) { return mergeFrom( (com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( 
com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse other) { if (other == com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse .getDefaultInstance()) return this; if (errorSamplesBuilder_ == null) { if (!other.errorSamples_.isEmpty()) { if (errorSamples_.isEmpty()) { errorSamples_ = other.errorSamples_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureErrorSamplesIsMutable(); errorSamples_.addAll(other.errorSamples_); } onChanged(); } } else { if (!other.errorSamples_.isEmpty()) { if (errorSamplesBuilder_.isEmpty()) { errorSamplesBuilder_.dispose(); errorSamplesBuilder_ = null; errorSamples_ = other.errorSamples_; bitField0_ = (bitField0_ & ~0x00000001); errorSamplesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getErrorSamplesFieldBuilder() : null; } else { errorSamplesBuilder_.addAllMessages(other.errorSamples_); } } } if (other.getImportedEntriesCount() != 0L) { setImportedEntriesCount(other.getImportedEntriesCount()); } if (other.getFailedEntriesCount() != 0L) { setFailedEntriesCount(other.getFailedEntriesCount()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.rpc.Status m = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry); if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); errorSamples_.add(m); } else { errorSamplesBuilder_.addMessage(m); } break; } // case 10 case 16: { importedEntriesCount_ = input.readInt64(); bitField0_ |= 0x00000002; break; } // 
case 16 case 24: { failedEntriesCount_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.rpc.Status> errorSamples_ = java.util.Collections.emptyList(); private void ensureErrorSamplesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { errorSamples_ = new java.util.ArrayList<com.google.rpc.Status>(errorSamples_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorSamplesBuilder_; /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public java.util.List<com.google.rpc.Status> getErrorSamplesList() { if (errorSamplesBuilder_ == null) { return java.util.Collections.unmodifiableList(errorSamples_); } else { return errorSamplesBuilder_.getMessageList(); } } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public int getErrorSamplesCount() { if (errorSamplesBuilder_ == null) { return errorSamples_.size(); } else { return errorSamplesBuilder_.getCount(); } } /** * * * <pre> * A sample of errors encountered while processing the request. 
* </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public com.google.rpc.Status getErrorSamples(int index) { if (errorSamplesBuilder_ == null) { return errorSamples_.get(index); } else { return errorSamplesBuilder_.getMessage(index); } } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder setErrorSamples(int index, com.google.rpc.Status value) { if (errorSamplesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureErrorSamplesIsMutable(); errorSamples_.set(index, value); onChanged(); } else { errorSamplesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder setErrorSamples(int index, com.google.rpc.Status.Builder builderForValue) { if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); errorSamples_.set(index, builderForValue.build()); onChanged(); } else { errorSamplesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder addErrorSamples(com.google.rpc.Status value) { if (errorSamplesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureErrorSamplesIsMutable(); errorSamples_.add(value); onChanged(); } else { errorSamplesBuilder_.addMessage(value); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. 
* </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder addErrorSamples(int index, com.google.rpc.Status value) { if (errorSamplesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureErrorSamplesIsMutable(); errorSamples_.add(index, value); onChanged(); } else { errorSamplesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder addErrorSamples(com.google.rpc.Status.Builder builderForValue) { if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); errorSamples_.add(builderForValue.build()); onChanged(); } else { errorSamplesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder addErrorSamples(int index, com.google.rpc.Status.Builder builderForValue) { if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); errorSamples_.add(index, builderForValue.build()); onChanged(); } else { errorSamplesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder addAllErrorSamples(java.lang.Iterable<? extends com.google.rpc.Status> values) { if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, errorSamples_); onChanged(); } else { errorSamplesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. 
* </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder clearErrorSamples() { if (errorSamplesBuilder_ == null) { errorSamples_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { errorSamplesBuilder_.clear(); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public Builder removeErrorSamples(int index) { if (errorSamplesBuilder_ == null) { ensureErrorSamplesIsMutable(); errorSamples_.remove(index); onChanged(); } else { errorSamplesBuilder_.remove(index); } return this; } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public com.google.rpc.Status.Builder getErrorSamplesBuilder(int index) { return getErrorSamplesFieldBuilder().getBuilder(index); } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public com.google.rpc.StatusOrBuilder getErrorSamplesOrBuilder(int index) { if (errorSamplesBuilder_ == null) { return errorSamples_.get(index); } else { return errorSamplesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public java.util.List<? extends com.google.rpc.StatusOrBuilder> getErrorSamplesOrBuilderList() { if (errorSamplesBuilder_ != null) { return errorSamplesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(errorSamples_); } } /** * * * <pre> * A sample of errors encountered while processing the request. 
* </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public com.google.rpc.Status.Builder addErrorSamplesBuilder() { return getErrorSamplesFieldBuilder().addBuilder(com.google.rpc.Status.getDefaultInstance()); } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public com.google.rpc.Status.Builder addErrorSamplesBuilder(int index) { return getErrorSamplesFieldBuilder() .addBuilder(index, com.google.rpc.Status.getDefaultInstance()); } /** * * * <pre> * A sample of errors encountered while processing the request. * </pre> * * <code>repeated .google.rpc.Status error_samples = 1;</code> */ public java.util.List<com.google.rpc.Status.Builder> getErrorSamplesBuilderList() { return getErrorSamplesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> getErrorSamplesFieldBuilder() { if (errorSamplesBuilder_ == null) { errorSamplesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>( errorSamples_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); errorSamples_ = null; } return errorSamplesBuilder_; } private long importedEntriesCount_; /** * * * <pre> * Count of deny list entries successfully imported. * </pre> * * <code>int64 imported_entries_count = 2;</code> * * @return The importedEntriesCount. */ @java.lang.Override public long getImportedEntriesCount() { return importedEntriesCount_; } /** * * * <pre> * Count of deny list entries successfully imported. * </pre> * * <code>int64 imported_entries_count = 2;</code> * * @param value The importedEntriesCount to set. * @return This builder for chaining. 
*/ public Builder setImportedEntriesCount(long value) { importedEntriesCount_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Count of deny list entries successfully imported. * </pre> * * <code>int64 imported_entries_count = 2;</code> * * @return This builder for chaining. */ public Builder clearImportedEntriesCount() { bitField0_ = (bitField0_ & ~0x00000002); importedEntriesCount_ = 0L; onChanged(); return this; } private long failedEntriesCount_; /** * * * <pre> * Count of deny list entries that failed to be imported. * </pre> * * <code>int64 failed_entries_count = 3;</code> * * @return The failedEntriesCount. */ @java.lang.Override public long getFailedEntriesCount() { return failedEntriesCount_; } /** * * * <pre> * Count of deny list entries that failed to be imported. * </pre> * * <code>int64 failed_entries_count = 3;</code> * * @param value The failedEntriesCount to set. * @return This builder for chaining. */ public Builder setFailedEntriesCount(long value) { failedEntriesCount_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Count of deny list entries that failed to be imported. * </pre> * * <code>int64 failed_entries_count = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearFailedEntriesCount() { bitField0_ = (bitField0_ & ~0x00000004); failedEntriesCount_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse) private static final com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse(); } public static com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> PARSER = new com.google.protobuf.AbstractParser<ImportSuggestionDenyListEntriesResponse>() { @java.lang.Override public ImportSuggestionDenyListEntriesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportSuggestionDenyListEntriesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1.ImportSuggestionDenyListEntriesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }