repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
sflyphotobooks/crp-batik | sources/org/apache/batik/bridge/SVGMaskElementBridge.java | 4733 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.bridge;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import org.apache.batik.ext.awt.image.renderable.Filter;
import org.apache.batik.gvt.CompositeGraphicsNode;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.gvt.filter.Mask;
import org.apache.batik.gvt.filter.MaskRable8Bit;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
/**
* Bridge class for the <mask> element.
*
* @author <a href="mailto:tkormann@apache.org">Thierry Kormann</a>
* @version $Id: SVGMaskElementBridge.java 475477 2006-11-15 22:44:28Z cam $
*/
public class SVGMaskElementBridge extends AnimatableGenericSVGBridge
        implements MaskBridge {

    /**
     * Constructs a new bridge for the &lt;mask&gt; element.
     */
    public SVGMaskElementBridge() {}

    /**
     * Returns 'mask'.
     */
    public String getLocalName() {
        return SVG_MASK_TAG;
    }

    /**
     * Creates a {@code Mask} according to the specified parameters.
     *
     * @param ctx the bridge context to use
     * @param maskElement the element that defines the mask
     * @param maskedElement the element that references the mask element
     * @param maskedNode the graphics node to mask
     * @return the mask, or null when the mask element has no usable content
     */
    public Mask createMask(BridgeContext ctx,
                           Element maskElement,
                           Element maskedElement,
                           GraphicsNode maskedNode) {

        // Resolve the region covered by the mask, honoring 'maskUnits'.
        Rectangle2D maskRegion = SVGUtilities.convertMaskRegion
            (maskElement, maskedElement, maskedNode, ctx);

        // Build the GVT subtree that represents the mask content.
        GVTBuilder builder = ctx.getGVTBuilder();
        CompositeGraphicsNode maskNode = new CompositeGraphicsNode();
        CompositeGraphicsNode maskNodeContent = new CompositeGraphicsNode();
        maskNode.getChildren().add(maskNodeContent);

        boolean hasContent = false;
        Node node = maskElement.getFirstChild();
        while (node != null) {
            // Only element children can contribute graphics nodes.
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                GraphicsNode gn = builder.build(ctx, (Element) node);
                if (gn != null) {
                    hasContent = true;
                    maskNodeContent.getChildren().add(gn);
                }
            }
            node = node.getNextSibling();
        }
        if (!hasContent) {
            return null; // empty mask
        }

        // 'transform' attribute: identity when absent.
        AffineTransform contentTransform;
        String value = maskElement.getAttributeNS(null, SVG_TRANSFORM_ATTRIBUTE);
        if (value.length() == 0) {
            contentTransform = new AffineTransform();
        } else {
            contentTransform = SVGUtilities.convertTransform
                (maskElement, SVG_TRANSFORM_ATTRIBUTE, value, ctx);
        }

        // 'maskContentUnits' attribute - defaults to userSpaceOnUse.
        short coordSystemType;
        value = maskElement.getAttributeNS(null, SVG_MASK_CONTENT_UNITS_ATTRIBUTE);
        if (value.length() == 0) {
            coordSystemType = SVGUtilities.USER_SPACE_ON_USE;
        } else {
            coordSystemType = SVGUtilities.parseCoordinateSystem
                (maskElement, SVG_MASK_CONTENT_UNITS_ATTRIBUTE, value, ctx);
        }

        // Additional transform to move to objectBoundingBox coordinate system.
        if (coordSystemType == SVGUtilities.OBJECT_BOUNDING_BOX) {
            contentTransform = SVGUtilities.toObjectBBox(contentTransform, maskedNode);
        }
        maskNodeContent.setTransform(contentTransform);

        Filter filter = maskedNode.getFilter();
        if (filter == null) {
            // Make the initial source as a RenderableImage.
            filter = maskedNode.getGraphicsNodeRable(true);
        }
        return new MaskRable8Bit(filter, maskNode, maskRegion);
    }
}
| apache-2.0 |
nezirus/elasticsearch | core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java | 5726 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.FieldValueQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregator;
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.io.IOException;
import java.util.Arrays;
import java.util.function.Consumer;
import static java.util.Collections.singleton;
/**
 * Unit tests for the cardinality aggregator, covering empty indices,
 * non-matching fields, both doc-values encodings, and query filtering.
 */
public class CardinalityAggregatorTests extends AggregatorTestCase {

    public void testNoDocs() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            // Intentionally not writing any docs
        }, card -> {
            assertEquals(0.0, card.getValue(), 0);
        });
    }

    public void testNoMatchingField() throws IOException {
        testCase(new MatchAllDocsQuery(), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("wrong_number", 1)));
        }, card -> {
            assertEquals(0.0, card.getValue(), 0);
        });
    }

    public void testSomeMatchesSortedNumericDocValues() throws IOException {
        testCase(new FieldValueQuery("number"), iw -> {
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new SortedNumericDocValuesField("number", 1)));
        }, card -> {
            assertEquals(2, card.getValue(), 0);
        });
    }

    public void testSomeMatchesNumericDocValues() throws IOException {
        testCase(new FieldValueQuery("number"), iw -> {
            iw.addDocument(singleton(new NumericDocValuesField("number", 7)));
            iw.addDocument(singleton(new NumericDocValuesField("number", 1)));
        }, card -> {
            assertEquals(2, card.getValue(), 0);
        });
    }

    public void testQueryFiltering() throws IOException {
        testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7),
                new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1),
                new SortedNumericDocValuesField("number", 1)));
        }, card -> {
            assertEquals(1, card.getValue(), 0);
        });
    }

    public void testQueryFiltersAll() throws IOException {
        testCase(IntPoint.newRangeQuery("number", -1, 0), iw -> {
            iw.addDocument(Arrays.asList(new IntPoint("number", 7),
                new SortedNumericDocValuesField("number", 7)));
            iw.addDocument(Arrays.asList(new IntPoint("number", 1),
                new SortedNumericDocValuesField("number", 1)));
        }, card -> {
            assertEquals(0.0, card.getValue(), 0);
        });
    }

    /**
     * Builds an index via {@code buildIndex}, runs a cardinality aggregation on the
     * long-valued "number" field restricted by {@code query}, and hands the resulting
     * {@link InternalCardinality} to {@code verify}.
     *
     * FIX: the directory and reader were previously closed by plain calls at the end
     * of the method, so any assertion failure or I/O error leaked them. Both are now
     * managed with try-with-resources.
     */
    private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
                          Consumer<InternalCardinality> verify) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
            buildIndex.accept(indexWriter);
            // The writer must be closed before a reader can be opened on the directory.
            indexWriter.close();

            try (IndexReader indexReader = DirectoryReader.open(directory)) {
                IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

                CardinalityAggregationBuilder aggregationBuilder = new CardinalityAggregationBuilder(
                    "_name", ValueType.NUMERIC).field("number");
                MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(
                    NumberFieldMapper.NumberType.LONG);
                fieldType.setName("number");

                CardinalityAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher,
                    fieldType);
                aggregator.preCollection();
                indexSearcher.search(query, aggregator);
                aggregator.postCollection();
                verify.accept((InternalCardinality) aggregator.buildAggregation(0L));
            }
        }
    }
}
| apache-2.0 |
akdasari/SparkCore | spark-framework-web/src/main/java/org/sparkcommerce/core/web/api/endpoint/customer/CustomerEndpoint.java | 1431 | /*
* #%L
* SparkCommerce Framework Web
* %%
* Copyright (C) 2009 - 2013 Spark Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.sparkcommerce.core.web.api.endpoint.customer;
import org.sparkcommerce.core.web.api.endpoint.BaseEndpoint;
import org.sparkcommerce.profile.core.service.CustomerService;
import javax.annotation.Resource;
/**
* This endpoint depends on JAX-RS. It should be extended by components that actually wish
* to provide an endpoint. The annotations such as @Path, @Scope, @Context, @PathParam, @QueryParam,
* @GET, @POST, @PUT, and @DELETE are purposely not provided here to allow implementors finer control over
* the details of the endpoint.
* <p/>
* User: Kelly Tisdell
* Date: 4/10/12
*/
public abstract class CustomerEndpoint extends BaseEndpoint {

    // Profile-layer customer service, injected by bean name ("blCustomerService").
    // Protected so concrete JAX-RS endpoint subclasses can use it directly.
    @Resource(name="blCustomerService")
    protected CustomerService customerService;
}
| apache-2.0 |
robsoncardosoti/flowable-engine | modules/flowable5-engine/src/main/java/org/activiti/engine/impl/cmd/SetTaskVariablesCmd.java | 2232 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.cmd;
import java.util.Map;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.impl.persistence.entity.TaskEntity;
/**
* @author Tom Baeyens
* @author Joram Barrez
*/
public class SetTaskVariablesCmd extends NeedsActiveTaskCmd<Object> {

    private static final long serialVersionUID = 1L;

    // Variables to set on the task; a null map is treated as a no-op.
    protected Map<String, ? extends Object> variables;
    // When true, variables are stored in the task-local scope instead of
    // propagating up to the execution.
    protected boolean isLocal;

    /**
     * @param taskId id of the (active) task to set variables on
     * @param variables name/value pairs to set; may be null
     * @param isLocal whether to set the variables in the task-local scope
     */
    public SetTaskVariablesCmd(String taskId, Map<String, ? extends Object> variables, boolean isLocal) {
        super(taskId);
        this.taskId = taskId;
        this.variables = variables;
        this.isLocal = isLocal;
    }

    /**
     * Applies every variable to the task, then forces a revision bump.
     * Always returns null.
     */
    protected Object execute(CommandContext commandContext, TaskEntity task) {
        if (variables != null) {
            // Iterate over entries rather than keySet()+get(key): one map
            // lookup per variable instead of two, and null values stay intact.
            for (Map.Entry<String, ? extends Object> entry : variables.entrySet()) {
                if (isLocal) {
                    task.setVariableLocal(entry.getKey(), entry.getValue(), false);
                } else {
                    task.setVariable(entry.getKey(), entry.getValue(), false);
                }
            }
        }

        // ACT-1887: Force an update of the task's revision to prevent simultaneous inserts of the same
        // variable. If not, duplicate variables may occur since optimistic locking doesn't work on inserts
        task.forceUpdate();
        return null;
    }

    @Override
    protected String getSuspendedTaskException() {
        return "Cannot add variables to a suspended task";
    }
}
| apache-2.0 |
tinkoff-dwh/zeppelin | spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java | 16647 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.spark;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.display.ui.CheckBox;
import org.apache.zeppelin.display.ui.Select;
import org.apache.zeppelin.display.ui.TextBox;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterOutput;
import org.apache.zeppelin.interpreter.InterpreterOutputListener;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.user.AuthenticationInfo;
import org.junit.After;
import org.junit.Test;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
/**
 * Integration-style tests for the new Spark interpreter implementation:
 * Scala evaluation, SQL, ZeppelinContext forms, completion, progress,
 * cancellation, and dependency loading.
 */
public class NewSparkInterpreterTest {

  private SparkInterpreter interpreter;

  // catch the streaming output in onAppend
  private volatile String output = "";
  // catch the interpreter output in onUpdate
  private InterpreterResultMessageOutput messageOutput;

  @Test
  public void testSparkInterpreter() throws IOException, InterruptedException, InterpreterException {
    Properties properties = new Properties();
    properties.setProperty("spark.master", "local");
    properties.setProperty("spark.app.name", "test");
    properties.setProperty("zeppelin.spark.maxResult", "100");
    properties.setProperty("zeppelin.spark.test", "true");
    properties.setProperty("zeppelin.spark.useNew", "true");
    interpreter = new SparkInterpreter(properties);
    assertTrue(interpreter.getDelegation() instanceof NewSparkInterpreter);
    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
    interpreter.open();

    InterpreterResult result = interpreter.interpret("val a=\"hello world\"", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals("a: String = hello world\n", output);

    result = interpreter.interpret("print(a)", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals("hello world", output);

    // incomplete
    result = interpreter.interpret("println(a", getInterpreterContext());
    assertEquals(InterpreterResult.Code.INCOMPLETE, result.code());

    // syntax error
    result = interpreter.interpret("println(b)", getInterpreterContext());
    assertEquals(InterpreterResult.Code.ERROR, result.code());
    assertTrue(output.contains("not found: value b"));

    // multiple line
    result = interpreter.interpret("\"123\".\ntoInt", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // single line comment
    result = interpreter.interpret("/*comment here*/", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret("/*comment here*/\nprint(\"hello world\")", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // multiple line comment
    result = interpreter.interpret("/*line 1 \n line 2*/", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // test function
    result = interpreter.interpret("def add(x:Int, y:Int)\n{ return x+y }", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret("print(add(1,2))", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret("/*line 1 \n line 2*/print(\"hello world\")", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // companion object
    result = interpreter.interpret("class Counter {\n " +
        "var value: Long = 0} \n" +
        "object Counter {\n def apply(x: Long) = new Counter()\n}", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // spark rdd operation
    result = interpreter.interpret("sc.range(1, 10).sum", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertTrue(output.contains("45"));

    // case class
    result = interpreter.interpret("val bankText = sc.textFile(\"bank.csv\")", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret(
        "case class Bank(age:Integer, job:String, marital : String, education : String, balance : Integer)\n",
        getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret(
        "val bank = bankText.map(s=>s.split(\";\")).filter(s => s(0)!=\"\\\"age\\\"\").map(\n" +
        " s => Bank(s(0).toInt, \n" +
        " s(1).replaceAll(\"\\\"\", \"\"),\n" +
        " s(2).replaceAll(\"\\\"\", \"\"),\n" +
        " s(3).replaceAll(\"\\\"\", \"\"),\n" +
        " s(5).replaceAll(\"\\\"\", \"\").toInt\n" +
        " )\n" +
        ")", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // spark version
    result = interpreter.interpret("sc.version", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());

    // spark sql test — the API entry point differs between Spark 1.x and 2.x.
    String version = output.trim();
    if (version.contains("String = 1.")) {
      result = interpreter.interpret("sqlContext", getInterpreterContext());
      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
      result = interpreter.interpret(
          "val df = sqlContext.createDataFrame(Seq((1,\"a\"),(2,\"b\")))\n" +
          "df.show()", getInterpreterContext());
      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
      assertTrue(output.contains(
          "+---+---+\n" +
          "| _1| _2|\n" +
          "+---+---+\n" +
          "| 1| a|\n" +
          "| 2| b|\n" +
          "+---+---+"));
    } else if (version.contains("String = 2.")) {
      result = interpreter.interpret("spark", getInterpreterContext());
      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
      result = interpreter.interpret(
          "val df = spark.createDataFrame(Seq((1,\"a\"),(2,\"b\")))\n" +
          "df.show()", getInterpreterContext());
      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
      assertTrue(output.contains(
          "+---+---+\n" +
          "| _1| _2|\n" +
          "+---+---+\n" +
          "| 1| a|\n" +
          "| 2| b|\n" +
          "+---+---+"));
    }

    // ZeppelinContext
    result = interpreter.interpret("z.show(df)", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(InterpreterResult.Type.TABLE, messageOutput.getType());
    messageOutput.flush();
    assertEquals("_1\t_2\n1\ta\n2\tb\n", messageOutput.toInterpreterResultMessage().getData());

    InterpreterContext context = getInterpreterContext();
    result = interpreter.interpret("z.input(\"name\", \"default_name\")", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, context.getGui().getForms().size());
    assertTrue(context.getGui().getForms().get("name") instanceof TextBox);
    TextBox textBox = (TextBox) context.getGui().getForms().get("name");
    assertEquals("name", textBox.getName());
    assertEquals("default_name", textBox.getDefaultValue());

    context = getInterpreterContext();
    result = interpreter.interpret("z.checkbox(\"checkbox_1\", Seq(\"value_2\"), Seq((\"value_1\", \"name_1\"), (\"value_2\", \"name_2\")))", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, context.getGui().getForms().size());
    assertTrue(context.getGui().getForms().get("checkbox_1") instanceof CheckBox);
    CheckBox checkBox = (CheckBox) context.getGui().getForms().get("checkbox_1");
    assertEquals("checkbox_1", checkBox.getName());
    assertEquals(1, checkBox.getDefaultValue().length);
    assertEquals("value_2", checkBox.getDefaultValue()[0]);
    assertEquals(2, checkBox.getOptions().length);
    assertEquals("value_1", checkBox.getOptions()[0].getValue());
    assertEquals("name_1", checkBox.getOptions()[0].getDisplayName());
    assertEquals("value_2", checkBox.getOptions()[1].getValue());
    assertEquals("name_2", checkBox.getOptions()[1].getDisplayName());

    context = getInterpreterContext();
    result = interpreter.interpret("z.select(\"select_1\", Seq(\"value_2\"), Seq((\"value_1\", \"name_1\"), (\"value_2\", \"name_2\")))", context);
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(1, context.getGui().getForms().size());
    assertTrue(context.getGui().getForms().get("select_1") instanceof Select);
    Select select = (Select) context.getGui().getForms().get("select_1");
    assertEquals("select_1", select.getName());
    // TODO(zjffdu) it seems a bug of GUI, the default value should be 'value_2', but it is List(value_2)
    // assertEquals("value_2", select.getDefaultValue());
    assertEquals(2, select.getOptions().length);
    assertEquals("value_1", select.getOptions()[0].getValue());
    assertEquals("name_1", select.getOptions()[0].getDisplayName());
    assertEquals("value_2", select.getOptions()[1].getValue());
    assertEquals("name_2", select.getOptions()[1].getDisplayName());

    // completions
    List<InterpreterCompletion> completions = interpreter.completion("a.", 2, getInterpreterContext());
    assertTrue(completions.size() > 0);
    completions = interpreter.completion("a.isEm", 6, getInterpreterContext());
    assertEquals(1, completions.size());
    assertEquals("isEmpty", completions.get(0).name);
    completions = interpreter.completion("sc.ra", 5, getInterpreterContext());
    assertEquals(1, completions.size());
    assertEquals("range", completions.get(0).name);

    // Zeppelin-Display
    result = interpreter.interpret("import org.apache.zeppelin.display.angular.notebookscope._\n" +
        "import AngularElem._", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    result = interpreter.interpret("<div style=\"color:blue\">\n" +
        "<h4>Hello Angular Display System</h4>\n" +
        "</div>.display", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(InterpreterResult.Type.ANGULAR, messageOutput.getType());
    assertTrue(messageOutput.toInterpreterResultMessage().getData().contains("Hello Angular Display System"));
    result = interpreter.interpret("<div class=\"btn btn-success\">\n" +
        " Click me\n" +
        "</div>.onClick{() =>\n" +
        " println(\"hello world\")\n" +
        "}.display", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
    assertEquals(InterpreterResult.Type.ANGULAR, messageOutput.getType());
    assertTrue(messageOutput.toInterpreterResultMessage().getData().contains("Click me"));

    // getProgress
    final InterpreterContext context2 = getInterpreterContext();
    Thread interpretThread = new Thread() {
      @Override
      public void run() {
        InterpreterResult result = null;
        try {
          result = interpreter.interpret(
              "val df = sc.parallelize(1 to 10, 2).foreach(e=>Thread.sleep(1000))", context2);
        } catch (InterpreterException e) {
          e.printStackTrace();
        }
        assertEquals(InterpreterResult.Code.SUCCESS, result.code());
      }
    };
    interpretThread.start();
    boolean nonZeroProgress = false;
    int progress = 0;
    while(interpretThread.isAlive()) {
      progress = interpreter.getProgress(context2);
      assertTrue(progress >= 0);
      if (progress != 0 && progress != 100) {
        nonZeroProgress = true;
      }
      Thread.sleep(100);
    }
    assertTrue(nonZeroProgress);

    // cancel
    final InterpreterContext context3 = getInterpreterContext();
    interpretThread = new Thread() {
      @Override
      public void run() {
        InterpreterResult result = null;
        try {
          result = interpreter.interpret(
              "val df = sc.parallelize(1 to 10, 2).foreach(e=>Thread.sleep(1000))", context3);
        } catch (InterpreterException e) {
          e.printStackTrace();
        }
        assertEquals(InterpreterResult.Code.ERROR, result.code());
        assertTrue(output.contains("cancelled"));
      }
    };
    interpretThread.start();
    // sleep 1 second to wait for the spark job start
    Thread.sleep(1000);
    interpreter.cancel(context3);
    interpretThread.join();
  }

  @Test
  public void testDependencies() throws IOException, InterpreterException {
    Properties properties = new Properties();
    properties.setProperty("spark.master", "local");
    properties.setProperty("spark.app.name", "test");
    properties.setProperty("zeppelin.spark.maxResult", "100");
    properties.setProperty("zeppelin.spark.useNew", "true");

    // download spark-avro jar
    // FIX: Maven Central rejects plain-HTTP requests with 501 (since Jan 2020),
    // so the artifact must be fetched over https.
    URL website = new URL("https://repo1.maven.org/maven2/com/databricks/spark-avro_2.11/3.2.0/spark-avro_2.11-3.2.0.jar");
    File avroJarFile = new File("spark-avro_2.11-3.2.0.jar");
    // FIX: the channel and output stream were previously never closed;
    // try-with-resources guarantees cleanup even when the download fails.
    try (ReadableByteChannel rbc = Channels.newChannel(website.openStream());
         FileOutputStream fos = new FileOutputStream(avroJarFile)) {
      fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
    }
    properties.setProperty("spark.jars", avroJarFile.getAbsolutePath());

    interpreter = new SparkInterpreter(properties);
    assertTrue(interpreter.getDelegation() instanceof NewSparkInterpreter);
    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
    interpreter.open();

    InterpreterResult result = interpreter.interpret("import com.databricks.spark.avro._", getInterpreterContext());
    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
  }

  @After
  public void tearDown() throws InterpreterException {
    if (this.interpreter != null) {
      this.interpreter.close();
    }
  }

  // Builds a fresh InterpreterContext whose output listener copies streaming
  // output into {@link #output} and the latest message into {@link #messageOutput}.
  private InterpreterContext getInterpreterContext() {
    output = "";
    return new InterpreterContext(
        "noteId",
        "paragraphId",
        "replName",
        "paragraphTitle",
        "paragraphText",
        new AuthenticationInfo(),
        new HashMap<String, Object>(),
        new GUI(),
        new GUI(),
        new AngularObjectRegistry("spark", null),
        null,
        null,
        new InterpreterOutput(
            new InterpreterOutputListener() {
              @Override
              public void onUpdateAll(InterpreterOutput out) {
              }

              @Override
              public void onAppend(int index, InterpreterResultMessageOutput out, byte[] line) {
                try {
                  output = out.toInterpreterResultMessage().getData();
                } catch (IOException e) {
                  e.printStackTrace();
                }
              }

              @Override
              public void onUpdate(int index, InterpreterResultMessageOutput out) {
                messageOutput = out;
              }
            })
    );
  }
}
| apache-2.0 |
OpenUniversity/ovirt-engine | backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/businessentities/StorageDomainOvfInfo.java | 3033 | package org.ovirt.engine.core.common.businessentities;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import org.ovirt.engine.core.common.utils.ToStringBuilder;
import org.ovirt.engine.core.compat.Guid;
/**
 * Tracks the OVF metadata stored on a storage domain: which OVF ids are
 * stored, on which OVF disk, the sync status, and the last update time.
 * Identity (equals/hashCode) deliberately excludes {@code storedOvfIds}.
 */
public class StorageDomainOvfInfo implements BusinessEntity<Guid> {

    private Guid storageDomainId;
    private List<Guid> storedOvfIds;
    private Guid ovfDiskId;
    private StorageDomainOvfInfoStatus status;
    private Date lastUpdated;

    public StorageDomainOvfInfo() {
    }

    public StorageDomainOvfInfo(Guid storageDomainId, List<Guid> storedOvfIds,
            Guid ovfDiskId, StorageDomainOvfInfoStatus status, Date lastUpdated) {
        this.storageDomainId = storageDomainId;
        this.storedOvfIds = storedOvfIds;
        this.ovfDiskId = ovfDiskId;
        this.status = status;
        this.lastUpdated = lastUpdated;
    }

    public StorageDomainOvfInfoStatus getStatus() {
        return status;
    }

    public void setStatus(StorageDomainOvfInfoStatus status) {
        this.status = status;
    }

    public Guid getStorageDomainId() {
        return storageDomainId;
    }

    public void setStorageDomainId(Guid storageDomainId) {
        this.storageDomainId = storageDomainId;
    }

    public Guid getOvfDiskId() {
        return ovfDiskId;
    }

    public void setOvfDiskId(Guid ovfDiskId) {
        this.ovfDiskId = ovfDiskId;
    }

    /** The entity id is the OVF disk id. */
    @Override
    public Guid getId() {
        return getOvfDiskId();
    }

    @Override
    public void setId(Guid id) {
        setOvfDiskId(id);
    }

    public List<Guid> getStoredOvfIds() {
        return storedOvfIds;
    }

    public void setStoredOvfIds(List<Guid> storedOvfIds) {
        this.storedOvfIds = storedOvfIds;
    }

    public Date getLastUpdated() {
        return lastUpdated;
    }

    public void setLastUpdated(Date lastUpdated) {
        this.lastUpdated = lastUpdated;
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (!(o instanceof StorageDomainOvfInfo)) {
            return false;
        }
        StorageDomainOvfInfo that = (StorageDomainOvfInfo) o;
        // storedOvfIds is intentionally not part of the identity check,
        // matching hashCode below.
        if (status != that.status) {
            return false;
        }
        return Objects.equals(ovfDiskId, that.ovfDiskId)
                && Objects.equals(storageDomainId, that.storageDomainId)
                && Objects.equals(lastUpdated, that.lastUpdated);
    }

    @Override
    public int hashCode() {
        return Objects.hash(storageDomainId, ovfDiskId, status, lastUpdated);
    }

    @Override
    public String toString() {
        return ToStringBuilder.forInstance(this)
                .append("storageDomainId", storageDomainId)
                .append("storedOvfIds", storedOvfIds)
                .append("ovfDiskId", ovfDiskId)
                .append("status", status)
                .append("lastUpdated", lastUpdated)
                .build();
    }
}
| apache-2.0 |
raviagarwal7/buck | src/com/facebook/buck/model/BuildTargets.java | 8290 | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.model;
import com.facebook.buck.io.BuckPaths;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;
import java.nio.file.Path;
import java.util.Set;
/**
* Static helpers for working with build targets.
*/
public class BuildTargets {
/** Utility class: do not instantiate. */
private BuildTargets() {}
/**
 * Return a path to a file in the buck-out/bin/ directory. {@code format} will be prepended with
 * the {@link BuckPaths#getScratchDir()} and the target base path, then formatted with the target
 * short name.
 *
 * @param target The {@link BuildTarget} to scope this path to.
 * @param format {@link String#format} string for the path name. It should contain one "%s",
 *     which will be filled in with the rule's short name. It should not start with a slash.
 * @return A {@link java.nio.file.Path} under buck-out/bin, scoped to the base path of
 *     {@code target}.
 */
public static Path getScratchPath(
    ProjectFilesystem filesystem,
    BuildTarget target,
    String format) {
  // Assemble "<scratch-dir>/<base-path>/<format with short name>" as a single
  // relative path string, then resolve it on the filesystem's root.
  String relativePath = String.format(
      "%s/%s" + format,
      filesystem.getBuckPaths().getScratchDir(),
      target.getBasePathWithSlash(),
      target.getShortNameAndFlavorPostfix());
  return filesystem.getRootPath().getFileSystem().getPath(relativePath);
}
/**
* Return a path to a file in the buck-out/annotation/ directory. {@code format} will be prepended
* with the {@link BuckPaths#getAnnotationDir()} and the target base path, then formatted with the
* target short name.
*
* @param target The {@link BuildTarget} to scope this path to.
* @param format {@link String#format} string for the path name. It should contain one "%s",
* which will be filled in with the rule's short name. It should not start with a slash.
* @return A {@link java.nio.file.Path} under buck-out/annotation, scoped to the base path of
* {@code target}.
*/
public static Path getAnnotationPath(
ProjectFilesystem filesystem,
BuildTarget target,
String format) {
return filesystem.getRootPath().getFileSystem().getPath(
String.format(
"%s/%s" + format,
filesystem.getBuckPaths().getAnnotationDir(),
target.getBasePathWithSlash(),
target.getShortNameAndFlavorPostfix()));
}
/**
* Return a path to a file in the buck-out/gen/ directory. {@code format} will be prepended with
* the {@link BuckPaths#getGenDir()} and the target base path, then formatted with the target
* short name.
*
* @param target The {@link BuildTarget} to scope this path to.
* @param format {@link String#format} string for the path name. It should contain one "%s",
* which will be filled in with the rule's short name. It should not start with a slash.
* @return A {@link java.nio.file.Path} under buck-out/gen, scoped to the base path of
* {@code target}.
*/
public static Path getGenPath(ProjectFilesystem filesystem, BuildTarget target, String format) {
return filesystem.getRootPath().getFileSystem().getPath(
String.format(
"%s/%s" + format,
filesystem.getBuckPaths().getGenDir(),
target.getBasePathWithSlash(),
target.getShortNameAndFlavorPostfix()));
}
/**
* Takes the {@link BuildTarget} for {@code hasBuildTarget} and derives a new {@link BuildTarget}
* from it with the specified flavor.
* @throws IllegalArgumentException if the original {@link BuildTarget} already has a flavor.
*/
public static BuildTarget createFlavoredBuildTarget(
UnflavoredBuildTarget buildTarget,
Flavor flavor) {
return BuildTarget.builder(buildTarget)
.addFlavors(flavor)
.build();
}
public static Predicate<BuildTarget> containsFlavors(final FlavorDomain<?> domain) {
return new Predicate<BuildTarget>() {
@Override
public boolean apply(BuildTarget input) {
ImmutableSet<Flavor> flavorSet =
Sets.intersection(domain.getFlavors(), input.getFlavors()).immutableCopy();
return !flavorSet.isEmpty();
}
};
}
public static Predicate<BuildTarget> containsFlavor(final Flavor flavor) {
return new Predicate<BuildTarget>() {
@Override
public boolean apply(BuildTarget input) {
return input.getFlavors().contains(flavor);
}
};
}
/**
* Propagate flavors represented by the given {@link FlavorDomain} objects from a parent
* target to its dependencies.
*/
public static ImmutableSortedSet<BuildTarget> propagateFlavorDomains(
BuildTarget target,
Iterable<FlavorDomain<?>> domains,
Iterable<BuildTarget> deps) {
Set<Flavor> flavors = Sets.newHashSet();
// For each flavor domain, extract the corresponding flavor from the parent target and
// verify that each dependency hasn't already set this flavor.
for (FlavorDomain<?> domain : domains) {
// Now extract all relevant domain flavors from our parent target.
ImmutableSet<Flavor> flavorSet =
Sets.intersection(domain.getFlavors(), target.getFlavors()).immutableCopy();
if (flavorSet.isEmpty()) {
throw new HumanReadableException(
"%s: no flavor for \"%s\"",
target,
domain.getName());
}
flavors.addAll(flavorSet);
// First verify that our deps are not already flavored for our given domains.
for (BuildTarget dep : deps) {
if (domain.getFlavor(dep).isPresent()) {
throw new HumanReadableException(
"%s: dep %s already has flavor for \"%s\" : %s",
target,
dep,
domain.getName(),
flavorSet.toString());
}
}
}
ImmutableSortedSet.Builder<BuildTarget> flavoredDeps = ImmutableSortedSet.naturalOrder();
// Now flavor each dependency with the relevant flavors.
for (BuildTarget dep : deps) {
flavoredDeps.add(BuildTarget.builder(dep).addAllFlavors(flavors).build());
}
return flavoredDeps.build();
}
/**
* Propagate a build target's flavors in a certain domain to a list of other build targets.
*
* @param domain the flavor domain to be propagated.
* @param buildTarget the build target containing the flavors to be propagated
* @param deps list of BuildTargets to propagate the flavors to. If a target already contains
* one or more flavors in domain, it is left unchanged.
* @return the list of BuildTargets with any flavors propagated.
*/
public static FluentIterable<BuildTarget> propagateFlavorsInDomainIfNotPresent(
FlavorDomain<?> domain,
BuildTarget buildTarget,
FluentIterable<BuildTarget> deps) {
if (domain.containsAnyOf(buildTarget.getFlavors())) {
FluentIterable<BuildTarget> targetsWithFlavorsAlready = deps.filter(
BuildTargets.containsFlavors(domain));
FluentIterable<BuildTarget> targetsWithoutFlavors = deps.filter(
Predicates.not(BuildTargets.containsFlavors(domain)));
deps = targetsWithFlavorsAlready
.append(
BuildTargets.propagateFlavorDomains(
buildTarget,
ImmutableSet.<FlavorDomain<?>>of(domain),
targetsWithoutFlavors));
}
return deps;
}
}
| apache-2.0 |
SnappyDataInc/snappy-store | tests/core/src/main/java/com/gemstone/gemfire/internal/cache/tier/sockets/CacheServerTransactionsDUnitTest.java | 29950 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.internal.cache.tier.sockets;
import hydra.Log;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.ConflictException;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.EntryEvent;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.Scope;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.cache.client.internal.PoolImpl;
import com.gemstone.gemfire.cache.util.BridgeServer;
import com.gemstone.gemfire.cache.util.CacheListenerAdapter;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.internal.AvailablePort;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.VM;
/**
* Tests behaviour of transactions in client server model
*/
public class CacheServerTransactionsDUnitTest extends DistributedTestCase
{
  // NOTE: dunit invokes the static methods below reflectively by name
  // (VM.invoke(Class, "methodName", args)), so their names and signatures
  // must stay in sync with every invoke(...) call site in this class.

  /** constructor */
  public CacheServerTransactionsDUnitTest(String name) {
    super(name);
  }

  // Statics are per-VM: each dunit VM (server1/server2/client1/client2)
  // holds its own copy, populated by the reflective invocations above.
  private static Cache cache = null;

  private static PoolImpl pool = null;

  // Keys created on every server region (see createEntries()).
  private static final String k1 = "k1";

  private static final String k2 = "k2";

  private static final String k3 = "k3";

  // Values written by the transactional operations; each encodes which VM wrote it.
  private static final String server1_k1 = "server1-k1";

  private static final String server1_k2 = "server1-k2";

  private static final String server2_k3 = "server2-k3";

  private static final String server2_k2 = "server2-k2";

  private static final String client_k2 = "client-k2";

  private static final String client_k1 = "client-k1";

  private static final String REGION_NAME = "CacheServerTransactionsDUnitTest_region";

  private static Host host = null;

  private static VM server1 = null;

  private static VM server2 = null;

  private static VM client1 = null;

  private static VM client2 = null;

  // private static RegionAttributes attrs = null;

  // Set by the CacheListeners when a destroy/invalidate notification arrives;
  // reset per test via resetFlags() and awaited in verifyDestroys()/verifyInvalidates().
  protected static boolean destroyed = false;

  protected static boolean invalidated = false;

  /** Assigns the four dunit VMs: two servers and two clients. */
  @Override
  public void setUp() throws Exception
  {
    super.setUp();
    host = Host.getHost(0);
    server1 = host.getVM(0);
    server2 = host.getVM(1);
    client1 = host.getVM(2);
    client2 = host.getVM(3);
  }

  // Fixed delay (ms) used to let updates/notifications propagate between VMs.
  private static final int PAUSE = 5 * 1000;

  /**
   * Test for update propagation to the clients when there is one server and two
   * clients connected to the server.
   *
   */
  public void testOneServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "putInTransaction",
        new Object[] { "server1" });
    pause(PAUSE);
    // Before the commit, no client must see the transactional puts.
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    // After the commit, the puts must be visible everywhere.
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
  }

  /**
   * Test for update propagation to the clients when there are 2 servers and two
   * clients connected to both the servers.
   *
   */
  public void testServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "putInTransaction",
        new Object[] { "server1" });
    pause(PAUSE);
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
  }

  /**
   * Test for update propagation to the clients when there are 2 servers and two
   * clients connected to separate server.
   *
   */
  public void testServerToClientTransactionsPropagationWithOneClientConnectedToOneServer()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "putInTransaction",
        new Object[] { "server1" });
    pause(PAUSE);
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyNotUpdated");
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdates");
  }

  /**
   * Test for invalidate propagation to the clients when there is one server and two
   * clients connected to the server.
   */
  public void testInvalidatesOneServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "invalidateInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
  }

  /**
   * Test for invalidate propagation to the clients when there are 2 servers and two
   * clients connected to both servers.
   */
  public void testInvalidatesServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "invalidateInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
  }

  /**
   * Test for invalidate propagation to the clients when there are 2 servers and two
   * clients connected to separate servers.
   *
   */
  public void testInvalidatesServerToClientTransactionsPropagationWithOneConnection()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "invalidateInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyInvalidates");
  }

  /**
   * Test for destroy propagation to the clients when there is one server and two
   * clients connected to the server.
   *
   */
  public void testDestroysOneServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "destroyInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
  }

  /**
   * Test for destroy propagation to the clients when there are 2 servers and two
   * clients connected to both servers.
   */
  public void testDestroysServerToClientTransactionsPropagation()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1, port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "destroyInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
  }

  /**
   * Test for destroy propagation to the clients when there are 2 servers and two
   * clients connected to separate servers.
   */
  public void testDestroysServerToClientTransactionsPropagationWithOneConnection()
  {
    Integer port1 = initServerCache(server1);
    Integer port2 = initServerCache(server2);
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port2 });
    pause(PAUSE);
    server1.invoke(resetFlags());
    server2.invoke(resetFlags());
    client1.invoke(resetFlags());
    client2.invoke(resetFlags());
    server1.invoke(CacheServerTransactionsDUnitTest.class, "destroyInTransaction",
        new Object[] { "server1" });
    server1.invoke(CacheServerTransactionsDUnitTest.class,
        "commitTransactionOnServer1");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client1.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyDestroys");
  }

  /**
   * Tests if client commits are propagated to servers or not
   * Currently it is UnsupportedOperationException hence the test is commented
   *
   */
  // NOTE(review): disabled via the YOGESH_ prefix. Also, these invokes pass no
  // args while createServerCache declares an Integer parameter — presumably this
  // would need fixing before re-enabling; confirm against the dunit invoke API.
  public void YOGESH_testClientToServerCommits(){
    Integer port1 = ((Integer)server1.invoke(
        CacheServerTransactionsDUnitTest.class, "createServerCache"));
    Integer port2 = ((Integer)server2.invoke(
        CacheServerTransactionsDUnitTest.class, "createServerCache"));
    client1.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port1 });
    client2.invoke(CacheServerTransactionsDUnitTest.class, "createClientCache",
        new Object[] { getServerHostName(server1.getHost()), port2 });
    client1.invoke(CacheServerTransactionsDUnitTest.class, "commitTransactionOnClient");
    pause(PAUSE);
    server1.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdatesOnServer");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdatesOnServer");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "verifyUpdatesOnServer");
  }

  /**
   * Returns a runnable that clears the destroyed/invalidated notification flags
   * in the VM it is invoked in; run at the start of each test.
   */
  private CacheSerializableRunnable resetFlags()
  {
    CacheSerializableRunnable resetFlags = new CacheSerializableRunnable(
        "resetFlags") {
      @Override
      public void run2() throws CacheException
      {
        destroyed = false;
        invalidated = false;
      }
    };
    return resetFlags;
  }

  /**
   * Performs transactional puts of the client-* values on the client and commits;
   * a ConflictException here fails the test.
   */
  public static void commitTransactionOnClient()
  {
    Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    try {
      cache.getCacheTransactionManager().begin();
      r1.put(k1, client_k1);
      r1.put(k2, client_k2);
      cache.getCacheTransactionManager().commit();
    }
    catch (ConflictException e) {
      fail("Test failed due to ConflictException on client , which is not expected");
    }
    assertEquals(r1.getEntry(k1).getValue(), client_k1);
    assertEquals(r1.getEntry(k2).getValue(), client_k2);
  }

  /**
   * Waits (up to 120s) until k1/k2 carry the client-* values committed by
   * commitTransactionOnClient().
   */
  public static void verifyUpdatesOnServer()
  {
    final Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    try {
      Log.getLogWriter().info(
          "vlaue for the key k1" + r1.getEntry(k1).getValue());
      WaitCriterion ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k1).getValue().equals(client_k1);
        }
        public String description() {
          return null;
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
      ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k2).getValue().equals(client_k2);
        }
        public String description() {
          return null;
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
    }
    catch (Exception e) {
      fail("Exception in trying to get due to " + e);
    }
  }

  /**
   * Begins a transaction and puts server-specific values for k1/k2; the
   * transaction is left open for a later commitTransactionOnServer* call.
   */
  public static void putInTransaction(String server)
  {
    Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    cache.getCacheTransactionManager().begin();
    if (server.equals("server1")) {
      r1.put(k1, server1_k1);
      r1.put(k2, server1_k2);
      // Within the transaction the puts are visible to this VM immediately.
      assertEquals(r1.getEntry(k1).getValue(), server1_k1);
      assertEquals(r1.getEntry(k2).getValue(), server1_k2);
    }
    else if (server.equals("server2")) {
      r1.put(k1, server2_k2);
      r1.put(k2, server2_k3);
      assertEquals(r1.getEntry(k1).getValue(), server2_k2);
      assertEquals(r1.getEntry(k2).getValue(), server2_k3);
    }
  }

  /**
   * Begins a transaction and invalidates k1; the transaction is left open for a
   * later commitTransactionOnServer* call.
   */
  public static void invalidateInTransaction(String server) throws Exception
  {
    Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    cache.getCacheTransactionManager().begin();
    if (server.equals("server1")) {
      r1.invalidate(k1);
      assertNull(r1.getEntry(k1).getValue());
      //assertEquals(r1.getEntry(k2).getValue(), server1_k2);
    }
    else if (server.equals("server2")) {
      r1.invalidate(k1);
      assertNull(r1.getEntry(k1).getValue());
      //assertEquals(r1.getEntry(k2).getValue(), server2_k3);
    }
  }

  /**
   * Begins a transaction and destroys k1; the transaction is left open for a
   * later commitTransactionOnServer* call.
   */
  public static void destroyInTransaction(String server) throws Exception
  {
    Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    cache.getCacheTransactionManager().begin();
    if (server.equals("server1")) {
      r1.destroy(k1);
      assertNull(r1.getEntry(k1));
      //assertEquals(r1.getEntry(k2).getValue(), server1_k2);
    }
    else if (server.equals("server2")) {
      r1.destroy(k1);
      assertNull(r1.getEntry(k1));
      //assertEquals(r1.getEntry(k2).getValue(), server2_k3);
    }
  }

  /**
   * Commits on server2 expecting a ConflictException (server1's transaction on
   * the same keys wins); absence of the exception fails the test.
   */
  public static void commitTransactionOnServer2()
  {
    try {
      cache.getCacheTransactionManager().commit();
      fail("ConflictException is expected on server2 , as server1 has not commited the transaction yet");
    }
    catch (ConflictException cce) {
      // Expected: the conflicting commit must be rejected.
    }
  }

  /** Commits server1's open transaction; a ConflictException fails the test. */
  public static void commitTransactionOnServer1()
  {
    try {
      cache.getCacheTransactionManager().commit();
    }
    catch (ConflictException cce) {
      fail("Test failed due to a ConflictException on server1 , which is not expected");
    }
  }

  /**
   * Verifies (waiting up to 120s) that k1/k2 still hold their original values,
   * i.e. the uncommitted transaction has not leaked to this VM.
   */
  public static void verifyNotUpdated()
  {
    final Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    try {
      Log.getLogWriter().info(
          "vlaue for the key k1" + r1.getEntry(k1).getValue());
      // wait until the condition is met
      WaitCriterion ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k1).getValue().equals(k1);
        }
        public String description() {
          return null;
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
      ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k2).getValue().equals(k2);
        }
        public String description() {
          return null;
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
    }
    catch (Exception e) {
      fail("Exception in trying to get due to " + e);
    }
  }

  /**
   * Waits (up to 120s) until k1/k2 carry the server1-* values committed by
   * commitTransactionOnServer1().
   */
  public static void verifyUpdates()
  {
    final Region r1 = cache.getRegion(Region.SEPARATOR + REGION_NAME);
    assertNotNull(r1);
    try {
      WaitCriterion ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k1).getValue().equals(server1_k1);
        }
        public String description() {
          return "Value for entry " + r1 + " never became " + server1_k1
              + "; it is still " + r1.getEntry(k1).getValue();
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
      ev = new WaitCriterion() {
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return r1.getEntry(k2).getValue().equals(server1_k2);
        }
        public String description() {
          return null;
        }
      };
      DistributedTestCase.waitForCriterion(ev, 120 * 1000, 200, true);
    }
    catch (Exception e) {
      fail("Exception in trying to get due to " + e);
    }
  }

  /**
   * Blocks (up to 60s) until the afterInvalidate listener in this VM sets the
   * invalidated flag; fails if no notification arrives.
   */
  public static void verifyInvalidates()
  {
    synchronized (CacheServerTransactionsDUnitTest.class) {
      if (!invalidated) {
        try {
          CacheServerTransactionsDUnitTest.class.wait(60000);
        }
        catch (InterruptedException e) {
          fail("interrupted");
        }
        if (!invalidated) {
          fail("failed to receive invalidation notification");
        }
      }
    }
  }

  /**
   * Blocks (up to 60s) until the afterDestroy listener in this VM sets the
   * destroyed flag; fails if no notification arrives.
   */
  public static void verifyDestroys()
  {
    synchronized (CacheServerTransactionsDUnitTest.class) {
      if (!destroyed) {
        try {
          CacheServerTransactionsDUnitTest.class.wait(60000);
        }
        catch (InterruptedException e) {
          fail("interrupted");
        }
      }
      if (!destroyed) {
        fail("failed to receive destroy notification");
      }
    }
  }

  /**
   * Creates the per-VM Cache. Connects once, disconnects, then reconnects
   * before creating the cache — presumably to guarantee a fresh
   * DistributedSystem; confirm against other dunit tests before changing.
   */
  private void createCache(Properties props) throws Exception
  {
    DistributedSystem ds = getSystem(props);
    assertNotNull(ds);
    ds.disconnect();
    ds = getSystem(props);
    cache = CacheFactory.create(ds);
    assertNotNull(cache);
  }

  /**
   * Creates a client cache connected to a single server, with a LOCAL region
   * whose listener records destroy/invalidate notifications, and registers
   * interest in k1-k3.
   */
  public static void createClientCache(String host, Integer port) throws Exception
  {
    Properties props = new Properties();
    props.setProperty("mcast-port", "0");
    props.setProperty("locators", "");
    new CacheServerTransactionsDUnitTest("temp").createCache(props);
    PoolImpl p = (PoolImpl)PoolManager.createFactory()
        .addServer(host, port.intValue())
        .setSubscriptionEnabled(true)
        // .setRetryInterval(2000)
        .create("CacheServerTransctionDUnitTestPool2");
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    factory.setPoolName(p.getName());
    factory.setCacheListener(new CacheListenerAdapter() {
      // NOTE(review): afterDestroy uses notify() while afterInvalidate uses
      // notifyAll(); presumably only one thread ever waits on the class lock
      // (verifyDestroys/verifyInvalidates) — confirm before relying on it.
      @Override
      public void afterDestroy (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          destroyed = true;
          CacheServerTransactionsDUnitTest.class.notify();
        }
      }

      @Override
      public void afterInvalidate (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          invalidated = true;
          CacheServerTransactionsDUnitTest.class.notifyAll();
        }
      }
    });
    Region region1 = cache.createRegion(REGION_NAME, factory.create());
    assertNotNull(region1);
    pool = p;
    registerKeys();
  }

  /**
   * Creates a client cache connected to two servers, with a LOCAL region whose
   * listener records destroy/invalidate notifications, and registers interest
   * in k1-k3.
   */
  public static void createClientCache(String host, Integer port1, Integer port2)
      throws Exception
  {
    Properties props = new Properties();
    props.setProperty("mcast-port", "0");
    props.setProperty("locators", "");
    new CacheServerTransactionsDUnitTest("temp").createCache(props);
    PoolImpl p = (PoolImpl)PoolManager.createFactory()
        .addServer(host, port1.intValue())
        .addServer(host, port2.intValue())
        .setSubscriptionEnabled(true)
        // .setRetryInterval(2000)
        .create("CacheServerTransctionDUnitTestPool2");
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    factory.setPoolName(p.getName());
    factory.setCacheListener(new CacheListenerAdapter() {
      @Override
      public void afterDestroy (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          destroyed = true;
          CacheServerTransactionsDUnitTest.class.notify();
        }
      }

      @Override
      public void afterInvalidate (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          invalidated = true;
          CacheServerTransactionsDUnitTest.class.notifyAll();
        }
      }
    });
    Region region1 = cache.createRegion(REGION_NAME, factory.create());
    assertNotNull(region1);
    pool = p;
    registerKeys();
  }

  /** Max bridge-server threads; 0 here, overridable by subclasses. */
  protected int getMaxThreads() {
    return 0;
  }

  /** Creates a server cache in the given VM and returns its bridge-server port. */
  private Integer initServerCache(VM server) {
    Object[] args = new Object[] {new Integer(getMaxThreads())};
    return (Integer)server.invoke(CacheServerTransactionsDUnitTest.class,
        "createServerCache",
        args);
  }

  /**
   * Creates a replicated DISTRIBUTED_ACK region with a notification-recording
   * listener, starts a bridge server on a random port, seeds the k1-k3 entries,
   * and returns the server port.
   */
  public static Integer createServerCache(Integer maxThreads) throws Exception
  {
    new CacheServerTransactionsDUnitTest("temp").createCache(new Properties());
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.DISTRIBUTED_ACK);
    factory.setDataPolicy(DataPolicy.REPLICATE);
    factory.setCacheListener(new CacheListenerAdapter() {
      @Override
      public void afterDestroy (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          destroyed = true;
          CacheServerTransactionsDUnitTest.class.notify();
        }
      }

      @Override
      public void afterInvalidate (EntryEvent event) {
        synchronized (CacheServerTransactionsDUnitTest.class) {
          invalidated = true;
          CacheServerTransactionsDUnitTest.class.notifyAll();
        }
      }
    });
    Region r1 = cache.createRegion(REGION_NAME, factory.create());
    assertNotNull(r1);
    // Local variable deliberately shadows the static VM field "server1".
    BridgeServer server1 = cache.addBridgeServer();
    int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
    server1.setPort(port);
    server1.setMaxThreads(maxThreads.intValue());
    server1.setNotifyBySubscription(true);
    server1.start();
    createEntries();
    return new Integer(server1.getPort());
  }

  /** Seeds k1-k3 with themselves as values (idempotent) and verifies them. */
  public static void createEntries()
  {
    try {
      Region r = cache.getRegion(Region.SEPARATOR + REGION_NAME);
      assertNotNull(r);
      if (!r.containsKey(k1)) {
        r.create(k1, k1);
      }
      if (!r.containsKey(k2)) {
        r.create(k2, k2);
      }
      if (!r.containsKey(k3)) {
        r.create(k3, k3);
      }
      // Verify that no invalidates occurred to this region
      assertEquals(r.getEntry(k1).getValue(), k1);
      assertEquals(r.getEntry(k2).getValue(), k2);
      assertEquals(r.getEntry(k3).getValue(), k3);
    }
    catch (Exception ex) {
      fail("failed while createEntries()", ex);
    }
  }

  /** Registers client interest in k1-k3 so server events reach this client. */
  public static void registerKeys()
  {
    List keys = new ArrayList();
    try {
      Region r = cache.getRegion(Region.SEPARATOR + REGION_NAME);
      assertNotNull(r);
      keys.add(k1);
      keys.add(k2);
      keys.add(k3);
      r.registerInterest(keys);
    }
    catch (Exception ex) {
      fail("failed while registering keys(" + keys + ")", ex);
    }
  }

  /** Closes this VM's cache and disconnects its distributed system, if open. */
  public static void closeCache()
  {
    if (cache != null && !cache.isClosed()) {
      cache.close();
      cache.getDistributedSystem().disconnect();
    }
  }

  /** Tears down caches in all four VMs, clients before servers. */
  @Override
  public void tearDown2() throws Exception
  {
    super.tearDown2();
    // close the clients first
    client1.invoke(CacheServerTransactionsDUnitTest.class, "closeCache");
    client2.invoke(CacheServerTransactionsDUnitTest.class, "closeCache");
    // then close the servers
    server1.invoke(CacheServerTransactionsDUnitTest.class, "closeCache");
    server2.invoke(CacheServerTransactionsDUnitTest.class, "closeCache");
  }
}
| apache-2.0 |
christophd/camel | core/camel-core/src/test/java/org/apache/camel/processor/SplitterPojoTest.java | 5743 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.util.ArrayList;
import java.util.List;
import org.apache.camel.Body;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Header;
import org.apache.camel.Message;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.spi.Registry;
import org.apache.camel.support.DefaultMessage;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Tests splitting a message with a plain POJO bean: one method splits the
 * body into parts, the other builds one outgoing {@link Message} per value in
 * a comma-separated header while keeping the original body.
 */
public class SplitterPojoTest extends ContextTestSupport {

    @Override
    protected Registry createRegistry() throws Exception {
        Registry jndi = super.createRegistry();
        // Bind the splitter POJO so the routes can reference it by name.
        jndi.bind("mySplitterBean", new MySplitterBean());
        return jndi;
    }

    @Test
    public void testSplitBodyWithPojoBean() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.reset();
        mock.expectedBodiesReceived("James", "Jonathan", "Hadrian", "Claus", "Willem");
        template.sendBody("direct:body", "James,Jonathan,Hadrian,Claus,Willem");
        assertMockEndpointsSatisfied();
    }

    @Test
    public void testSplitMessageWithPojoBean() throws Exception {
        // Java-style array declaration (was C-style "String users[]").
        String[] users = { "James", "Jonathan", "Hadrian", "Claus", "Willem" };
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.reset();
        mock.expectedMessageCount(5);
        template.sendBodyAndHeader("direct:message", "Test Body Message", "user", "James,Jonathan,Hadrian,Claus,Willem");
        // Each split message keeps the original body but carries one user each.
        int i = 0;
        for (Exchange exchange : mock.getExchanges()) {
            assertEquals("Test Body Message", exchange.getIn().getBody(), "We got a wrong body ");
            assertEquals(users[i], exchange.getIn().getHeader("user"), "We got a wrong header ");
            i++;
        }
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // START SNIPPET: e1
                from("direct:body")
                        // here we use a POJO bean mySplitterBean to do the split of
                        // the payload
                        .split().method("mySplitterBean", "splitBody").to("mock:result");
                from("direct:message")
                        // here we use a POJO bean mySplitterBean to do the split of
                        // the message
                        // with a certain header value
                        .split().method("mySplitterBean", "splitMessage").to("mock:result");
                // END SNIPPET: e1
            }
        };
    }

    // START SNIPPET: e2
    // Static nested class: the bean needs no reference to the enclosing test
    // instance, so it should not capture one (Effective Java, Item 24).
    public static class MySplitterBean {

        /**
         * The split body method returns something that is iteratable such as a java.util.List.
         *
         * @param body the payload of the incoming message
         * @return a list containing each part split
         */
        public List<String> splitBody(String body) {
            // since this is based on an unit test you can of cause
            // use different logic for splitting as Camel have out
            // of the box support for splitting a String based on comma
            // but this is for show and tell, since this is java code
            // you have the full power how you like to split your messages
            List<String> answer = new ArrayList<>();
            String[] parts = body.split(",");
            for (String part : parts) {
                answer.add(part);
            }
            return answer;
        }

        /**
         * The split message method returns something that is iteratable such as a java.util.List.
         *
         * @param header the header of the incoming message with the name user
         * @param body the payload of the incoming message
         * @param camelContext the context used to create the outgoing messages
         * @return a list containing each part split
         */
        public List<Message> splitMessage(@Header(value = "user") String header, @Body String body, CamelContext camelContext) {
            // we can leverage the Parameter Binding Annotations
            // http://camel.apache.org/parameter-binding-annotations.html
            // to access the message header and body at same time,
            // then create the message that we want, splitter will
            // take care rest of them.
            // *NOTE* this feature requires Camel version >= 1.6.1
            List<Message> answer = new ArrayList<>();
            String[] parts = header.split(",");
            for (String part : parts) {
                DefaultMessage message = new DefaultMessage(camelContext);
                message.setHeader("user", part);
                message.setBody(body);
                answer.add(message);
            }
            return answer;
        }
    }
    // END SNIPPET: e2
}
| apache-2.0 |
manstis/drools | kie-pmml-trusty/kie-pmml-models/kie-pmml-models-scorecard/kie-pmml-models-scorecard-compiler/src/test/java/org/kie/pmml/models/scorecard/compiler/executor/ScorecardModelImplementationProviderTest.java | 4170 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.models.scorecard.compiler.executor;
import java.util.Map;
import org.dmg.pmml.DataDictionary;
import org.dmg.pmml.PMML;
import org.dmg.pmml.TransformationDictionary;
import org.dmg.pmml.scorecard.Scorecard;
import org.junit.BeforeClass;
import org.junit.Test;
import org.kie.pmml.api.enums.PMML_MODEL;
import org.kie.pmml.commons.model.KiePMMLModelWithSources;
import org.kie.pmml.compiler.api.dto.CommonCompilationDTO;
import org.kie.pmml.compiler.api.testutils.TestUtils;
import org.kie.pmml.compiler.commons.mocks.HasClassLoaderMock;
import org.kie.pmml.models.scorecard.model.KiePMMLScorecardModel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
public class ScorecardModelImplementationProviderTest {
private static final String BASIC_COMPLEX_PARTIAL_SCORE_SOURCE = "BasicComplexPartialScore.pmml";
private static final String PACKAGE_NAME = "packagename";
private static final ScorecardModelImplementationProvider provider = new ScorecardModelImplementationProvider();
private static final ScorecardModelImplementationProvider PROVIDER = new ScorecardModelImplementationProvider();
private static PMML basicComplexPartialScorePmml;
private static DataDictionary basicComplexPartialScoreDataDictionary;
private static TransformationDictionary basicComplexPartialScoreTransformationDictionary;
private static Scorecard basicComplexPartialScore;
@BeforeClass
public static void setupClass() throws Exception {
basicComplexPartialScorePmml = TestUtils.loadFromFile(BASIC_COMPLEX_PARTIAL_SCORE_SOURCE);
basicComplexPartialScoreDataDictionary = basicComplexPartialScorePmml.getDataDictionary();
basicComplexPartialScoreTransformationDictionary = basicComplexPartialScorePmml.getTransformationDictionary();
basicComplexPartialScore = ((Scorecard) basicComplexPartialScorePmml.getModels().get(0));
}
@Test
public void getPMMLModelType() {
assertEquals(PMML_MODEL.SCORECARD_MODEL, PROVIDER.getPMMLModelType());
}
@Test
public void getKiePMMLModel() {
final CommonCompilationDTO<Scorecard> compilationDTO =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
basicComplexPartialScorePmml,
basicComplexPartialScore,
new HasClassLoaderMock());
KiePMMLScorecardModel retrieved = provider.getKiePMMLModel(compilationDTO);
assertNotNull(retrieved);
}
@Test
public void getKiePMMLModelWithSources() {
final CommonCompilationDTO<Scorecard> compilationDTO =
CommonCompilationDTO.fromGeneratedPackageNameAndFields(PACKAGE_NAME,
basicComplexPartialScorePmml,
basicComplexPartialScore,
new HasClassLoaderMock());
KiePMMLModelWithSources retrieved = provider.getKiePMMLModelWithSources(compilationDTO);
assertNotNull(retrieved);
Map<String, String> retrievedSourcesMap = retrieved.getSourcesMap();
assertNotNull(retrievedSourcesMap);
assertFalse(retrievedSourcesMap.isEmpty());
}
} | apache-2.0 |
ajrulez/holoaccent | HoloAccentExample/src/com/negusoft/holoaccent/example/fragment/ProgressFragment.java | 588 | package com.negusoft.holoaccent.example.fragment;
import android.app.Fragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.negusoft.holoaccent.example.R;
/**
 * Simple fragment showing the progress/seek-bar layout, with one SeekBar
 * disabled to demonstrate the disabled-state styling.
 */
public class ProgressFragment extends Fragment {

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate against the container with attachToRoot=false: the fragment
        // framework attaches the returned view itself, and supplying the
        // parent lets the root view's layout_* attributes be resolved
        // (previously the parent was null, which discards them).
        return inflater.inflate(R.layout.progress, container, false);
    }

    @Override
    public void onStart() {
        super.onStart();
        // Disable the seek bar so its disabled appearance can be inspected.
        getActivity().findViewById(R.id.myseekbar).setEnabled(false);
    }
}
| apache-2.0 |
devicehive/devicehive-java-server | devicehive-plugin/src/main/java/com/devicehive/application/filter/ContentTypeFilter.java | 1419 | package com.devicehive.application.filter;
/*
* #%L
* DeviceHive Java Server Common business logic
* %%
* Copyright (C) 2016 DataArt
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.springframework.http.HttpHeaders;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerRequestFilter;
import javax.ws.rs.container.PreMatching;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
@Provider
@PreMatching
public class ContentTypeFilter implements ContainerRequestFilter {

    /**
     * Defaults the {@code Content-Type} request header to
     * {@code application/json} when the client did not send one, so that
     * downstream resource matching always sees a media type.
     *
     * @param requestContext the request being filtered, before matching
     * @throws IOException declared by the filter contract; not thrown here
     */
    @Override
    public void filter(ContainerRequestContext requestContext) throws IOException {
        boolean hasContentType = requestContext.getHeaders().containsKey(HttpHeaders.CONTENT_TYPE);
        if (!hasContentType) {
            requestContext.getHeaders().add(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON);
        }
    }
}
| apache-2.0 |
dodok1/cas | support/cas-server-support-x509-core/src/test/java/org/apereo/cas/adaptors/x509/authentication/handler/support/AbstractCRLRevocationCheckerTests.java | 2597 | package org.apereo.cas.adaptors.x509.authentication.handler.support;
import lombok.extern.slf4j.Slf4j;
import org.apereo.cas.adaptors.x509.authentication.revocation.checker.AbstractCRLRevocationChecker;
import org.apereo.cas.adaptors.x509.authentication.revocation.checker.RevocationChecker;
import org.apereo.cas.util.crypto.CertUtils;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.core.io.ClassPathResource;
import java.security.GeneralSecurityException;
import java.security.cert.X509Certificate;
/**
 * Base class for {@link RevocationChecker} unit tests: subclasses supply the
 * checker, this class runs every configured certificate through it and
 * compares the outcome with the expected exception (or expected success).
 *
 * @author Marvin S. Addison
 * @since 3.4.6
 *
 */
@Slf4j
public abstract class AbstractCRLRevocationCheckerTests {

    /** Certificates to run through the revocation checker. */
    private final X509Certificate[] certificates;

    /** Expected failure; {@code null} means the check is expected to pass. */
    private final GeneralSecurityException expected;

    /**
     * Creates a new test instance with given parameters.
     *
     * @param certFiles File names of certificates to check.
     * @param expected Expected result of check; null to indicate expected success.
     */
    public AbstractCRLRevocationCheckerTests(final String[] certFiles, final GeneralSecurityException expected) {
        this.expected = expected;
        this.certificates = new X509Certificate[certFiles.length];
        for (int i = 0; i < certFiles.length; i++) {
            this.certificates[i] = CertUtils.readCertificate(new ClassPathResource(certFiles[i]));
        }
    }

    /**
     * Checks every certificate and asserts that the overall outcome matches
     * the expectation: success when {@code expected} is null, otherwise a
     * {@link GeneralSecurityException} assignable to the expected type.
     */
    @Test
    public void checkCertificate() {
        try {
            for (final X509Certificate certificate : this.certificates) {
                getChecker().check(certificate);
            }
            if (this.expected != null) {
                Assert.fail("Expected exception of type " + this.expected.getClass());
            }
        } catch (final GeneralSecurityException e) {
            if (this.expected == null) {
                Assert.fail("Revocation check failed unexpectedly with exception: " + e);
            } else {
                final Class<?> wantedClass = this.expected.getClass();
                final Class<?> actualClass = e.getClass();
                Assert.assertTrue(
                    String.format("Expected exception of type %s but got %s", wantedClass, actualClass),
                    wantedClass.isAssignableFrom(actualClass));
            }
        }
    }

    /** Supplies the revocation checker under test. */
    protected abstract RevocationChecker getChecker();
}
| apache-2.0 |
davidwilliams1978/camel | tooling/apt/src/main/java/org/apache/camel/tools/apt/model/EndpointOption.java | 3702 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.tools.apt.model;
import java.util.Set;
import org.apache.camel.tools.apt.helper.CollectionStringBuffer;
import static org.apache.camel.tools.apt.helper.Strings.isNullOrEmpty;
public final class EndpointOption {
private String name;
private String type;
private String required;
private String defaultValue;
private String defaultValueNote;
private String documentation;
private boolean deprecated;
private String group;
private String label;
private boolean enumType;
private Set<String> enums;
public EndpointOption(String name, String type, String required, String defaultValue, String defaultValueNote,
String documentation, boolean deprecated, String group, String label, boolean enumType, Set<String> enums) {
this.name = name;
this.type = type;
this.required = required;
this.defaultValue = defaultValue;
this.defaultValueNote = defaultValueNote;
this.documentation = documentation;
this.deprecated = deprecated;
this.group = group;
this.label = label;
this.enumType = enumType;
this.enums = enums;
}
public String getName() {
return name;
}
public String getType() {
return type;
}
public String getRequired() {
return required;
}
public String getDefaultValue() {
return defaultValue;
}
public String getDocumentation() {
return documentation;
}
public boolean isDeprecated() {
return deprecated;
}
public String getEnumValuesAsHtml() {
CollectionStringBuffer csb = new CollectionStringBuffer("<br/>");
if (enums != null && enums.size() > 0) {
for (String e : enums) {
csb.append(e);
}
}
return csb.toString();
}
public String getDocumentationWithNotes() {
StringBuilder sb = new StringBuilder();
sb.append(documentation);
if (!isNullOrEmpty(defaultValueNote)) {
sb.append(". Default value notice: ").append(defaultValueNote);
}
return sb.toString();
}
public boolean isEnumType() {
return enumType;
}
public Set<String> getEnums() {
return enums;
}
public String getGroup() {
return group;
}
public String getLabel() {
return label;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EndpointOption that = (EndpointOption) o;
if (!name.equals(that.name)) {
return false;
}
return true;
}
@Override
public int hashCode() {
return name.hashCode();
}
}
| apache-2.0 |
xuzha/elasticsearch | plugins/discovery-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureComputeServiceTestCase.java | 2912 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cloud.azure;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.cloud.azure.management.AzureComputeService.Discovery;
import org.elasticsearch.cloud.azure.management.AzureComputeService.Management;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
/**
 * Base class for Azure discovery integration tests that swap the real Azure
 * compute API for a mock plugin supplied by the subclass.
 */
public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCase {

    /** Plugin class standing in for the real Azure compute API. */
    private final Class<? extends Plugin> mockPlugin;

    public AbstractAzureComputeServiceTestCase(Class<? extends Plugin> mockPlugin) {
        // We want to inject the Azure API Mock
        this.mockPlugin = mockPlugin;
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        // The fake subscription id switches on the mock compute service, and
        // "network" node mode is required for the mock to be reachable.
        return Settings.builder()
                .put(super.nodeSettings(nodeOrdinal))
                .put("discovery.type", "azure")
                .put(Node.NODE_MODE_SETTING.getKey(), "network")
                .put(Management.SUBSCRIPTION_ID_SETTING.getKey(), "fake")
                .put(Discovery.REFRESH_SETTING.getKey(), "5s")
                .put(Management.KEYSTORE_PATH_SETTING.getKey(), "dummy")
                .put(Management.KEYSTORE_PASSWORD_SETTING.getKey(), "dummy")
                .put(Management.SERVICE_NAME_SETTING.getKey(), "dummy")
                .build();
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(AzureDiscoveryPlugin.class, mockPlugin);
    }

    /** Asserts that the cluster reports exactly {@code expected} nodes. */
    protected void checkNumberOfNodes(int expected) {
        NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().execute().actionGet();
        assertNotNull(response);
        assertNotNull(response.getNodes());
        assertEquals(expected, response.getNodes().length);
    }
}
| apache-2.0 |
zamattiac/tika | tika-parsers/src/main/java/org/apache/tika/parser/mp3/ID3v2Frame.java | 14140 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.parser.mp3;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.io.UnsupportedEncodingException;
import java.util.Iterator;
import org.apache.tika.parser.mp3.ID3Tags.ID3Comment;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
/**
* A frame of ID3v2 data, which is then passed to a handler to
* be turned into useful data.
*/
public class ID3v2Frame implements MP3Frame {
    // Version bytes from the 10-byte "ID3" tag header (e.g. 3/0 for ID3v2.3.0).
    private int majorVersion;
    private int minorVersion;
    // Header flags byte; bit 0x02 indicates an extended header is present.
    private int flags;
    // Declared tag size in bytes (a 28-bit sync-safe integer), excluding the
    // 10-byte header itself.
    private int length;
    /** Excludes the header size part */
    private byte[] extendedHeader;
    // Raw frame payload; may be shorter than `length` on truncated files.
    private byte[] data;

    public int getMajorVersion() {
        return majorVersion;
    }

    public int getMinorVersion() {
        return minorVersion;
    }

    public int getFlags() {
        return flags;
    }

    public int getLength() {
        return length;
    }

    public byte[] getExtendedHeader() {
        return extendedHeader;
    }

    public byte[] getData() {
        return data;
    }

    /**
     * Returns the next ID3v2 Frame in
     * the file, or null if the next batch of data
     * doesn't correspond to either an ID3v2 header.
     * If no ID3v2 frame could be detected and the passed in input stream is a
     * {@code PushbackInputStream}, the bytes read so far are pushed back so
     * that they can be read again.
     * ID3v2 Frames should come before all Audio ones.
     */
    public static MP3Frame createFrameIfPresent(InputStream inp)
            throws IOException {
        // Peek at the 3-byte magic; a real tag starts with the ASCII "ID3".
        int h1 = inp.read();
        int h2 = inp.read();
        int h3 = inp.read();

        // Is it an ID3v2 Frame?
        if (h1 == (int)'I' && h2 == (int)'D' && h3 == (int)'3') {
            int majorVersion = inp.read();
            int minorVersion = inp.read();
            if (majorVersion == -1 || minorVersion == -1) {
                // EOF mid-header: undo the reads (when possible) and give up.
                pushBack(inp, h1, h2, h3, majorVersion, minorVersion);
                return null;
            }
            return new ID3v2Frame(majorVersion, minorVersion, inp);
        }

        // Not a frame header
        pushBack(inp, h1, h2, h3);
        return null;
    }

    /**
     * Pushes bytes back into the stream if possible. This method is called if
     * no ID3v2 header could be found at the current stream position.
     *
     * @param inp the input stream
     * @param bytes the bytes to be pushed back
     * @throws IOException if an error occurs
     */
    private static void pushBack(InputStream inp, int... bytes)
        throws IOException
    {
        if (inp instanceof PushbackInputStream)
        {
            byte[] buf = new byte[bytes.length];
            for (int i = 0; i < bytes.length; i++)
            {
                buf[i] = (byte) bytes[i];
            }
            ((PushbackInputStream) inp).unread(buf);
        }
    }

    /**
     * Parses the remainder of the tag header (flags, sync-safe length, and the
     * optional extended header), then reads the payload — tolerating short
     * reads on truncated files.
     */
    private ID3v2Frame(int majorVersion, int minorVersion, InputStream inp)
            throws IOException {
        this.majorVersion = majorVersion;
        this.minorVersion = minorVersion;

        // Get the flags and the length
        flags = inp.read();
        length = get7BitsInt(readFully(inp, 4), 0);

        // Do we have an extended header?
        if ((flags & 0x02) == 0x02) {
            int size = getInt(readFully(inp, 4));
            extendedHeader = readFully(inp, size);
        }

        // Get the frame's data, or at least as much
        // of it as we could do
        data = readFully(inp, length, false);
    }

    /** Reads a big-endian 32-bit integer from the start of {@code data}. */
    protected static int getInt(byte[] data) {
        return getInt(data, 0);
    }

    /** Reads a big-endian 32-bit integer at {@code offset}. */
    protected static int getInt(byte[] data, int offset) {
        int b0 = data[offset+0] & 0xFF;
        int b1 = data[offset+1] & 0xFF;
        int b2 = data[offset+2] & 0xFF;
        int b3 = data[offset+3] & 0xFF;
        return (b0 << 24) + (b1 << 16) + (b2 << 8) + (b3 << 0);
    }

    /** Reads a big-endian 24-bit integer at {@code offset} (ID3v2.2 sizes). */
    protected static int getInt3(byte[] data, int offset) {
        int b0 = data[offset+0] & 0xFF;
        int b1 = data[offset+1] & 0xFF;
        int b2 = data[offset+2] & 0xFF;
        return (b0 << 16) + (b1 << 8) + (b2 << 0);
    }

    /** Reads a big-endian 16-bit integer at {@code offset}. */
    protected static int getInt2(byte[] data, int offset) {
        int b0 = data[offset+0] & 0xFF;
        int b1 = data[offset+1] & 0xFF;
        return (b0 << 8) + (b1 << 0);
    }

    /**
     * AKA a Synchsafe integer.
     * 4 bytes hold a 28 bit number. The highest
     * bit in each byte is always 0 and always ignored.
     */
    protected static int get7BitsInt(byte[] data, int offset) {
        int b0 = data[offset+0] & 0x7F;
        int b1 = data[offset+1] & 0x7F;
        int b2 = data[offset+2] & 0x7F;
        int b3 = data[offset+3] & 0x7F;
        return (b0 << 21) + (b1 << 14) + (b2 << 7) + (b3 << 0);
    }

    /** Reads exactly {@code length} bytes; throws on premature EOF. */
    protected static byte[] readFully(InputStream inp, int length)
            throws IOException {
        return readFully(inp, length, true);
    }

    /**
     * Reads up to {@code length} bytes. When {@code shortDataIsFatal} is
     * false, a truncated stream yields a zero-padded buffer instead of an
     * exception (used for frame payloads of damaged files).
     */
    protected static byte[] readFully(InputStream inp, int length, boolean shortDataIsFatal)
            throws IOException {
        byte[] b = new byte[length];

        int pos = 0;
        int read;
        while (pos < length) {
            read = inp.read(b, pos, length-pos);
            if (read == -1) {
                if(shortDataIsFatal) {
                    throw new IOException("Tried to read " + length + " bytes, but only " + pos + " bytes present");
                } else {
                    // Give them what we found
                    // TODO Log the short read
                    return b;
                }
            }
            pos += read;
        }
        return b;
    }

    /** Pairs a charset name with whether it uses two-byte code units. */
    protected static class TextEncoding {
        public final boolean doubleByte;
        public final String encoding;
        private TextEncoding(String encoding, boolean doubleByte) {
            this.doubleByte = doubleByte;
            this.encoding = encoding;
        }
    }

    // Indexed by the ID3v2 text-encoding flag byte (0..3).
    protected static final TextEncoding[] encodings = new TextEncoding[] {
        new TextEncoding("ISO-8859-1", false),
        new TextEncoding("UTF-16", true),   // With BOM
        new TextEncoding("UTF-16BE", true), // Without BOM
        new TextEncoding("UTF-8", false)
    };

    /**
     * Returns the (possibly null padded) String at the given offset and
     * length. String encoding is held in the first byte;
     */
    protected static String getTagString(byte[] data, int offset, int length) {
        int actualLength = length;
        if (actualLength == 0) {
            return "";
        }
        if (actualLength == 1 && data[offset] == 0) {
            return "";
        }

        // Does it have an encoding flag?
        // Detect by the first byte being sub 0x20
        TextEncoding encoding = encodings[0];
        byte maybeEncodingFlag = data[offset];
        if (maybeEncodingFlag >= 0 && maybeEncodingFlag < encodings.length) {
            offset++;
            actualLength--;
            encoding = encodings[maybeEncodingFlag];
        }

        // Trim off null termination / padding (as present)
        while (encoding.doubleByte && actualLength >= 2 && data[offset+actualLength-1] == 0 && data[offset+actualLength-2] == 0) {
            actualLength -= 2;
        }
        while (!encoding.doubleByte && actualLength >= 1 && data[offset+actualLength-1] == 0) {
            actualLength--;
        }
        if (actualLength == 0) {
            return "";
        }

        // TIKA-1024: If it's UTF-16 (with BOM) and all we
        // have is a naked BOM then short-circuit here
        // (return empty string), because new String(..)
        // gives different results on different JVMs
        if (encoding.encoding.equals("UTF-16") && actualLength == 2 &&
            ((data[offset] == (byte) 0xff && data[offset+1] == (byte) 0xfe) ||
             (data[offset] == (byte) 0xfe && data[offset+1] == (byte) 0xff))) {
            return "";
        }

        try {
            // Build the base string
            return new String(data, offset, actualLength, encoding.encoding);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(
                    "Core encoding " + encoding.encoding + " is not available", e);
        }
    }

    /**
     * Builds up the ID3 comment, by parsing and extracting
     * the comment string parts from the given data.
     */
    protected static ID3Comment getComment(byte[] data, int offset, int length) {
        // Comments must have an encoding
        int encodingFlag = data[offset];
        if (encodingFlag >= 0 && encodingFlag < encodings.length) {
            // Good, valid flag
        } else {
            // Invalid string
            return null;
        }
        TextEncoding encoding = encodings[encodingFlag];

        // First is a 3 byte language
        String lang = getString(data, offset+1, 3);

        // After that we have [Desc]\0(\0)[Text]
        int descStart = offset+4;
        int textStart = -1;
        String description = null;
        String text = null;

        // Find where the description ends
        // NOTE(review): for double-byte encodings the i+1/i+2 lookups can in
        // principle run one byte past offset+length on malformed frames —
        // confirm upstream callers always pass well-terminated data.
        try {
            for (int i=descStart; i<offset+length; i++) {
                if (encoding.doubleByte && data[i]==0 && data[i+1] == 0) {
                    // Handle LE vs BE on low byte text
                    if (i+2 < offset+length && data[i+1] == 0 && data[i+2] == 0) {
                        i++;
                    }
                    textStart = i+2;
                    description = new String(data, descStart, i-descStart, encoding.encoding);
                    break;
                }
                if (!encoding.doubleByte && data[i]==0) {
                    textStart = i+1;
                    description = new String(data, descStart, i-descStart, encoding.encoding);
                    break;
                }
            }

            // Did we find the end?
            if (textStart > -1) {
                text = new String(data, textStart, offset+length-textStart, encoding.encoding);
            } else {
                // Assume everything is the text
                text = new String(data, descStart, offset+length-descStart, encoding.encoding);
            }

            // Return
            return new ID3Comment(lang, description, text);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(
                    "Core encoding " + encoding.encoding + " is not available", e);
        }
    }

    /**
     * Returns the String at the given
     * offset and length. Strings are ISO-8859-1
     */
    protected static String getString(byte[] data, int offset, int length) {
        return new String(data, offset, length, ISO_8859_1);
    }

    /**
     * Iterates over id3v2 raw tags.
     * Create an instance of this that configures the
     * various length and multipliers.
     */
    // Non-static inner class: iterates over the enclosing frame's `data`.
    protected class RawTagIterator implements Iterator<RawTag> {
        // Per-ID3-version layout parameters for a raw tag record.
        private int nameLength;
        private int sizeLength;
        private int sizeMultiplier;
        private int flagLength;

        // Current read position within the frame payload.
        private int offset = 0;

        protected RawTagIterator(
                int nameLength, int sizeLength, int sizeMultiplier,
                int flagLength) {
            this.nameLength = nameLength;
            this.sizeLength = sizeLength;
            this.sizeMultiplier = sizeMultiplier;
            this.flagLength = flagLength;
        }

        public boolean hasNext() {
            // Check for padding at the end
            return offset < data.length && data[offset] != 0;
        }

        public RawTag next() {
            RawTag tag = new RawTag(nameLength, sizeLength, sizeMultiplier,
                    flagLength, data, offset);
            offset += tag.getSize();
            return tag;
        }

        public void remove() {
        }
    }

    /** A single raw (unparsed) tag record: name, flag, and payload bytes. */
    protected static class RawTag {
        // Total bytes of name + size + flag fields preceding the payload.
        private int headerSize;
        protected String name;
        protected int flag;
        protected byte[] data;

        private RawTag(
                int nameLength, int sizeLength, int sizeMultiplier,
                int flagLength, byte[] frameData, int offset) {
            headerSize = nameLength + sizeLength + flagLength;

            // Name, normally 3 or 4 bytes
            name = getString(frameData, offset, nameLength);

            // Size
            int rawSize;
            if (sizeLength == 3) {
                rawSize = getInt3(frameData, offset+nameLength);
            } else {
                rawSize = getInt(frameData, offset+nameLength);
            }
            int size = rawSize * sizeMultiplier;

            // Flag
            if (flagLength > 0) {
                if (flagLength == 1) {
                    flag = (int)frameData[offset+nameLength+sizeLength];
                } else {
                    flag = getInt2(frameData, offset+nameLength+sizeLength);
                }
            }

            // Now data
            int copyFrom = offset+nameLength+sizeLength+flagLength;
            size = Math.max(0, Math.min(size, frameData.length-copyFrom)); // TIKA-1218, prevent negative size for malformed files.
            data = new byte[size];
            System.arraycopy(frameData, copyFrom, data, 0, size);
        }

        protected int getSize() {
            return headerSize + data.length;
        }
    }
}
| apache-2.0 |
splunk/splunk-sdk-java | splunk/src/main/java/com/splunk/Record.java | 8415 | /*
* Copyright 2012 Splunk, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"): you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.splunk;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
/**
 * The {@code Record} class represents an extension of {@code HashMap} that
 * contains a variety of value-converting access methods.
 *
 * <p>Values are stored as plain {@code Object}s; each typed getter converts
 * the stored value's string form on demand via the {@code Value} helpers.
 * The single-argument getters assume the key is present — they dereference
 * the stored value and therefore throw {@code NullPointerException} when the
 * key is missing. Use the two-argument overloads to supply a default instead.
 */
public class Record extends HashMap<String, Object> {
    /**
     * Returns the {@code boolean} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The converted value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    boolean getBoolean(String key) {
        return Value.toBoolean(getString(key));
    }

    /**
     * Returns the {@code boolean} value associated with the given key, or the
     * {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    boolean getBoolean(String key, boolean defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return Value.toBoolean(getString(key));
    }

    /**
     * Returns the {@code long} byte count value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The converted value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    long getByteCount(String key) {
        return Value.toByteCount(getString(key));
    }

    /**
     * Returns the {@code long} byte count value associated with the given key,
     * or {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    long getByteCount(String key, long defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return Value.toByteCount(getString(key));
    }

    /**
     * Returns the {@code Date} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The converted value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    Date getDate(String key) {
        return Value.toDate(getString(key));
    }

    /**
     * Returns the {@code Date} value associated with the given key, or
     * {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    Date getDate(String key, Date defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return Value.toDate(getString(key));
    }

    /**
     * Returns the {@code float} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    float getFloat(String key) {
        return Value.toFloat(getString(key));
    }

    /**
     * Returns the {@code int} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    int getInteger(String key) {
        return Value.toInteger(getString(key));
    }

    /**
     * Returns the {@code int} value associated with the given key, or
     * {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    int getInteger(String key, int defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return Value.toInteger(getString(key));
    }

    /**
     * Returns the {@code long} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    long getLong(String key) {
        return Value.toLong(getString(key));
    }

    /**
     * Returns the {@code long} value associated with the given key, or
     * {@code defaultValue} if the key does not exist.
     *
     * <p>The default parameter is now a {@code long} for consistency with the
     * return type; existing {@code int} arguments widen implicitly, so callers
     * are unaffected.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    long getLong(String key, long defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return Value.toLong(getString(key));
    }

    /**
     * Returns the {@code String} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    String getString(String key) {
        return get(key).toString();
    }

    /**
     * Returns the {@code String} value associated with the given key, or
     * {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    String getString(String key, String defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return get(key).toString();
    }

    /**
     * Returns the {@code String[]} value associated with the given key.
     *
     * @param key The key of the value being retrieved.
     * @return The value associated with the given key.
     * @throws NullPointerException If the key does not exist.
     */
    @SuppressWarnings("unchecked") // stored by convention as a List<String>
    String[] getStringArray(String key) {
        List<String> value = (List<String>)get(key);
        return value.toArray(new String[value.size()]);
    }

    /**
     * Returns the {@code String[]} value associated with the given key, or
     * {@code defaultValue} if the key does not exist.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    String[] getStringArray(String key, String[] defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return getStringArray(key);
    }

    /**
     * Returns the value associated with the given key, cast to the given type
     * parameter.
     *
     * @param key The key of the value being retrieved.
     * @param <T> The type to cast the return value to.
     * @return The value associated with the given key, cast to the given type.
     */
    @SuppressWarnings("unchecked") // caller-asserted cast; may fail at use site
    <T> T getValue(String key) {
        return (T)get(key);
    }

    /**
     * Returns the value associated with the given key, or {@code defaultValue}
     * if the key does not exist, cast to the given type parameter.
     *
     * @param key The key of the value being retrieved.
     * @param defaultValue The value to return if the key does not exist.
     * @param <T> The type to cast the return value to.
     * @return The value associated with the given key, or {@code defaultValue}
     * if the key does not exist.
     */
    @SuppressWarnings("unchecked") // caller-asserted cast; may fail at use site
    <T> T getValue(String key, T defaultValue) {
        if (!containsKey(key)) return defaultValue;
        return (T)get(key);
    }
}
| apache-2.0 |
springml/DataflowJavaSDK | sdk/src/test/java/com/google/cloud/dataflow/sdk/util/GcsUtilTest.java | 12585 | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.util;
import static org.hamcrest.Matchers.contains;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
import com.google.api.client.http.HttpStatusCodes;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.client.util.Throwables;
import com.google.api.services.storage.Storage;
import com.google.api.services.storage.model.Objects;
import com.google.api.services.storage.model.StorageObject;
import com.google.cloud.dataflow.sdk.options.GcsOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory;
import com.google.cloud.dataflow.sdk.util.gcsfs.GcsPath;
import com.google.cloud.dataflow.sdk.util.gcsio.GoogleCloudStorageReadChannel;
import com.google.common.collect.ImmutableList;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mockito;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/** Test case for {@link GcsUtil}. */
@RunWith(JUnit4.class)
public class GcsUtilTest {
  @Rule public ExpectedException exception = ExpectedException.none();

  /** Spot-checks glob-to-regexp translation for literals, '*', '?', and character classes. */
  @Test
  public void testGlobTranslation() {
    assertEquals("foo", GcsUtil.globToRegexp("foo"));
    assertEquals("fo[^/]*o", GcsUtil.globToRegexp("fo*o"));
    assertEquals("f[^/]*o\\.[^/]", GcsUtil.globToRegexp("f*o.?"));
    assertEquals("foo-[0-9][^/]*", GcsUtil.globToRegexp("foo-[0-9]*"));
  }

  /** A GcsUtil created from default options should carry the injected credential. */
  @Test
  public void testCreationWithDefaultOptions() {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    assertNotNull(pipelineOptions.getGcpCredential());
  }

  /** A user-supplied ExecutorService must be the one the GcsUtil actually uses. */
  @Test
  public void testCreationWithExecutorServiceProvided() {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    pipelineOptions.setExecutorService(Executors.newCachedThreadPool());
    assertSame(pipelineOptions.getExecutorService(), pipelineOptions.getGcsUtil().executorService);
  }

  /** A user-supplied GcsUtil must be returned as-is, not replaced by a default. */
  @Test
  public void testCreationWithGcsUtilProvided() {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    GcsUtil gcsUtil = Mockito.mock(GcsUtil.class);
    pipelineOptions.setGcsUtil(gcsUtil);
    assertSame(gcsUtil, pipelineOptions.getGcsUtil());
  }

  /**
   * Verifies the default executor is unbounded enough that tasks blocking on
   * each other in reverse order still all complete (no deadlock from a small
   * fixed pool). Each task waits on its own latch, then releases its
   * predecessor's.
   */
  @Test
  public void testMultipleThreadsCanCompleteOutOfOrderWithDefaultThreadPool() throws Exception {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    ExecutorService executorService = pipelineOptions.getExecutorService();
    int numThreads = 100;
    final CountDownLatch[] countDownLatches = new CountDownLatch[numThreads];
    for (int i = 0; i < numThreads; i++) {
      final int currentLatch = i;
      countDownLatches[i] = new CountDownLatch(1);
      executorService.execute(new Runnable() {
        @Override
        public void run() {
          // Wait for latch N and then release latch N - 1
          try {
            countDownLatches[currentLatch].await();
            if (currentLatch > 0) {
              countDownLatches[currentLatch - 1].countDown();
            }
          } catch (InterruptedException e) {
            throw Throwables.propagate(e);
          }
        }
      });
    }
    // Release the last latch starting the chain reaction.
    countDownLatches[countDownLatches.length - 1].countDown();
    executorService.shutdown();
    assertTrue("Expected tasks to complete",
        executorService.awaitTermination(10, TimeUnit.SECONDS));
  }

  /** Expands single files and several wildcard patterns against a mocked bucket listing. */
  @Test
  public void testGlobExpansion() throws IOException {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();

    // Stub the Storage client so list("testbucket") returns a fixed object set.
    Storage mockStorage = Mockito.mock(Storage.class);
    gcsUtil.setStorageClient(mockStorage);
    Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
    Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class);
    Objects modelObjects = new Objects();
    List<StorageObject> items = new ArrayList<>();
    // A directory
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/"));
    // Files within the directory
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file1name"));
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file2name"));
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/file3name"));
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/otherfile"));
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/anotherfile"));
    modelObjects.setItems(items);
    when(mockStorage.objects()).thenReturn(mockStorageObjects);
    when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList);
    when(mockStorageList.execute()).thenReturn(modelObjects);

    // Test a single file.
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/otherfile");
      List<GcsPath> expectedFiles =
          ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/otherfile"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
    // Test patterns.
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file*");
      List<GcsPath> expectedFiles = ImmutableList.of(
          GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file[1-3]*");
      List<GcsPath> expectedFiles = ImmutableList.of(
          GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/file?name");
      List<GcsPath> expectedFiles = ImmutableList.of(
          GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/test*ectory/fi*name");
      List<GcsPath> expectedFiles = ImmutableList.of(
          GcsPath.fromUri("gs://testbucket/testdirectory/file1name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file2name"),
          GcsPath.fromUri("gs://testbucket/testdirectory/file3name"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
  }

  // Patterns that contain recursive wildcards ('**') are not supported.
  @Test
  public void testRecursiveGlobExpansionFails() throws IOException {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
    GcsPath pattern = GcsPath.fromUri("gs://testbucket/test**");
    exception.expect(IllegalArgumentException.class);
    exception.expectMessage("Unsupported wildcard usage");
    gcsUtil.expand(pattern);
  }

  // GCSUtil.expand() should not fail for non-existent single files or directories, since GCS file
  // listing is only eventually consistent.
  @Test
  public void testNonExistent() throws IOException {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();

    // Bucket listing contains only the directory marker, none of the queried names.
    Storage mockStorage = Mockito.mock(Storage.class);
    gcsUtil.setStorageClient(mockStorage);
    Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
    Storage.Objects.List mockStorageList = Mockito.mock(Storage.Objects.List.class);
    Objects modelObjects = new Objects();
    List<StorageObject> items = new ArrayList<>();
    // A directory
    items.add(new StorageObject().setBucket("testbucket").setName("testdirectory/"));
    modelObjects.setItems(items);
    when(mockStorage.objects()).thenReturn(mockStorageObjects);
    when(mockStorageObjects.list("testbucket")).thenReturn(mockStorageList);
    when(mockStorageList.execute()).thenReturn(modelObjects);
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentfile");
      List<GcsPath> expectedFiles =
          ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentfile"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
    {
      GcsPath pattern = GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentdirectory/");
      List<GcsPath> expectedFiles =
          ImmutableList.of(GcsPath.fromUri("gs://testbucket/testdirectory/nonexistentdirectory/"));
      assertThat(expectedFiles, contains(gcsUtil.expand(pattern).toArray()));
    }
  }

  /** fileSize() should report the size returned by the objects.get stub. */
  @Test
  public void testGetSizeBytes() throws Exception {
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
    Storage mockStorage = Mockito.mock(Storage.class);
    gcsUtil.setStorageClient(mockStorage);
    Storage.Objects mockStorageObjects = Mockito.mock(Storage.Objects.class);
    Storage.Objects.Get mockStorageGet = Mockito.mock(Storage.Objects.Get.class);
    when(mockStorage.objects()).thenReturn(mockStorageObjects);
    when(mockStorageObjects.get("testbucket", "testobject")).thenReturn(mockStorageGet);
    when(mockStorageGet.execute()).thenReturn(
        new StorageObject().setSize(BigInteger.valueOf(1000)));
    assertEquals(1000, gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject")));
  }

  /** An HTTP 404 from the service should surface as NoSuchFileException. */
  @Test(expected = NoSuchFileException.class)
  public void testGetSizeBytesWhenFileNotFound() throws Exception {
    MockLowLevelHttpResponse notFoundResponse = new MockLowLevelHttpResponse();
    notFoundResponse.setContent("");
    notFoundResponse.setStatusCode(HttpStatusCodes.STATUS_CODE_NOT_FOUND);
    MockHttpTransport mockTransport =
        new MockHttpTransport.Builder().setLowLevelHttpResponse(notFoundResponse).build();
    GcsOptions pipelineOptions = PipelineOptionsFactory.as(GcsOptions.class);
    pipelineOptions.setGcpCredential(new TestCredential());
    GcsUtil gcsUtil = pipelineOptions.getGcsUtil();
    gcsUtil.setStorageClient(new Storage(mockTransport, Transport.getJsonFactory(), null));
    gcsUtil.fileSize(GcsPath.fromComponents("testbucket", "testobject"));
  }

  /** Closing a GCS read channel twice must not throw (close() is idempotent). */
  @Test
  public void testGCSChannelCloseIdempotent() throws IOException {
    SeekableByteChannel channel =
        new GoogleCloudStorageReadChannel(null, "dummybucket", "dummyobject", null);
    channel.close();
    channel.close();
  }
}
| apache-2.0 |
majinkai/pinpoint | commons-hbase/src/main/java/com/navercorp/pinpoint/common/hbase/HbaseAdminFactory.java | 1655 | /*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.common.hbase;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import java.io.IOException;
import java.util.Objects;
/**
 * {@link AdminFactory} backed by a shared HBase {@link Connection}.
 * {@link #getAdmin()} hands out a fresh {@link Admin} per call; callers
 * return it through {@link #releaseAdmin(Admin)}, which closes it.
 *
 * @author HyunGil Jeong
 */
public class HbaseAdminFactory implements AdminFactory {

    private final Connection connection;

    public HbaseAdminFactory(Connection connection) {
        this.connection = Objects.requireNonNull(connection, "connection must not be null");
    }

    /**
     * Obtains a new {@link Admin} from the underlying connection.
     *
     * @throws HBaseAccessException if the connection has already been closed
     * @throws HbaseSystemException if HBase fails while creating the Admin
     */
    @Override
    public Admin getAdmin() {
        if (connection.isClosed()) {
            throw new HBaseAccessException("Connection already closed");
        }
        Admin admin;
        try {
            admin = connection.getAdmin();
        } catch (IOException ioe) {
            throw new HbaseSystemException(ioe);
        }
        return admin;
    }

    /**
     * Closes a previously obtained {@link Admin}; a {@code null} argument is
     * silently ignored.
     *
     * @throws HbaseSystemException if closing fails
     */
    @Override
    public void releaseAdmin(Admin admin) {
        if (admin != null) {
            try {
                admin.close();
            } catch (IOException ioe) {
                throw new HbaseSystemException(ioe);
            }
        }
    }
}
| apache-2.0 |
plusplusjiajia/directory-kerby | kerby-kerb/kerb-identity-test/src/main/java/org/apache/kerby/kerberos/kerb/identity/backend/BackendTest.java | 5390 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.kerby.kerberos.kerb.identity.backend;
import org.apache.kerby.kerberos.kerb.KrbException;
import org.apache.kerby.kerberos.kerb.request.KrbIdentity;
import org.apache.kerby.kerberos.kerb.type.base.EncryptionKey;
import org.apache.kerby.kerberos.kerb.type.base.EncryptionType;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static org.apache.kerby.kerberos.kerb.identity.backend.BackendTestUtil.TEST_PRINCIPAL;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * A common backend test utility: reusable assertion routines that concrete
 * {@code IdentityBackend} test classes invoke against their backend instance.
 * Each routine cleans up the identities it created before returning.
 */
public abstract class BackendTest {

    /**
     * Stores one identity, drops the cache, and verifies every field and key
     * round-trips through the backend unchanged.
     */
    protected void testGet(IdentityBackend backend) throws KrbException {
        KrbIdentity kid = BackendTestUtil.createOneIdentity(TEST_PRINCIPAL);
        backend.addIdentity(kid);
        // clear the identity cache.
        backend.release();
        KrbIdentity identity = backend.getIdentity(TEST_PRINCIPAL);
        assertThat(identity).isNotNull();
        assertThat(identity.getExpireTime()).isEqualTo(kid.getExpireTime());
        assertThat(identity.isDisabled()).isEqualTo(kid.isDisabled());
        assertThat(identity.getKeyVersion()).isEqualTo(kid.getKeyVersion());
        // Compare each encryption key field-by-field (type, data, kvno).
        for (EncryptionKey expectedKey : kid.getKeys().values()) {
            EncryptionType actualType = EncryptionType.fromValue(expectedKey.getKeyType().getValue());
            EncryptionKey actualKey = identity.getKey(actualType);
            assertThat(actualKey.getKeyType().getValue()).isEqualTo(expectedKey.getKeyType().getValue());
            assertThat(actualKey.getKeyData()).isEqualTo(expectedKey.getKeyData());
            assertThat(actualKey.getKvno()).isEqualTo(expectedKey.getKvno());
        }
        //tearDown
        backend.deleteIdentity(TEST_PRINCIPAL);
    }

    /**
     * Stores one identity and verifies whole-object equality after the cache
     * is cleared.
     */
    protected void testStore(IdentityBackend backend) throws KrbException {
        KrbIdentity kid = BackendTestUtil.createOneIdentity(TEST_PRINCIPAL);
        backend.addIdentity(kid);
        // clear the identity cache.
        backend.release();
        KrbIdentity kid2 = backend.getIdentity(TEST_PRINCIPAL);
        assertThat(kid).isEqualTo(kid2);
        //tearDown
        backend.deleteIdentity(TEST_PRINCIPAL);
    }

    /** Verifies an update (here: disabling the identity) persists past a cache clear. */
    protected void testUpdate(IdentityBackend backend) throws KrbException {
        KrbIdentity kid = BackendTestUtil.createOneIdentity(TEST_PRINCIPAL);
        backend.addIdentity(kid);
        kid.setDisabled(true);
        backend.updateIdentity(kid);
        // clear the identity cache.
        backend.release();
        assertThat(backend.getIdentity(TEST_PRINCIPAL)).isEqualTo(kid);
        //tearDown
        backend.deleteIdentity(TEST_PRINCIPAL);
    }

    /** Verifies a stored identity is retrievable, then gone after deletion. */
    protected void testDelete(IdentityBackend backend) throws KrbException {
        KrbIdentity kid = BackendTestUtil.createOneIdentity(TEST_PRINCIPAL);
        backend.addIdentity(kid);
        // clear the identity cache.
        backend.release();
        assertThat(backend.getIdentity(TEST_PRINCIPAL)).isNotNull();

        backend.deleteIdentity(TEST_PRINCIPAL);
        assertThat(backend.getIdentity(TEST_PRINCIPAL)).isNull();
    }

    /**
     * Stores several identities and verifies the backend enumerates exactly
     * their principal names.
     */
    protected void testGetIdentities(IdentityBackend backend) throws KrbException {
        KrbIdentity[] identities = BackendTestUtil.createManyIdentities();

        for (KrbIdentity identity : identities) {
            backend.addIdentity(identity);
        }
        // clear the identity cache.
        backend.release();

        Iterable<String> principals = backend.getIdentities();
        Iterator<String> iterator = principals.iterator();
        List<String> principalList = new LinkedList<>();
        while (iterator.hasNext()) {
            principalList.add(iterator.next());
        }
        // createManyIdentities() is expected to produce five identities here.
        assertThat(principalList).hasSize(identities.length)
                .contains(identities[0].getPrincipalName())
                .contains(identities[1].getPrincipalName())
                .contains(identities[2].getPrincipalName())
                .contains(identities[3].getPrincipalName())
                .contains(identities[4].getPrincipalName());

        //tearDown
        for (KrbIdentity identity : identities) {
            backend.deleteIdentity(identity.getPrincipalName());
        }
    }

    /** Removes every identity currently stored in the backend. */
    protected void cleanIdentities(IdentityBackend backend) throws KrbException {
        Iterable<String> identities = backend.getIdentities();
        Iterator<String> iterator = identities.iterator();
        while (iterator.hasNext()) {
            backend.deleteIdentity(iterator.next());
        }
    }
}
| apache-2.0 |
airlift/airlift | sample-server/src/main/java/io/airlift/sample/StoreConfig.java | 1574 | /*
* Copyright 2010 Proofpoint, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.sample;
import io.airlift.configuration.Config;
import io.airlift.configuration.LegacyConfig;
import io.airlift.units.Duration;
import javax.validation.constraints.NotNull;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
/**
 * Configuration for the sample store. Bound reflectively by Airlift's
 * configuration framework via the {@code @Config}/{@code @LegacyConfig}
 * annotations, so setter signatures and annotation values are the binding
 * contract — do not rename without migrating the property names.
 */
public class StoreConfig
{
    // Default time-to-live when neither "store.ttl" nor the legacy
    // "store.ttl-in-ms" property is set.
    private Duration ttl = new Duration(1, TimeUnit.HOURS);

    /**
     * Legacy millisecond-based setter; "store.ttl-in-ms" is rewritten to
     * "store.ttl" by the framework. Delegates to {@link #setTtl(Duration)}.
     */
    @Deprecated
    @LegacyConfig(value = "store.ttl-in-ms", replacedBy = "store.ttl")
    public StoreConfig setTtlInMs(int duration)
    {
        return setTtl(new Duration(duration, TimeUnit.MILLISECONDS));
    }

    /**
     * Sets the store entry time-to-live ("store.ttl").
     *
     * @throws NullPointerException if {@code ttl} is null
     * @throws IllegalArgumentException if {@code ttl} is not positive
     */
    @Config("store.ttl")
    public StoreConfig setTtl(Duration ttl)
    {
        requireNonNull(ttl, "ttl must not be null");
        checkArgument(ttl.toMillis() > 0, "ttl must be > 0");
        this.ttl = ttl;
        return this;
    }

    // @NotNull lets bean validation reject a configuration without a TTL.
    @NotNull
    public Duration getTtl()
    {
        return ttl;
    }
}
| apache-2.0 |
rajath26/cassandra-trunk | tools/stress/src/org/apache/cassandra/stress/operations/ThriftMultiGetter.java | 2614 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.stress.operations;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.cassandra.stress.Operation;
import org.apache.cassandra.stress.util.ThriftClient;
import org.apache.cassandra.thrift.ColumnParent;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SliceRange;
import org.apache.cassandra.utils.ByteBufferUtil;
/**
 * Stress {@code Operation} that fetches a column slice for a batch of keys
 * with Thrift {@code multiget_slice}, repeated once per configured column
 * parent, timing each call via {@code timeWithRetry}.
 */
public final class ThriftMultiGetter extends Operation
{

    public ThriftMultiGetter(State state, long index)
    {
        super(state, index);
    }

    public void run(final ThriftClient client) throws IOException
    {
        // Unbounded slice (empty start/end), capped at the configured max
        // columns per key, in ascending order (reversed == false).
        final SlicePredicate predicate = new SlicePredicate().setSlice_range(
                new SliceRange(
                        ByteBufferUtil.EMPTY_BYTE_BUFFER,
                        ByteBufferUtil.EMPTY_BYTE_BUFFER,
                        false,
                        state.settings.columns.maxColumnsPerKey
                )
        );

        final List<ByteBuffer> keys = getKeys(state.settings.command.keysAtOnce);

        for (final ColumnParent parent : state.columnParents)
        {
            timeWithRetry(new RunOp()
            {
                // Number of keys returned by the last attempt; reported via keyCount().
                int count;

                @Override
                public boolean run() throws Exception
                {
                    // Success means the multiget returned at least one row.
                    return (count = client.multiget_slice(keys, parent, predicate, state.settings.command.consistencyLevel).size()) != 0;
                }

                @Override
                public String key()
                {
                    return keys.toString();
                }

                @Override
                public int keyCount()
                {
                    return count;
                }
            });
        }
    }
}
| apache-2.0 |
hekonsek/fabric8 | sandbox/fabric/fabric-api/src/main/java/io/fabric8/api/ProfileBuilders.java | 2231 | /**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.api;
import org.jboss.gravia.runtime.RuntimeLocator;
import org.jboss.gravia.runtime.ServiceLocator;
/**
 * A profile builder factory
 *
 * @author thomas.diesler@jboss.com
 * @since 14-Mar-2014
 */
public interface ProfileBuilders {

    VersionBuilder profileVersionBuilder();

    VersionBuilder profileVersionBuilder(String versionId);

    ProfileBuilder profileBuilder();

    ProfileBuilder profileBuilder(String profileId);

    ProfileBuilder profileBuilder(String versionId, String profileId);

    ProfileBuilder profileBuilderFrom(Profile profile);

    VersionBuilder profileVersionBuilderFrom(Version version);

    // ConfigurationItemBuilder configurationItemBuilder();

    // ConfigurationItemBuilder configurationItemBuilder(String identity);

    /** Locator for the {@link ProfileBuilders} implementation. */
    final class Factory {

        /**
         * Returns the {@link ProfileBuilders} service: from the Gravia service
         * registry when a runtime is present, otherwise by reflectively
         * instantiating the default implementation.
         *
         * @throws IllegalStateException if the default implementation cannot
         *         be loaded or instantiated
         */
        public static ProfileBuilders getProfileBuilders() {
            ProfileBuilders builders;
            if (RuntimeLocator.getRuntime() != null) {
                builders = ServiceLocator.getRequiredService(ProfileBuilders.class);
            } else {
                try {
                    ClassLoader classLoader = ProfileBuilders.class.getClassLoader();
                    // getDeclaredConstructor().newInstance() instead of the
                    // deprecated Class.newInstance(), which rethrows any
                    // constructor exception unchecked and undeclared.
                    builders = (ProfileBuilders) classLoader
                            .loadClass("io.fabric8.internal.DefaultProfileBuilders")
                            .getDeclaredConstructor()
                            .newInstance();
                } catch (ReflectiveOperationException ex) {
                    // Covers ClassNotFound/Instantiation/IllegalAccess plus
                    // NoSuchMethod/InvocationTarget from the constructor call.
                    throw new IllegalStateException(ex);
                }
            }
            return builders;
        }

        // Hide ctor
        private Factory() {
        }
    }
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/blacktie/jatmibroker-xatmi/src/test/java/org/jboss/narayana/blacktie/jatmibroker/xatmi/services/TPSendService.java | 573 | package org.jboss.narayana.blacktie.jatmibroker.xatmi.services;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.jboss.narayana.blacktie.jatmibroker.xatmi.Response;
import org.jboss.narayana.blacktie.jatmibroker.xatmi.Service;
import org.jboss.narayana.blacktie.jatmibroker.xatmi.TPSVCINFO;
/**
 * XATMI {@link Service} used by the tpsend test cases: it logs the
 * invocation and returns {@code null} (no response buffer).
 */
public class TPSendService implements Service {

    private static final Logger LOG = LogManager.getLogger(TPSendService.class);

    public Response tpservice(TPSVCINFO svcinfo) {
        // Log message kept byte-identical; the test harness looks for it.
        LOG.info("testtpsend_service");
        return null;
    }
}
| apache-2.0 |
tabish121/activemq-artemis | artemis-server/src/main/java/org/apache/activemq/artemis/core/config/amqpBrokerConnectivity/AMQPBrokerConnectConfiguration.java | 3991 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.config.amqpBrokerConnectivity;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.config.brokerConnectivity.BrokerConnectConfiguration;
import org.apache.activemq.artemis.uri.ConnectorTransportConfigurationParser;
/**
 * AMQP specific broker connection configuration: a named connection URI plus the
 * mirror / sender / receiver elements that should be created over that connection.
 */
public class AMQPBrokerConnectConfiguration extends BrokerConnectConfiguration {

   // Lazily populated from the connection URI; see getTransportConfigurations().
   List<TransportConfiguration> transportConfigurations;

   // Elements (mirror, senders, receivers, ...) attached to this broker connection.
   List<AMQPBrokerConnectionElement> connectionElements = new ArrayList<>();

   public AMQPBrokerConnectConfiguration() {
      super(null, null);
   }

   public AMQPBrokerConnectConfiguration(String name, String uri) {
      super(name, uri);
   }

   /**
    * Registers an element on this configuration and makes this configuration its parent.
    *
    * @param amqpBrokerConnectionElement the element to add
    * @return this configuration, for chaining
    * @throws IllegalArgumentException if the element declares the MIRROR type but is not
    *         an {@link AMQPMirrorBrokerConnectionElement}
    */
   public AMQPBrokerConnectConfiguration addElement(AMQPBrokerConnectionElement amqpBrokerConnectionElement) {
      amqpBrokerConnectionElement.setParent(this);
      if (amqpBrokerConnectionElement.getType() == AMQPBrokerConnectionAddressType.MIRROR && !(amqpBrokerConnectionElement instanceof AMQPMirrorBrokerConnectionElement)) {
         // Fixed: the message previously named the non-existent type "AMQPMirrorConnectionElement".
         throw new IllegalArgumentException("must be an AMQPMirrorBrokerConnectionElement");
      }
      connectionElements.add(amqpBrokerConnectionElement);
      return this;
   }

   /** Convenience overload of {@link #addElement(AMQPBrokerConnectionElement)} for mirror elements. */
   public AMQPBrokerConnectConfiguration addConnectionElement(AMQPMirrorBrokerConnectionElement amqpBrokerConnectionElement) {
      return addElement(amqpBrokerConnectionElement);
   }

   public List<AMQPBrokerConnectionElement> getConnectionElements() {
      return connectionElements;
   }

   /**
    * Parses the configured URI into transport configurations, replacing any
    * previously parsed result.
    */
   @Override
   public void parseURI() throws Exception {
      ConnectorTransportConfigurationParser parser = new ConnectorTransportConfigurationParser(false);
      URI transportURI = parser.expandURI(getUri());
      this.transportConfigurations = parser.newObject(transportURI, getName());
   }

   /**
    * Returns the transport configurations, parsing the URI on first use.
    */
   public List<TransportConfiguration> getTransportConfigurations() throws Exception {
      if (transportConfigurations == null) {
         parseURI();
      }
      return transportConfigurations;
   }

   // The overrides below narrow the fluent return type to this class so that
   // chained configuration keeps the AMQP-specific API.

   @Override
   public AMQPBrokerConnectConfiguration setReconnectAttempts(int reconnectAttempts) {
      super.setReconnectAttempts(reconnectAttempts);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setUser(String user) {
      super.setUser(user);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setRetryInterval(int retryInterval) {
      super.setRetryInterval(retryInterval);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setPassword(String password) {
      super.setPassword(password);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setUri(String uri) {
      super.setUri(uri);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setName(String name) {
      super.setName(name);
      return this;
   }

   @Override
   public AMQPBrokerConnectConfiguration setAutostart(boolean autostart) {
      super.setAutostart(autostart);
      return this;
   }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-compute/v1/1.31.0/com/google/api/services/compute/model/ServiceAttachmentConnectedEndpoint.java | 3591 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* [Output Only] A connection connected to this service attachment.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ServiceAttachmentConnectedEndpoint extends com.google.api.client.json.GenericJson {

  // NOTE(review): generated data model. Each field is mapped to/from JSON via the
  // @Key annotation, with the field name doubling as the JSON property name, so
  // fields must not be renamed independently of the generator.

  /**
   * The url of a connected endpoint.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String endpoint;

  /**
   * The PSC connection id of the connected endpoint.
   * The value may be {@code null}.
   */
  // @JsonString makes this value travel as a JSON string rather than a number
  // (presumably because uint64 ids can exceed the exact integer range of a
  // JSON number) -- TODO confirm against the Compute API discovery document.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.math.BigInteger pscConnectionId;

  /**
   * The status of a connected endpoint to this service attachment.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String status;

  /**
   * The url of a connected endpoint.
   * @return value or {@code null} for none
   */
  public java.lang.String getEndpoint() {
    return endpoint;
  }

  /**
   * The url of a connected endpoint.
   * @param endpoint endpoint or {@code null} for none
   */
  public ServiceAttachmentConnectedEndpoint setEndpoint(java.lang.String endpoint) {
    this.endpoint = endpoint;
    return this;
  }

  /**
   * The PSC connection id of the connected endpoint.
   * @return value or {@code null} for none
   */
  public java.math.BigInteger getPscConnectionId() {
    return pscConnectionId;
  }

  /**
   * The PSC connection id of the connected endpoint.
   * @param pscConnectionId pscConnectionId or {@code null} for none
   */
  public ServiceAttachmentConnectedEndpoint setPscConnectionId(java.math.BigInteger pscConnectionId) {
    this.pscConnectionId = pscConnectionId;
    return this;
  }

  /**
   * The status of a connected endpoint to this service attachment.
   * @return value or {@code null} for none
   */
  public java.lang.String getStatus() {
    return status;
  }

  /**
   * The status of a connected endpoint to this service attachment.
   * @param status status or {@code null} for none
   */
  public ServiceAttachmentConnectedEndpoint setStatus(java.lang.String status) {
    this.status = status;
    return this;
  }

  // Covariant overrides so fluent use of the generic map API keeps this type.

  @Override
  public ServiceAttachmentConnectedEndpoint set(String fieldName, Object value) {
    return (ServiceAttachmentConnectedEndpoint) super.set(fieldName, value);
  }

  @Override
  public ServiceAttachmentConnectedEndpoint clone() {
    return (ServiceAttachmentConnectedEndpoint) super.clone();
  }
}
| apache-2.0 |
RCheungIT/phoenix | phoenix-core/src/it/java/org/apache/phoenix/end2end/ArraysWithNullsIT.java | 14392 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.junit.Assert.assertEquals;
import java.sql.*;
import org.apache.phoenix.schema.types.PTimestamp;
import org.apache.phoenix.schema.types.PhoenixArray;
import org.junit.Test;
/**
 * Verifies that Phoenix arrays containing SQL NULL elements round-trip through
 * UPSERT / SELECT for every supported element type. Note the expectations the
 * original tests encode: NULL date/time/timestamp elements come back as the
 * epoch-zero value, and NULL CHAR elements come back as empty strings.
 */
public class ArraysWithNullsIT extends BaseHBaseManagedTimeTableReuseIT {

    /**
     * Creates a two-column table (VARCHAR key, array column of {@code columnType}),
     * upserts {@code arrayLiteral} under key 'a', reads the row back and asserts
     * that the stored array equals an array built from {@code expectedElements}.
     *
     * @param columnType       SQL type of the array column, e.g. "INTEGER[]"
     * @param arrayLiteral     SQL array constructor used in the UPSERT, e.g. "ARRAY[null,3]"
     * @param jdbcBaseType     base type name passed to {@link Connection#createArrayOf}
     * @param expectedElements elements the round-tripped array must contain
     */
    private void assertArrayRoundTrip(String columnType, String arrayLiteral,
            String jdbcBaseType, Object[] expectedElements) throws Exception {
        // try-with-resources releases the connection/statements even when an
        // assertion fails; the original per-test pattern leaked them on failure.
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String table = generateRandomString();
            try (Statement ddl = conn.createStatement()) {
                ddl.execute("CREATE TABLE " + table + " ( k VARCHAR PRIMARY KEY, a " + columnType + ")");
            }
            try (PreparedStatement stmt = conn.prepareStatement(
                    "UPSERT INTO " + table + " VALUES('a'," + arrayLiteral + ")")) {
                stmt.execute();
            }
            conn.commit();
            try (Statement query = conn.createStatement();
                 ResultSet rs = query.executeQuery("Select a from " + table + " where k = 'a'")) {
                rs.next();
                Array expected = conn.createArrayOf(jdbcBaseType, expectedElements);
                assertEquals(rs.getArray(1), expected);
            }
        }
    }

    @Test
    public void testArrayUpsertIntWithNulls() throws Exception {
        assertArrayRoundTrip("INTEGER[]", "ARRAY[null,3,null]", "INTEGER",
            new Object[] { null, 3, null });
    }

    @Test
    public void testArrayUpsertVarcharWithNulls() throws Exception {
        assertArrayRoundTrip("VARCHAR[]", "ARRAY['10',null]", "VARCHAR",
            new Object[] { "10", null });
    }

    @Test
    public void testArrayUpsertBigIntWithNulls() throws Exception {
        assertArrayRoundTrip("BIGINT[]", "ARRAY[2,null,32335,4]", "BIGINT",
            new Object[] { (long) 2, null, (long) 32335, (long) 4 });
    }

    @Test
    public void testArrayUpsertFloatWithNulls() throws Exception {
        assertArrayRoundTrip("FLOAT[]", "ARRAY[1.1,2.2,null,3.4]", "FLOAT",
            new Object[] { (float) 1.1, (float) 2.2, null, (float) 3.4 });
    }

    @Test
    public void testArrayUpsertSmallIntWithNulls() throws Exception {
        assertArrayRoundTrip("SMALLINT[]", "ARRAY[123,456,null,456]", "SMALLINT",
            new Object[] { (short) 123, (short) 456, null, (short) 456 });
    }

    @Test
    public void testArrayUpsertTinyIntWithNulls() throws Exception {
        assertArrayRoundTrip("TINYINT[]", "ARRAY[123,45,null,45]", "TINYINT",
            new Object[] { (byte) 123, (byte) 45, null, (byte) 45 });
    }

    @Test
    public void testArrayUpsertBooleanWithNulls() throws Exception {
        assertArrayRoundTrip("BOOLEAN[]", "ARRAY[true,false,null,true]", "BOOLEAN",
            new Object[] { true, false, null, true });
    }

    @Test
    public void testArrayUpsertDoubleWithNulls() throws Exception {
        assertArrayRoundTrip("DOUBLE[]", "ARRAY[1.2,2.3,null,3.4]", "DOUBLE",
            new Object[] { 1.2, 2.3, null, 3.4 });
    }

    // NULL date elements are expected back as new Date(0) (epoch zero).
    @Test
    public void testArrayUpsertDateWithNulls1() throws Exception {
        assertArrayRoundTrip("DATE[]",
            "ARRAY[TO_DATE('2015-05-20 06:12:14.184'),null,TO_DATE('2015-05-20 06:12:14.184'),null]",
            "DATE",
            new Date[] { new Date(1432102334184L), new Date(0L), new Date(1432102334184L), new Date(0L) });
    }

    @Test
    public void testArrayUpsertDateWithNulls2() throws Exception {
        assertArrayRoundTrip("DATE[]",
            "ARRAY[TO_DATE('1970-01-01 00:00:00.000'), TO_DATE('2015-05-20 06:12:14.184'),TO_DATE('2015-05-20 06:12:14.184')]",
            "DATE",
            new Date[] { new Date(0L), new Date(1432102334184L), new Date(1432102334184L) });
    }

    @Test
    public void testArrayUpsertTimeWithNulls1() throws Exception {
        assertArrayRoundTrip("TIME[]",
            "ARRAY[TO_TIME('2015-05-20 06:12:14.184'),null,TO_TIME('2015-05-20 06:12:14.184'),null]",
            "TIME",
            new Time[] { new Time(1432102334184L), new Time(0L), new Time(1432102334184L), new Time(0L) });
    }

    @Test
    public void testArrayUpsertTimeWithNulls2() throws Exception {
        assertArrayRoundTrip("TIME[]",
            "ARRAY[TO_TIME('1970-01-01 00:00:00.000'), TO_TIME('2015-05-20 06:12:14.184'),null,TO_TIME('2015-05-20 06:12:14.184'),null]",
            "TIME",
            new Time[] { new Time(0L), new Time(1432102334184L), new Time(0L), new Time(1432102334184L), new Time(0L) });
    }

    @Test
    public void testArrayUpsertTimeStampWithNulls1() throws Exception {
        assertArrayRoundTrip("TIMESTAMP[]",
            "ARRAY[TO_TIMESTAMP('2015-05-20 06:12:14.184'),null,TO_TIMESTAMP('2015-05-20 06:12:14.184'),TO_TIMESTAMP('1970-01-01 00:00:00.000')]",
            "TIMESTAMP",
            new Timestamp[] { new Timestamp(1432102334184L), new Timestamp(0L), new Timestamp(1432102334184L), new Timestamp(0L) });
    }

    @Test
    public void testArrayUpsertTimeStampWithNulls2() throws Exception {
        assertArrayRoundTrip("TIMESTAMP[]",
            "ARRAY[TO_TIMESTAMP('1970-01-01 00:00:00.000'),TO_TIMESTAMP('2015-05-20 06:12:14.184'),TO_TIMESTAMP('1970-01-01 00:00:00.000'),TO_TIMESTAMP('2015-05-20 06:12:14.184'),TO_TIMESTAMP('1970-01-01 00:00:00.000')]",
            "TIMESTAMP",
            new Timestamp[] { new Timestamp(0L), new Timestamp(1432102334184L), new Timestamp(0L), new Timestamp(1432102334184L), new Timestamp(0L) });
    }

    // NULL CHAR elements are expected back as empty strings.
    @Test
    public void testArrayUpsertCharWithNulls1() throws Exception {
        assertArrayRoundTrip("CHAR(15)[]", "ARRAY['foo',null,'fo','foo']", "CHAR",
            new String[] { "foo", "", "fo", "foo" });
    }

    @Test
    public void testArrayUpsertCharWithNulls2() throws Exception {
        assertArrayRoundTrip("CHAR(15)[]", "ARRAY[null,'foo',null,'fo','foo']", "CHAR",
            new String[] { "", "foo", "", "fo", "foo" });
    }
}
| apache-2.0 |
darciopacifico/omr | tags/tag_ModalOK/jsf-app/src/main/java/br/com/tsuite/tiporelacionamento/ProjetoVO.java | 416 | package br.com.tsuite.tiporelacionamento;
import java.io.Serializable;
/**
 * Serializable value object describing a project: a surrogate id plus a
 * human-readable description.
 */
public class ProjetoVO implements Serializable {

    // Explicit serialVersionUID so serialization compatibility does not depend
    // on the compiler-generated default (Effective Java, Item 87).
    private static final long serialVersionUID = 1L;

    // Surrogate identifier; null for instances that have not been persisted.
    private Long id;

    // Human-readable description of the project.
    private String descricao;

    /** @return the project id, or {@code null} if not set */
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** @return the project description, or {@code null} if not set */
    public String getDescricao() {
        return descricao;
    }

    public void setDescricao(String descricao) {
        this.descricao = descricao;
    }
}
| apache-2.0 |
kalaspuffar/pdfbox | preflight/src/main/java/org/apache/pdfbox/preflight/graphic/ColorSpaceHelperFactory.java | 2573 | /*****************************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
package org.apache.pdfbox.preflight.graphic;
import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace;
import org.apache.pdfbox.preflight.PreflightContext;
/**
 * Builds the ColorSpaceHelper that matches the restriction applying to the
 * owner of the ColorSpace entry.
 */
public class ColorSpaceHelperFactory
{
    /**
     * Selects the helper implementation for the given restriction:
     * <UL>
     * <li>NO_PATTERN : a NoPatternColorSpaceHelper
     * <li>ONLY_DEVICE : a DeviceColorSpaceHelper
     * <li>NO_RESTRICTION (and any other value) : a StandardColorSpaceHelper
     * </UL>
     *
     * @param context
     *            the PreflightContext to access useful data
     * @param cs
     *            the High level PDFBox object which represents the ColorSpace
     * @param csr
     *            the color space restriction
     * @return a newly created ColorSpaceHelper instance.
     */
    public ColorSpaceHelper getColorSpaceHelper(PreflightContext context, PDColorSpace cs, ColorSpaceRestriction csr)
    {
        final ColorSpaceHelper helper;
        switch (csr)
        {
            case NO_PATTERN:
                helper = new NoPatternColorSpaceHelper(context, cs);
                break;
            case ONLY_DEVICE:
                helper = new DeviceColorSpaceHelper(context, cs);
                break;
            default:
                helper = new StandardColorSpaceHelper(context, cs);
                break;
        }
        return helper;
    }

    /**
     * Restriction kinds used by callers of this factory to select the helper.
     */
    public enum ColorSpaceRestriction
    {
        NO_RESTRICTION, NO_PATTERN, ONLY_DEVICE
    }
}
| apache-2.0 |
subhrajyotim/camunda-bpm-platform | engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/TaskVariableLocalRestResourceInteractionTest.java | 50701 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest;
import static com.jayway.restassured.RestAssured.given;
import static org.camunda.bpm.engine.rest.helper.MockProvider.EXAMPLE_TASK_ID;
import static org.camunda.bpm.engine.rest.helper.MockProvider.NON_EXISTING_ID;
import static org.camunda.bpm.engine.rest.util.DateTimeUtils.DATE_FORMAT_WITH_TIMEZONE;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import org.camunda.bpm.engine.AuthorizationException;
import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.impl.TaskServiceImpl;
import org.camunda.bpm.engine.impl.digest._apacheCommonsCodec.Base64;
import org.camunda.bpm.engine.impl.util.IoUtil;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.exception.RestException;
import org.camunda.bpm.engine.rest.helper.EqualsList;
import org.camunda.bpm.engine.rest.helper.EqualsMap;
import org.camunda.bpm.engine.rest.helper.ErrorMessageHelper;
import org.camunda.bpm.engine.rest.helper.MockObjectValue;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.helper.VariableTypeHelper;
import org.camunda.bpm.engine.rest.helper.variable.EqualsNullValue;
import org.camunda.bpm.engine.rest.helper.variable.EqualsObjectValue;
import org.camunda.bpm.engine.rest.helper.variable.EqualsPrimitiveValue;
import org.camunda.bpm.engine.rest.helper.variable.EqualsUntypedValue;
import org.camunda.bpm.engine.rest.util.VariablesBuilder;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.camunda.bpm.engine.variable.Variables;
import org.camunda.bpm.engine.variable.type.SerializableValueType;
import org.camunda.bpm.engine.variable.type.ValueType;
import org.camunda.bpm.engine.variable.value.BooleanValue;
import org.camunda.bpm.engine.variable.value.FileValue;
import org.camunda.bpm.engine.variable.value.ObjectValue;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.response.Response;
/**
* @author Daniel Meyer
*
*/
public class TaskVariableLocalRestResourceInteractionTest extends
AbstractRestServiceTest {
@ClassRule
public static TestContainerRule rule = new TestContainerRule();
protected static final String TASK_SERVICE_URL = TEST_RESOURCE_ROOT_PATH + "/task";
protected static final String SINGLE_TASK_URL = TASK_SERVICE_URL + "/{id}";
protected static final String SINGLE_TASK_VARIABLES_URL = SINGLE_TASK_URL + "/localVariables";
protected static final String SINGLE_TASK_SINGLE_VARIABLE_URL = SINGLE_TASK_VARIABLES_URL + "/{varId}";
protected static final String SINGLE_TASK_PUT_SINGLE_VARIABLE_URL = SINGLE_TASK_SINGLE_VARIABLE_URL;
protected static final String SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL = SINGLE_TASK_PUT_SINGLE_VARIABLE_URL + "/data";
protected static final String SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL = SINGLE_TASK_SINGLE_VARIABLE_URL;
protected static final String SINGLE_TASK_MODIFY_VARIABLES_URL = SINGLE_TASK_VARIABLES_URL;
protected TaskService taskServiceMock;
@Before
public void setUpRuntimeData() {
    // Replace the engine's TaskService with a fresh Mockito mock before every test.
    taskServiceMock = mock(TaskService.class);
    when(processEngine.getTaskService()).thenReturn(taskServiceMock);
}
// Rebinds the engine to a TaskServiceImpl mock. Used by tests that stub
// updateVariablesLocal(), which is declared on the impl class rather than on
// the TaskService interface.
// NOTE(review): this local deliberately shadows the taskServiceMock field;
// after calling this, the field still points at the mock from setUpRuntimeData().
private TaskServiceImpl mockTaskServiceImpl() {
    TaskServiceImpl taskServiceMock = mock(TaskServiceImpl.class);
    when(processEngine.getTaskService()).thenReturn(taskServiceMock);
    return taskServiceMock;
}
@Test
public void testGetLocalVariables() {
    // Stub: GET of local variables (deserialized) returns the example variable map.
    when(taskServiceMock.getVariablesLocalTyped(EXAMPLE_TASK_ID, true)).thenReturn(EXAMPLE_VARIABLES);
    // Exercise the endpoint and check value/type of the single example variable.
    Response response = given().pathParam("id", EXAMPLE_TASK_ID)
        .header("accept", MediaType.APPLICATION_JSON)
        .then().expect().statusCode(Status.OK.getStatusCode())
        .body(EXAMPLE_VARIABLE_KEY, notNullValue())
        .body(EXAMPLE_VARIABLE_KEY + ".value", equalTo(EXAMPLE_VARIABLE_VALUE.getValue()))
        .body(EXAMPLE_VARIABLE_KEY + ".type", equalTo(VariableTypeHelper.toExpectedValueTypeName(EXAMPLE_VARIABLE_VALUE.getType())))
        .when().get(SINGLE_TASK_VARIABLES_URL);
    // The response JSON object must contain exactly the one stubbed variable.
    Assert.assertEquals("Should return exactly one variable", 1, response.jsonPath().getMap("").size());
}
@Test
public void testGetLocalObjectVariables() {
    // given: an object variable whose serialized form deliberately differs from
    // the JSON serialization of its payload, so we can tell which one the REST
    // layer returns (here: the deserialized payload, since no query param is set).
    String variableKey = "aVariableId";
    List<String> payload = Arrays.asList("a", "b");
    ObjectValue variableValue =
        MockObjectValue
            .fromObjectValue(Variables
                .objectValue(payload)
                .serializationDataFormat("application/json")
                .create())
            .objectTypeName(ArrayList.class.getName())
            .serializedValue("a serialized value"); // this should differ from the serialized json
    when(taskServiceMock.getVariablesLocalTyped(eq(EXAMPLE_TASK_ID), anyBoolean()))
        .thenReturn(Variables.createVariables().putValueTyped(variableKey, variableValue));
    // when: the deserialized payload plus serialization metadata is returned
    given().pathParam("id", EXAMPLE_TASK_ID)
        .then().expect().statusCode(Status.OK.getStatusCode())
        .body(variableKey + ".value", equalTo(payload))
        .body(variableKey + ".type", equalTo("Object"))
        .body(variableKey + ".valueInfo." + SerializableValueType.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
        .body(variableKey + ".valueInfo." + SerializableValueType.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
        .when().get(SINGLE_TASK_VARIABLES_URL);
    // then: the service was asked for deserialized values (flag = true is the default)
    verify(taskServiceMock).getVariablesLocalTyped(EXAMPLE_TASK_ID, true);
}
@Test
public void testGetLocalObjectVariablesSerialized() {
    // given: a variable available only in serialized form
    String variableKey = "aVariableId";
    ObjectValue variableValue =
        Variables
            .serializedObjectValue("a serialized value")
            .serializationDataFormat("application/json")
            .objectTypeName(ArrayList.class.getName())
            .create();
    when(taskServiceMock.getVariablesLocalTyped(eq(EXAMPLE_TASK_ID), anyBoolean()))
        .thenReturn(Variables.createVariables().putValueTyped(variableKey, variableValue));
    // when: deserializeValues=false must return the raw serialized value
    given()
        .pathParam("id", EXAMPLE_TASK_ID)
        .queryParam("deserializeValues", false)
        .then().expect().statusCode(Status.OK.getStatusCode())
        .body(variableKey + ".value", equalTo("a serialized value"))
        .body(variableKey + ".type", equalTo("Object"))
        .body(variableKey + ".valueInfo." + SerializableValueType.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
        .body(variableKey + ".valueInfo." + SerializableValueType.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
        .when().get(SINGLE_TASK_VARIABLES_URL);
    // then: the query parameter is propagated as flag = false
    verify(taskServiceMock).getVariablesLocalTyped(EXAMPLE_TASK_ID, false);
}
@Test
public void testGetLocalVariablesForNonExistingTaskId() {
    // A ProcessEngineException from the service must surface as HTTP 500 with
    // the exception type and message in the JSON error body.
    when(taskServiceMock.getVariablesLocalTyped(NON_EXISTING_ID, true)).thenThrow(new ProcessEngineException("task " + NON_EXISTING_ID + " doesn't exist"));
    given().pathParam("id", NON_EXISTING_ID)
        .header("accept", MediaType.APPLICATION_JSON)
        .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON)
        .body("type", equalTo(ProcessEngineException.class.getSimpleName()))
        .body("message", equalTo("task " + NON_EXISTING_ID + " doesn't exist"))
        .when().get(SINGLE_TASK_VARIABLES_URL);
}
@Test
public void testGetLocalVariablesThrowsAuthorizationException() {
    // An AuthorizationException must map to HTTP 403 (forbidden), not 500.
    String message = "expected exception";
    when(taskServiceMock.getVariablesLocalTyped(anyString(), anyBoolean())).thenThrow(new AuthorizationException(message));
    given()
        .pathParam("id", EXAMPLE_TASK_ID)
        .then().expect()
        .statusCode(Status.FORBIDDEN.getStatusCode())
        .contentType(ContentType.JSON)
        .body("type", equalTo(AuthorizationException.class.getSimpleName()))
        .body("message", equalTo(message))
        .when()
        .get(SINGLE_TASK_VARIABLES_URL);
}
@Test
public void testLocalVariableModification() {
    // updateVariablesLocal lives on TaskServiceImpl, hence the impl mock.
    TaskServiceImpl taskServiceMock = mockTaskServiceImpl();
    // Request body: one modification ("aKey" -> 123) plus one deletion.
    Map<String, Object> messageBodyJson = new HashMap<String, Object>();
    String variableKey = "aKey";
    int variableValue = 123;
    Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
    messageBodyJson.put("modifications", modifications);
    List<String> deletions = new ArrayList<String>();
    deletions.add("deleteKey");
    messageBodyJson.put("deletions", deletions);
    given().pathParam("id", EXAMPLE_TASK_ID).contentType(ContentType.JSON).body(messageBodyJson)
        .header("accept", MediaType.APPLICATION_JSON)
        .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
        .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);
    // Both the modifications map and the deletions list must be forwarded as-is.
    Map<String, Object> expectedModifications = new HashMap<String, Object>();
    expectedModifications.put(variableKey, variableValue);
    verify(taskServiceMock).updateVariablesLocal(eq(EXAMPLE_TASK_ID), argThat(new EqualsMap(expectedModifications)),
        argThat(new EqualsList(deletions)));
}
// Verifies that a ProcessEngineException from updateVariablesLocal for an unknown task id
// maps to HTTP 500 with a RestException body wrapping the engine's message.
@Test
public void testLocalVariableModificationForNonExistingTaskId() {
  TaskServiceImpl taskServiceMock = mockTaskServiceImpl();
  doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID)).when(taskServiceMock).updateVariablesLocal(anyString(), any(Map.class), any(List.class));
  Map<String, Object> messageBodyJson = new HashMap<String, Object>();
  String variableKey = "aKey";
  int variableValue = 123;
  Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
  messageBodyJson.put("modifications", modifications);
  given().pathParam("id", NON_EXISTING_ID).contentType(ContentType.JSON).body(messageBodyJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode()).contentType(ContentType.JSON)
    .body("type", equalTo(RestException.class.getSimpleName()))
    .body("message", equalTo("Cannot modify variables for task " + NON_EXISTING_ID + ": Cannot find task with id " + NON_EXISTING_ID))
  .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);
}
// Verifies that an empty JSON body (no modifications, no deletions) is accepted
// and answered with 204 No Content.
@Test
public void testEmptyLocalVariableModification() {
  mockTaskServiceImpl();
  given().pathParam("id", EXAMPLE_TASK_ID).contentType(ContentType.JSON).body(EMPTY_JSON_OBJECT)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().post(SINGLE_TASK_MODIFY_VARIABLES_URL);
}
// Verifies that an AuthorizationException from updateVariablesLocal maps to HTTP 403
// Forbidden with the exception type and message in the JSON error body.
@Test
public void testVariableModificationThrowsAuthorizationException() {
  String variableKey = "aKey";
  int variableValue = 123;
  Map<String, Object> messageBodyJson = new HashMap<String, Object>();
  Map<String, Object> modifications = VariablesBuilder.create().variable(variableKey, variableValue).getVariables();
  messageBodyJson.put("modifications", modifications);
  TaskServiceImpl taskServiceMock = mockTaskServiceImpl();
  // fixed typo "excpected" -> "expected" to match the sibling authorization tests;
  // thrown and asserted via the same variable, so the assertion semantics are unchanged
  String message = "expected exception";
  doThrow(new AuthorizationException(message)).when(taskServiceMock).updateVariablesLocal(anyString(), any(Map.class), any(List.class));
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .contentType(ContentType.JSON)
    .body(messageBodyJson)
  .then().expect()
    .statusCode(Status.FORBIDDEN.getStatusCode())
    .body("type", is(AuthorizationException.class.getSimpleName()))
    .body("message", is(message))
  .when()
    .post(SINGLE_TASK_MODIFY_VARIABLES_URL);
}
// Verifies that GET of a single local variable returns 200 with the primitive value
// and its type name in the JSON body.
@Test
public void testGetSingleLocalVariable() {
  String variableKey = "aVariableKey";
  int variableValue = 123;
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean()))
    .thenReturn(Variables.integerValue(variableValue));
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.OK.getStatusCode())
    .body("value", is(123))
    .body("type", is("Integer"))
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
}
// Verifies that the binary-data endpoint streams a bytes variable as
// application/octet-stream and fetches it without deserialization (deserialize=false).
@Test
public void testGetSingleLocalVariableData() {
  when(taskServiceMock.getVariableLocalTyped(anyString(), eq(EXAMPLE_BYTES_VARIABLE_KEY), eq(false))).thenReturn(EXAMPLE_VARIABLE_VALUE_BYTES);
  given()
    .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
    .pathParam("varId", EXAMPLE_BYTES_VARIABLE_KEY)
  .then()
    .expect()
      .statusCode(Status.OK.getStatusCode())
      .contentType(MediaType.APPLICATION_OCTET_STREAM)
  .when()
    .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).getVariableLocalTyped(MockProvider.EXAMPLE_TASK_ID, EXAMPLE_BYTES_VARIABLE_KEY, false);
}
// Verifies that requesting binary data for a non-existing variable yields 404
// with an InvalidRequestException error body.
@Test
public void testGetSingleLocalVariableDataNonExisting() {
  when(taskServiceMock.getVariableLocalTyped(anyString(), eq("nonExisting"), eq(false))).thenReturn(null);
  given()
    .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
    .pathParam("varId", "nonExisting")
  .then()
    .expect()
      .statusCode(Status.NOT_FOUND.getStatusCode())
      .body("type", is(InvalidRequestException.class.getSimpleName()))
      .body("message", is("task variable with name " + "nonExisting" + " does not exist"))
  .when()
    .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).getVariableLocalTyped(MockProvider.EXAMPLE_TASK_ID, "nonExisting", false);
}
// Verifies that requesting the binary-data endpoint for a non-binary variable
// is rejected with 400 Bad Request.
@Test
public void testGetSingleLocalVariabledataNotBinary() {
  when(taskServiceMock.getVariableLocalTyped(anyString(), eq(EXAMPLE_VARIABLE_KEY), eq(false))).thenReturn(EXAMPLE_VARIABLE_VALUE);
  given()
    .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
    .pathParam("varId", EXAMPLE_VARIABLE_KEY)
  .then()
    .expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
  .when()
    .get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).getVariableLocalTyped(MockProvider.EXAMPLE_TASK_ID, EXAMPLE_VARIABLE_KEY, false);
}
// Verifies that a deserialized object variable is returned with its payload,
// serialization data format, and object type name, and that the service is
// queried with deserialize=true.
@Test
public void testGetSingleLocalObjectVariable() {
  // given
  String variableKey = "aVariableId";
  List<String> payload = Arrays.asList("a", "b");
  ObjectValue variableValue =
      MockObjectValue
      .fromObjectValue(Variables
          .objectValue(payload)
          .serializationDataFormat("application/json")
          .create())
      .objectTypeName(ArrayList.class.getName())
      .serializedValue("a serialized value"); // this should differ from the serialized json
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  // when
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
  .then().expect().statusCode(Status.OK.getStatusCode())
    .body("value", equalTo(payload))
    .body("type", equalTo("Object"))
    .body("valueInfo." + SerializableValueType.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
    .body("valueInfo." + SerializableValueType.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
  // then
  verify(taskServiceMock).getVariableLocalTyped(EXAMPLE_TASK_ID, variableKey, true);
}
// Verifies that with deserializeValue=false the raw serialized value is returned
// unchanged and the service is queried with deserialize=false.
@Test
public void testGetSingleLocalObjectVariableSerialized() {
  // given
  String variableKey = "aVariableId";
  ObjectValue variableValue =
      Variables
      .serializedObjectValue("a serialized value")
      .serializationDataFormat("application/json")
      .objectTypeName(ArrayList.class.getName())
      .create();
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  // when
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
    .queryParam("deserializeValue", false)
  .then().expect().statusCode(Status.OK.getStatusCode())
    .body("value", equalTo("a serialized value"))
    .body("type", equalTo("Object"))
    .body("valueInfo." + SerializableValueType.VALUE_INFO_SERIALIZATION_DATA_FORMAT, equalTo("application/json"))
    .body("valueInfo." + SerializableValueType.VALUE_INFO_OBJECT_TYPE_NAME, equalTo(ArrayList.class.getName()))
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
  // then
  verify(taskServiceMock).getVariableLocalTyped(EXAMPLE_TASK_ID, variableKey, false);
}
// Verifies that GET of a non-existing local variable returns 404 with an
// InvalidRequestException error body.
@Test
public void testNonExistingLocalVariable() {
  String variableKey = "aVariableKey";
  when(taskServiceMock.getVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey))).thenReturn(null);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NOT_FOUND.getStatusCode())
    .body("type", is(InvalidRequestException.class.getSimpleName()))
    .body("message", is("task variable with name " + variableKey + " does not exist"))
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
}
// Verifies that a ProcessEngineException for an unknown task id maps to HTTP 500
// with a RestException body wrapping the engine's message.
@Test
public void testGetLocalVariableForNonExistingTaskId() {
  String variableKey = "aVariableKey";
  when(taskServiceMock.getVariableLocalTyped(eq(NON_EXISTING_ID), eq(variableKey), anyBoolean()))
    .thenThrow(new ProcessEngineException("task " + NON_EXISTING_ID + " doesn't exist"));
  given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode())
    .body("type", is(RestException.class.getSimpleName()))
    .body("message", is("Cannot get task variable " + variableKey + ": task " + NON_EXISTING_ID + " doesn't exist"))
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
}
// Verifies that an AuthorizationException from getVariableLocalTyped maps to HTTP 403
// Forbidden with the exception type and message in the JSON error body.
@Test
public void testGetSingleLocalVariableThrowsAuthorizationException() {
  String variableKey = "aVariableKey";
  // fixed typo "excpected" -> "expected" to match the sibling authorization tests;
  // thrown and asserted via the same variable, so the assertion semantics are unchanged
  String message = "expected exception";
  when(taskServiceMock.getVariableLocalTyped(anyString(), anyString(), anyBoolean())).thenThrow(new AuthorizationException(message));
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.FORBIDDEN.getStatusCode())
    .body("type", is(AuthorizationException.class.getSimpleName()))
    .body("message", is(message))
  .when()
    .get(SINGLE_TASK_SINGLE_VARIABLE_URL);
}
// Verifies that GET of a file variable via the JSON endpoint returns metadata
// (mimeType, filename) in valueInfo and a null value (content only via binary endpoint).
@Test
public void testGetFileVariable() {
  String variableKey = "aVariableKey";
  final byte[] byteContent = "some bytes".getBytes();
  String filename = "test.txt";
  String mimeType = "text/plain";
  FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(mimeType).create();
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .contentType(ContentType.JSON.toString())
  .and()
    .body("valueInfo.mimeType", equalTo(mimeType))
    .body("valueInfo.filename", equalTo(filename))
    .body("value", nullValue())
  .when().get(SINGLE_TASK_SINGLE_VARIABLE_URL);
}
// Verifies that downloading a file variable without content yields 200 with an
// empty body and the file's declared content type.
@Test
public void testGetNullFileVariable() {
  String variableKey = "aVariableKey";
  String filename = "test.txt";
  String mimeType = "text/plain";
  FileValue variableValue = Variables.fileValue(filename).mimeType(mimeType).create();
  when(taskServiceMock.getVariableLocalTyped(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey), anyBoolean()))
    .thenReturn(variableValue);
  given()
    .pathParam("id", MockProvider.EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .contentType(ContentType.TEXT.toString())
  .and()
    .body(is(equalTo("")))
  .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that downloading a file variable with a declared mime type serves
// the raw content under that content type.
@Test
public void testGetFileVariableDownloadWithType() {
  String variableKey = "aVariableKey";
  final byte[] byteContent = "some bytes".getBytes();
  String filename = "test.txt";
  FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(ContentType.TEXT.toString()).create();
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .contentType(ContentType.TEXT.toString())
  .and()
    .body(is(equalTo(new String(byteContent))))
  .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that a file variable with mime type AND encoding is served with a
// "type;charset=encoding" Content-Type header.
@Test
public void testGetFileVariableDownloadWithTypeAndEncoding() {
  String variableKey = "aVariableKey";
  final byte[] byteContent = "some bytes".getBytes();
  String filename = "test.txt";
  String encoding = "UTF-8";
  FileValue variableValue = Variables.fileValue(filename).file(byteContent).mimeType(ContentType.TEXT.toString()).encoding(encoding).create();
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  Response response = given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .body(is(equalTo(new String(byteContent))))
  .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  // strip optional whitespace around ";" so the assertion is container-independent
  String contentType = response.contentType().replaceAll(" ", "");
  assertThat(contentType, is(ContentType.TEXT + ";charset=" + encoding));
}
// Verifies that a file variable without a mime type is served as
// application/octet-stream with a Content-Disposition naming the file.
@Test
public void testGetFileVariableDownloadWithoutType() {
  String variableKey = "aVariableKey";
  final byte[] byteContent = "some bytes".getBytes();
  String filename = "test.txt";
  FileValue variableValue = Variables.fileValue(filename).file(byteContent).create();
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.OK.getStatusCode())
    .contentType(MediaType.APPLICATION_OCTET_STREAM)
  .and()
    .body(is(equalTo(new String(byteContent))))
    .header("Content-Disposition", containsString(filename))
  .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that the binary download endpoint rejects non-file, non-bytes
// variables (here a Boolean) with 400 Bad Request.
@Test
public void testCannotDownloadVariableOtherThanFile() {
  String variableKey = "aVariableKey";
  BooleanValue variableValue = Variables.booleanValue(true);
  when(taskServiceMock.getVariableLocalTyped(eq(EXAMPLE_TASK_ID), eq(variableKey), anyBoolean())).thenReturn(variableValue);
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
  .then().expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
  .when().get(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that PUT of an untyped variable returns 204 and forwards the value
// to setVariableLocal as an untyped value.
@Test
public void testPutSingleLocalVariable() {
  String variableKey = "aVariableKey";
  String variableValue = "aVariableValue";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsUntypedValue.matcher().value(variableValue)));
}
// Verifies that PUT with type "Integer" converts the value to a typed integer
// before calling setVariableLocal.
@Test
public void testPutSingleVariableWithTypeInteger() {
  String variableKey = "aVariableKey";
  Integer variableValue = 123;
  String type = "Integer";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.integerValue(variableValue)));
}
// Verifies that an unparseable Integer value is rejected with 400 and an
// InvalidRequestException carrying the conversion failure message.
@Test
public void testPutSingleVariableWithUnparseableInteger() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "Integer";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": "
        + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Integer.class)))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that PUT with type "Short" converts the value to a typed short
// before calling setVariableLocal.
@Test
public void testPutSingleVariableWithTypeShort() {
  String variableKey = "aVariableKey";
  Short variableValue = 123;
  String type = "Short";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.shortValue(variableValue)));
}
// Verifies that an unparseable Short value is rejected with 400 and an
// InvalidRequestException carrying the conversion failure message.
@Test
public void testPutSingleVariableWithUnparseableShort() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "Short";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": "
        + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Short.class)))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that PUT with type "Long" converts the value to a typed long
// before calling setVariableLocal.
@Test
public void testPutSingleVariableWithTypeLong() {
  String variableKey = "aVariableKey";
  Long variableValue = Long.valueOf(123);
  String type = "Long";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.longValue(variableValue)));
}
// Verifies that an unparseable Long value is rejected with 400 and an
// InvalidRequestException carrying the conversion failure message.
@Test
public void testPutSingleVariableWithUnparseableLong() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "Long";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  // fixed copy-paste slip: this is a task endpoint, so use EXAMPLE_TASK_ID like the
  // sibling unparseable-value tests (was MockProvider.EXAMPLE_EXECUTION_ID)
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": "
        + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Long.class)))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that PUT with type "Double" converts the value to a typed double
// before calling setVariableLocal.
@Test
public void testPutSingleVariableWithTypeDouble() {
  String variableKey = "aVariableKey";
  Double variableValue = 123.456;
  String type = "Double";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.doubleValue(variableValue)));
}
// Verifies that an unparseable Double value is rejected with 400 and an
// InvalidRequestException carrying the conversion failure message.
@Test
public void testPutSingleVariableWithUnparseableDouble() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "Double";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": "
        + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Double.class)))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that PUT with type "Boolean" converts the value to a typed boolean
// before calling setVariableLocal.
@Test
public void testPutSingleVariableWithTypeBoolean() {
  String variableKey = "aVariableKey";
  Boolean variableValue = true;
  String type = "Boolean";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.booleanValue(variableValue)));
}
// Verifies that PUT with type "Date" parses the timezone-formatted string into
// a Date value before calling setVariableLocal. Round-trips "now" through the
// formatter so the expected Date matches the serialized precision.
@Test
public void testPutSingleVariableWithTypeDate() throws Exception {
  Date now = new Date();
  String variableKey = "aVariableKey";
  String variableValue = DATE_FORMAT_WITH_TIMEZONE.format(now);
  String type = "Date";
  Date expectedValue = DATE_FORMAT_WITH_TIMEZONE.parse(variableValue);
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.dateValue(expectedValue)));
}
// Verifies that an unparseable Date value is rejected with 400 and an
// InvalidRequestException carrying the conversion failure message.
@Test
public void testPutSingleVariableWithUnparseableDate() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "Date";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": "
        + ErrorMessageHelper.getExpectedFailingConversionMessage(variableValue, type, Date.class)))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that an unknown value type name is rejected with 400 and an
// "Unsupported value type" message.
@Test
public void testPutSingleVariableWithNotSupportedType() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "X";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode())
    .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
    .body("message", equalTo("Cannot put task variable " + variableKey + ": Unsupported value type 'X'"))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that PUT with an empty JSON body sets the variable to a null value.
@Test
public void testPutSingleLocalVariableWithNoValue() {
  String variableKey = "aVariableKey";
  given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(EMPTY_JSON_OBJECT)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsNullValue.matcher()));
}
// Verifies that a ProcessEngineException from setVariableLocal for an unknown task id
// maps to HTTP 500 with a RestException body wrapping the engine's message.
@Test
public void testPutLocalVariableForNonExistingTaskId() {
  String variableKey = "aVariableKey";
  String variableValue = "aVariableValue";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue);
  doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID))
    .when(taskServiceMock).setVariableLocal(eq(NON_EXISTING_ID), eq(variableKey), any());
  given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey)
    .contentType(ContentType.JSON).body(variableJson)
    .header("accept", MediaType.APPLICATION_JSON)
  .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode())
    .body("type", is(RestException.class.getSimpleName()))
    .body("message", is("Cannot put task variable " + variableKey + ": Cannot find task with id " + NON_EXISTING_ID))
  .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that an AuthorizationException from setVariableLocal maps to HTTP 403
// Forbidden with the exception type and message in the JSON error body.
@Test
public void testPutSingleVariableThrowsAuthorizationException() {
  String variableKey = "aVariableKey";
  String variableValue = "1abc";
  String type = "String";
  Map<String, Object> variableJson = VariablesBuilder.getVariableValueMap(variableValue, type);
  String message = "expected exception";
  doThrow(new AuthorizationException(message)).when(taskServiceMock).setVariableLocal(anyString(), anyString(), any());
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
    .contentType(ContentType.JSON)
    .body(variableJson)
  .then().expect()
    .statusCode(Status.FORBIDDEN.getStatusCode())
    .body("type", equalTo(AuthorizationException.class.getSimpleName()))
    .body("message", equalTo(message))
  .when()
    .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
}
// Verifies that POSTing multipart binary data without an explicit valueType
// stores the content as a bytes variable.
@Test
public void testPostSingleLocalBinaryVariable() throws Exception {
  byte[] bytes = "someContent".getBytes();
  String variableKey = "aVariableKey";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", null, bytes)
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.bytesValue(bytes)));
}
// Verifies that POSTing multipart binary data with valueType "Bytes" stores the
// content as a bytes variable.
@Test
public void testPostSingleLocalBinaryVariableWithValueType() throws Exception {
  byte[] bytes = "someContent".getBytes();
  String variableKey = "aVariableKey";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", null, bytes)
    .multiPart("valueType", "Bytes", "text/plain")
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.bytesValue(bytes)));
}
// Verifies that POSTing an empty multipart body stores an empty byte array.
@Test
public void testPostSingleLocalBinaryVariableWithNoValue() throws Exception {
  byte[] bytes = new byte[0];
  String variableKey = "aVariableKey";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", null, bytes)
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsPrimitiveValue.bytesValue(bytes)));
}
// Verifies that an AuthorizationException from setVariableLocal on the binary
// endpoint maps to HTTP 403 Forbidden with a JSON error body.
@Test
public void testPutSingleBinaryVariableThrowsAuthorizationException() {
  byte[] bytes = "someContent".getBytes();
  String variableKey = "aVariableKey";
  String message = "expected exception";
  doThrow(new AuthorizationException(message)).when(taskServiceMock).setVariableLocal(anyString(), anyString(), any());
  given()
    .pathParam("id", EXAMPLE_TASK_ID)
    .pathParam("varId", variableKey)
    .multiPart("data", "unspecified", bytes)
  .expect()
    .statusCode(Status.FORBIDDEN.getStatusCode())
    .contentType(ContentType.JSON)
    .body("type", equalTo(AuthorizationException.class.getSimpleName()))
    .body("message", equalTo(message))
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that POSTing a JSON-serialized value plus its canonical Java type name
// results in a deserialized object variable being set on the task.
@Test
public void testPostSingleLocalSerializableVariable() throws Exception {
  ArrayList<String> serializable = new ArrayList<String>();
  serializable.add("foo");
  ObjectMapper mapper = new ObjectMapper();
  String jsonBytes = mapper.writeValueAsString(serializable);
  // canonical type name, e.g. "java.util.ArrayList<java.lang.String>"
  String typeName = TypeFactory.defaultInstance().constructType(serializable.getClass()).toCanonical();
  String variableKey = "aVariableKey";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", jsonBytes, MediaType.APPLICATION_JSON)
    .multiPart("type", typeName, MediaType.TEXT_PLAIN)
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock).setVariableLocal(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
      argThat(EqualsObjectValue.objectValueMatcher().isDeserialized().value(serializable)));
}
// Verifies that a serialized value with an unrecognized content type is rejected
// with 400 and no variable is set.
@Test
public void testPostSingleLocalSerializableVariableUnsupportedMediaType() throws Exception {
  ArrayList<String> serializable = new ArrayList<String>();
  serializable.add("foo");
  ObjectMapper mapper = new ObjectMapper();
  String jsonBytes = mapper.writeValueAsString(serializable);
  String typeName = TypeFactory.defaultInstance().constructType(serializable.getClass()).toCanonical();
  String variableKey = "aVariableKey";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", jsonBytes, "unsupported")
    .multiPart("type", typeName, MediaType.TEXT_PLAIN)
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.BAD_REQUEST.getStatusCode())
    .body(containsString("Unrecognized content type for serialized java type: unsupported"))
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  verify(taskServiceMock, never()).setVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey),
      eq(serializable));
}
// Verifies that a multipart file upload carrying both mime type and encoding
// produces a FileValue preserving filename, mime type, encoding, and content.
@Test
public void testPostSingleLocalFileVariableWithEncodingAndMimeType() throws Exception {
  byte[] value = "some text".getBytes();
  String variableKey = "aVariableKey";
  String encoding = "utf-8";
  String filename = "test.txt";
  String mimetype = MediaType.TEXT_PLAIN;
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    // encoding is transported as a content-type parameter on the data part
    .multiPart("data", filename, value, mimetype + "; encoding="+encoding)
    .multiPart("valueType", "File", "text/plain")
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
  verify(taskServiceMock).setVariableLocal(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
      captor.capture());
  FileValue captured = captor.getValue();
  assertThat(captured.getEncoding(), is(encoding));
  assertThat(captured.getFilename(), is(filename));
  assertThat(captured.getMimeType(), is(mimetype));
  assertThat(IoUtil.readInputStream(captured.getValue(), null), is(value));
}
// Verifies that a multipart file upload with a mime type but no encoding produces
// a FileValue preserving filename, mime type, and content, with a null encoding.
@Test
public void testPostSingleLocalFileVariableWithMimeType() throws Exception {
  byte[] value = "some text".getBytes();
  // removed dead code: a Base64-encoded copy of the content was computed but never used
  String variableKey = "aVariableKey";
  String filename = "test.txt";
  String mimetype = MediaType.TEXT_PLAIN;
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", filename, value, mimetype)
    .multiPart("valueType", "File", "text/plain")
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
  verify(taskServiceMock).setVariableLocal(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
      captor.capture());
  FileValue captured = captor.getValue();
  assertThat(captured.getEncoding(), is(nullValue()));
  assertThat(captured.getFilename(), is(filename));
  assertThat(captured.getMimeType(), is(mimetype));
  assertThat(IoUtil.readInputStream(captured.getValue(), null), is(value));
}
// Verifies that supplying an encoding without a mime type is rejected with 400.
@Test
public void testPostSingleLocalFileVariableWithEncoding() throws Exception {
  byte[] value = "some text".getBytes();
  String variableKey = "aVariableKey";
  String encoding = "utf-8";
  String filename = "test.txt";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", filename, value, "encoding="+encoding)
    .multiPart("valueType", "File", "text/plain")
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    //when the user passes an encoding, he has to provide the type, too
    .statusCode(Status.BAD_REQUEST.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
}
// Verifies that uploading only a filename with empty content produces an empty
// FileValue defaulting to application/octet-stream and no encoding.
@Test
public void testPostSingleLocalFileVariableOnlyFilename() throws Exception {
  String variableKey = "aVariableKey";
  String filename = "test.txt";
  given()
    .pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
    .multiPart("data", filename, new byte[0])
    .multiPart("valueType", "File", "text/plain")
    .header("accept", MediaType.APPLICATION_JSON)
  .expect()
    .statusCode(Status.NO_CONTENT.getStatusCode())
  .when()
    .post(SINGLE_TASK_SINGLE_BINARY_VARIABLE_URL);
  ArgumentCaptor<FileValue> captor = ArgumentCaptor.forClass(FileValue.class);
  verify(taskServiceMock).setVariableLocal(eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
      captor.capture());
  FileValue captured = captor.getValue();
  assertThat(captured.getEncoding(), is(nullValue()));
  assertThat(captured.getFilename(), is(filename));
  assertThat(captured.getMimeType(), is(MediaType.APPLICATION_OCTET_STREAM));
  assertThat(captured.getValue().available(), is(0));
}
  /**
   * PUTs a serialized object value (JSON payload with value type OBJECT, a data
   * format and a root type name). Expects 204 NO_CONTENT and verifies that
   * {@code TaskService#setVariableLocal} receives an object value whose
   * serialized value, serialization format and object type name match the request.
   */
  @Test
  public void testPutSingleLocalVariableFromSerialized() throws Exception {
    String serializedValue = "{\"prop\" : \"value\"}";
    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(serializedValue, ValueType.OBJECT.getName(), "aDataFormat", "aRootType");
    String variableKey = "aVariableKey";
    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON)
      .body(requestJson)
    .expect()
      .statusCode(Status.NO_CONTENT.getStatusCode())
    .when()
      .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
    // the matcher checks the ObjectValue handed to the engine field by field
    verify(taskServiceMock).setVariableLocal(
        eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsObjectValue.objectValueMatcher()
          .serializedValue(serializedValue)
          .serializationFormat("aDataFormat")
          .objectTypeName("aRootType")));
  }
  /**
   * PUTs a serialized value declaring a value type that does not exist.
   * The endpoint must answer 400 BAD_REQUEST with an
   * {@code InvalidRequestException} body naming the unsupported type.
   */
  @Test
  public void testPutSingleLocalVariableFromInvalidSerialized() throws Exception {
    String serializedValue = "{\"prop\" : \"value\"}";
    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(serializedValue, "aNonExistingType", null, null);
    String variableKey = "aVariableKey";
    given()
      .pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON)
      .body(requestJson)
    .expect()
      .statusCode(Status.BAD_REQUEST.getStatusCode())
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Cannot put task variable aVariableKey: Unsupported value type 'aNonExistingType'"))
    .when()
      .put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
  }
  /**
   * PUTs an OBJECT-typed variable with a null serialized value and no
   * format/type metadata. Expects 204 NO_CONTENT and verifies that the engine
   * receives an object value matching the default (empty) matcher, i.e. without
   * any serialized payload constraints.
   */
  @Test
  public void testPutSingleLocalVariableFromSerializedWithNoValue() {
    String variableKey = "aVariableKey";
    Map<String, Object> requestJson = VariablesBuilder
        .getObjectValueMap(null, ValueType.OBJECT.getName(), null, null);
    given().pathParam("id", MockProvider.EXAMPLE_TASK_ID).pathParam("varId", variableKey)
      .contentType(ContentType.JSON).body(requestJson)
      .then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
      .when().put(SINGLE_TASK_PUT_SINGLE_VARIABLE_URL);
    verify(taskServiceMock).setVariableLocal(
        eq(MockProvider.EXAMPLE_TASK_ID), eq(variableKey),
        argThat(EqualsObjectValue.objectValueMatcher()));
  }
@Test
public void testDeleteSingleLocalVariable() {
String variableKey = "aVariableKey";
given().pathParam("id", EXAMPLE_TASK_ID).pathParam("varId", variableKey)
.header("accept", MediaType.APPLICATION_JSON)
.then().expect().statusCode(Status.NO_CONTENT.getStatusCode())
.when().delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);
verify(taskServiceMock).removeVariableLocal(eq(EXAMPLE_TASK_ID), eq(variableKey));
}
  /**
   * DELETEs a local variable for a task id the engine cannot resolve: the
   * mocked task service throws a {@code ProcessEngineException}, and the
   * endpoint is expected to translate it into 500 INTERNAL_SERVER_ERROR with a
   * {@code RestException} body carrying a composed error message.
   */
  @Test
  public void testDeleteLocalVariableForNonExistingTaskId() {
    String variableKey = "aVariableKey";
    // make the engine fail for the unknown task id
    doThrow(new ProcessEngineException("Cannot find task with id " + NON_EXISTING_ID))
      .when(taskServiceMock).removeVariableLocal(eq(NON_EXISTING_ID), eq(variableKey));
    given().pathParam("id", NON_EXISTING_ID).pathParam("varId", variableKey)
      .header("accept", MediaType.APPLICATION_JSON)
      .then().expect().statusCode(Status.INTERNAL_SERVER_ERROR.getStatusCode())
      .contentType(ContentType.JSON)
      .body("type", is(RestException.class.getSimpleName()))
      .body("message", is("Cannot delete task variable " + variableKey + ": Cannot find task with id " + NON_EXISTING_ID))
      .when().delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);
  }
@Test
public void testDeleteVariableThrowsAuthorizationException() {
String variableKey = "aVariableKey";
String message = "expected exception";
doThrow(new AuthorizationException(message)).when(taskServiceMock).removeVariableLocal(anyString(), anyString());
given()
.pathParam("id", EXAMPLE_TASK_ID)
.pathParam("varId", variableKey)
.then().expect()
.statusCode(Status.FORBIDDEN.getStatusCode())
.contentType(ContentType.JSON)
.body("type", is(AuthorizationException.class.getSimpleName()))
.body("message", is(message))
.when()
.delete(SINGLE_TASK_DELETE_SINGLE_VARIABLE_URL);
}
}
| apache-2.0 |
dkhwangbo/druid | server/src/main/java/org/apache/druid/client/cache/ForegroundCachePopulator.java | 4416 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.client.cache;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.SequenceWrapper;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.logger.Logger;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
/**
 * {@link CachePopulator} that serializes results into an in-memory buffer while the wrapped
 * {@link Sequence} is being consumed (hence "foreground"), and stores the buffer as a single
 * cache entry once the sequence completes successfully.
 */
public class ForegroundCachePopulator implements CachePopulator
{
  private static final Logger log = new Logger(ForegroundCachePopulator.class);

  // Serializes access to the per-wrap JsonGenerator / byte buffer between the element-mapping
  // function and the sequence-completion callback in after().
  private final Object lock = new Object();
  private final ObjectMapper objectMapper;
  private final CachePopulatorStats cachePopulatorStats;

  // Maximum size of a single cache entry, in bytes; a value <= 0 disables the limit
  // (see the "maxEntrySize > 0" guards below).
  private final long maxEntrySize;

  /**
   * @param objectMapper        mapper used to serialize cache values
   * @param cachePopulatorStats sink for ok / error / oversized counters
   * @param maxEntrySize        maximum cache entry size in bytes ({@code <= 0} means unlimited)
   */
  public ForegroundCachePopulator(
      final ObjectMapper objectMapper,
      final CachePopulatorStats cachePopulatorStats,
      final long maxEntrySize
  )
  {
    this.objectMapper = Preconditions.checkNotNull(objectMapper, "objectMapper");
    this.cachePopulatorStats = Preconditions.checkNotNull(cachePopulatorStats, "cachePopulatorStats");
    this.maxEntrySize = maxEntrySize;
  }

  /**
   * Wraps {@code sequence} so that, as it streams by, each element is converted via
   * {@code cacheFn} and serialized into a byte buffer. When the sequence is fully consumed
   * without error and the buffer does not exceed {@code maxEntrySize}, the buffer is written to
   * {@code cache} under {@code cacheKey}. Elements pass through to the caller unchanged.
   */
  @Override
  public <T, CacheType> Sequence<T> wrap(
      final Sequence<T> sequence,
      final Function<T, CacheType> cacheFn,
      final Cache cache,
      final Cache.NamedKey cacheKey
  )
  {
    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    // Set once the serialized size exceeds maxEntrySize; stops further serialization work.
    final AtomicBoolean tooBig = new AtomicBoolean(false);
    final JsonGenerator jsonGenerator;
    try {
      jsonGenerator = objectMapper.getFactory().createGenerator(bytes);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    return Sequences.wrap(
        Sequences.map(
            sequence,
            input -> {
              if (!tooBig.get()) {
                synchronized (lock) {
                  try {
                    jsonGenerator.writeObject(cacheFn.apply(input));
                    // Not flushing jsonGenerator before checking this, but should be ok since Jackson buffers are
                    // typically just a few KB, and we don't want to waste cycles flushing.
                    if (maxEntrySize > 0 && bytes.size() > maxEntrySize) {
                      tooBig.set(true);
                    }
                  }
                  catch (IOException e) {
                    throw new RuntimeException(e);
                  }
                }
              }
              // elements are passed through to the consumer untouched
              return input;
            }
        ),
        new SequenceWrapper()
        {
          @Override
          public void after(final boolean isDone, final Throwable thrown) throws Exception
          {
            synchronized (lock) {
              // closing flushes any bytes still buffered inside the generator
              jsonGenerator.close();
              if (isDone) {
                // Check tooBig, then check maxEntrySize one more time, after closing/flushing jsonGenerator.
                if (tooBig.get() || (maxEntrySize > 0 && bytes.size() > maxEntrySize)) {
                  cachePopulatorStats.incrementOversized();
                  return;
                }
                try {
                  cache.put(cacheKey, bytes.toByteArray());
                  cachePopulatorStats.incrementOk();
                }
                catch (Exception e) {
                  // cache failures are non-fatal: log, count, and move on
                  log.warn(e, "Unable to write to cache");
                  cachePopulatorStats.incrementError();
                }
              }
            }
          }
        }
    );
  }
}
| apache-2.0 |
masaki-yamakawa/geode | geode-serialization/src/main/java/org/apache/geode/internal/serialization/DSFIDNotFoundException.java | 1656 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.serialization;
import java.io.NotSerializableException;
/**
 * Exception to indicate that a specified DSFID type could not be found (e.g. due to class being
 * absent in lower product versions).
 *
 * <p>Instances are immutable: both the offending DSFID and the version ordinal are captured at
 * construction time and never change afterwards.
 */
public class DSFIDNotFoundException extends NotSerializableException {
  private static final long serialVersionUID = 130596009484324655L;

  /** The DSFID that could not be resolved. */
  private final int dsfid;

  /** Ordinal of the product version that was current when this exception was created. */
  private final short versionOrdinal;

  /**
   * Constructs a DSFIDNotFoundException object with message string.
   *
   * @param msg exception message
   * @param dsfid the DSFID that could not be found
   */
  public DSFIDNotFoundException(String msg, int dsfid) {
    super(msg);
    this.dsfid = dsfid;
    this.versionOrdinal = KnownVersion.getCurrentVersion().ordinal();
  }

  /**
   * Returns the DSFID that could not be resolved.
   */
  public int getUnknownDSFID() {
    return this.dsfid;
  }

  /**
   * Returns the ordinal of the product version current when this exception was created.
   */
  public short getProductVersionOrdinal() {
    return this.versionOrdinal;
  }
}
| apache-2.0 |
salyh/geronimo-specs | geronimo-javamail_1.4_spec/src/main/java/javax/mail/event/MessageCountAdapter.java | 1202 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.mail.event;
/**
 * An adaptor that receives message count events.
 * This is a default implementation where the handlers perform no action.
 *
 * @version $Rev$ $Date$
 */
public abstract class MessageCountAdapter implements MessageCountListener {
    /**
     * Called when messages are added; this default implementation does nothing.
     *
     * @param event the event describing the change
     */
    public void messagesAdded(MessageCountEvent event) {
    }

    /**
     * Called when messages are removed; this default implementation does nothing.
     *
     * @param event the event describing the change
     */
    public void messagesRemoved(MessageCountEvent event) {
    }
}
| apache-2.0 |
rekhajoshm/mapreduce-fork | src/java/org/apache/hadoop/mapred/FileInputFormat.java | 22363 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.net.NodeBase;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
/**
 * A base class for file-based {@link InputFormat}.
 *
 * <p><code>FileInputFormat</code> is the base class for all file-based
 * <code>InputFormat</code>s. This provides a generic implementation of
 * {@link #getSplits(JobConf, int)}.
 * Subclasses of <code>FileInputFormat</code> can also override the
 * {@link #isSplitable(FileSystem, Path)} method to ensure input-files are
 * not split-up and are processed as a whole by {@link Mapper}s.
 * @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}
 *  instead.
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
  public static final Log LOG =
    LogFactory.getLog(FileInputFormat.class);

  public static final String NUM_INPUT_FILES =
    org.apache.hadoop.mapreduce.lib.input.FileInputFormat.NUM_INPUT_FILES;

  // Allow the final split of a file to be up to 10% larger than splitSize
  // rather than creating a tiny trailing split (see getSplits).
  private static final double SPLIT_SLOP = 1.1;   // 10% slop

  // Lower bound on split size, adjustable via setMinSplitSize.
  private long minSplitSize = 1;

  // Default filter: skips "hidden" paths, i.e. names starting with '_' or '.'.
  private static final PathFilter hiddenFileFilter = new PathFilter(){
      public boolean accept(Path p){
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    };

  /**
   * Sets the minimum split size used by {@link #getSplits(JobConf, int)}.
   *
   * @param minSplitSize the minimum split size in bytes
   */
  protected void setMinSplitSize(long minSplitSize) {
    this.minSplitSize = minSplitSize;
  }

  /**
   * Proxy PathFilter that accepts a path only if all filters given in the
   * constructor do. Used by the listPaths() to apply the built-in
   * hiddenFileFilter together with a user provided one (if any).
   */
  private static class MultiPathFilter implements PathFilter {
    private List<PathFilter> filters;

    public MultiPathFilter(List<PathFilter> filters) {
      this.filters = filters;
    }

    // Accept only when every delegate filter accepts (logical AND).
    public boolean accept(Path path) {
      for (PathFilter filter : filters) {
        if (!filter.accept(path)) {
          return false;
        }
      }
      return true;
    }
  }

  /**
   * Is the given filename splitable? Usually, true, but if the file is
   * stream compressed, it will not be.
   *
   * <code>FileInputFormat</code> implementations can override this and return
   * <code>false</code> to ensure that individual input files are never split-up
   * so that {@link Mapper}s process entire files.
   *
   * @param fs the file system that the file is on
   * @param filename the file name to check
   * @return is this file splitable?
   */
  protected boolean isSplitable(FileSystem fs, Path filename) {
    return true;
  }

  public abstract RecordReader<K, V> getRecordReader(InputSplit split,
                                               JobConf job,
                                               Reporter reporter)
    throws IOException;

  /**
   * Set a PathFilter to be applied to the input paths for the map-reduce job.
   *
   * @param filter the PathFilter class use for filtering the input paths.
   */
  public static void setInputPathFilter(JobConf conf,
                                        Class<? extends PathFilter> filter) {
    conf.setClass(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.PATHFILTER_CLASS, filter, PathFilter.class);
  }

  /**
   * Get a PathFilter instance of the filter set for the input paths.
   *
   * @return the PathFilter instance set for the job, NULL if none has been set.
   */
  public static PathFilter getInputPathFilter(JobConf conf) {
    Class<? extends PathFilter> filterClass = conf.getClass(
      org.apache.hadoop.mapreduce.lib.input.FileInputFormat.PATHFILTER_CLASS,
      null, PathFilter.class);
    return (filterClass != null) ?
        ReflectionUtils.newInstance(filterClass, conf) : null;
  }

  /**
   * Add files in the input path recursively into the results.
   * @param result
   *          The List to store all files.
   * @param fs
   *          The FileSystem.
   * @param path
   *          The input path.
   * @param inputFilter
   *          The input filter that can be used to filter files/dirs.
   * @throws IOException
   */
  protected void addInputPathRecursively(List<FileStatus> result,
      FileSystem fs, Path path, PathFilter inputFilter)
      throws IOException {
    for(FileStatus stat: fs.listStatus(path, inputFilter)) {
      if (stat.isDirectory()) {
        // descend into sub-directories
        addInputPathRecursively(result, fs, stat.getPath(), inputFilter);
      } else {
        result.add(stat);
      }
    }
  }

  /** List input directories.
   * Subclasses may override to, e.g., select only files matching a regular
   * expression.
   *
   * @param job the job to list input paths for
   * @return array of FileStatus objects
   * @throws IOException if zero items.
   */
  protected FileStatus[] listStatus(JobConf job) throws IOException {
    Path[] dirs = getInputPaths(job);
    if (dirs.length == 0) {
      throw new IOException("No input paths specified in job");
    }

    // get tokens for all the required FileSystems..
    TokenCache.obtainTokensForNamenodes(job.getCredentials(), dirs, job);

    // Whether we need to recursive look into the directory structure
    boolean recursive = job.getBoolean("mapred.input.dir.recursive", false);

    List<FileStatus> result = new ArrayList<FileStatus>();
    List<IOException> errors = new ArrayList<IOException>();

    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
      filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);

    for (Path p: dirs) {
      FileSystem fs = p.getFileSystem(job);
      FileStatus[] matches = fs.globStatus(p, inputFilter);
      if (matches == null) {
        // globStatus returns null when the path does not exist at all
        errors.add(new IOException("Input path does not exist: " + p));
      } else if (matches.length == 0) {
        errors.add(new IOException("Input Pattern " + p + " matches 0 files"));
      } else {
        for (FileStatus globStat: matches) {
          if (globStat.isDirectory()) {
            for(FileStatus stat: fs.listStatus(globStat.getPath(),
                inputFilter)) {
              if (recursive && stat.isDirectory()) {
                addInputPathRecursively(result, fs, stat.getPath(), inputFilter);
              } else {
                result.add(stat);
              }
            }
          } else {
            result.add(globStat);
          }
        }
      }
    }

    // errors are accumulated so all bad input paths are reported at once
    if (!errors.isEmpty()) {
      throw new InvalidInputException(errors);
    }
    LOG.info("Total input paths to process : " + result.size());
    return result.toArray(new FileStatus[result.size()]);
  }

  /**
   * A factory that makes the split for this class. It can be overridden
   * by sub-classes to make sub-types
   */
  protected FileSplit makeSplit(Path file, long start, long length,
                                String[] hosts) {
    return new FileSplit(file, start, length, hosts);
  }

  /** Splits files returned by {@link #listStatus(JobConf)} when
   * they're too big.*/
  @SuppressWarnings("deprecation")
  public InputSplit[] getSplits(JobConf job, int numSplits)
    throws IOException {
    FileStatus[] files = listStatus(job);

    // Save the number of input files for metrics/loadgen
    job.setLong(NUM_INPUT_FILES, files.length);
    long totalSize = 0; // compute total size
    for (FileStatus file: files) { // check we have valid files
      if (file.isDirectory()) {
        throw new IOException("Not a file: "+ file.getPath());
      }
      totalSize += file.getLen();
    }

    // goalSize: the average number of bytes per requested split
    long goalSize = totalSize / (numSplits == 0 ? 1 : numSplits);
    long minSize = Math.max(job.getLong(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.SPLIT_MINSIZE, 1), minSplitSize);

    // generate splits
    ArrayList<FileSplit> splits = new ArrayList<FileSplit>(numSplits);
    NetworkTopology clusterMap = new NetworkTopology();
    for (FileStatus file: files) {
      Path path = file.getPath();
      FileSystem fs = path.getFileSystem(job);
      long length = file.getLen();
      BlockLocation[] blkLocations = fs.getFileBlockLocations(file, 0, length);
      if ((length != 0) && isSplitable(fs, path)) {
        long blockSize = file.getBlockSize();
        long splitSize = computeSplitSize(goalSize, minSize, blockSize);

        // carve off splitSize-byte chunks while the remainder exceeds the slop factor
        long bytesRemaining = length;
        while (((double) bytesRemaining)/splitSize > SPLIT_SLOP) {
          String[] splitHosts = getSplitHosts(blkLocations,
              length-bytesRemaining, splitSize, clusterMap);
          splits.add(makeSplit(path, length-bytesRemaining, splitSize,
              splitHosts));
          bytesRemaining -= splitSize;
        }

        // the tail (<= SPLIT_SLOP * splitSize bytes) becomes one final split
        if (bytesRemaining != 0) {
          splits.add(makeSplit(path, length-bytesRemaining, bytesRemaining,
                     blkLocations[blkLocations.length-1].getHosts()));
        }
      } else if (length != 0) {
        // non-splitable file: one split covering the whole file
        String[] splitHosts = getSplitHosts(blkLocations,0,length,clusterMap);
        splits.add(makeSplit(path, 0, length, splitHosts));
      } else {
        //Create empty hosts array for zero length files
        splits.add(makeSplit(path, 0, length, new String[0]));
      }
    }
    LOG.debug("Total # of splits: " + splits.size());
    return splits.toArray(new FileSplit[splits.size()]);
  }

  /**
   * Split size is the block size clamped into [minSize, goalSize]
   * (goalSize wins over blockSize when smaller; minSize is the floor).
   */
  protected long computeSplitSize(long goalSize, long minSize,
                                       long blockSize) {
    return Math.max(minSize, Math.min(goalSize, blockSize));
  }

  /**
   * Returns the index of the block containing the given byte offset.
   *
   * @throws IllegalArgumentException if the offset lies outside the file
   */
  protected int getBlockIndex(BlockLocation[] blkLocations,
                              long offset) {
    for (int i = 0 ; i < blkLocations.length; i++) {
      // is the offset inside this block?
      if ((blkLocations[i].getOffset() <= offset) &&
          (offset < blkLocations[i].getOffset() + blkLocations[i].getLength())){
        return i;
      }
    }
    BlockLocation last = blkLocations[blkLocations.length -1];
    long fileLength = last.getOffset() + last.getLength() -1;
    throw new IllegalArgumentException("Offset " + offset +
                                       " is outside of file (0.." +
                                       fileLength + ")");
  }

  /**
   * Sets the given comma separated paths as the list of inputs
   * for the map-reduce job.
   *
   * @param conf Configuration of the job
   * @param commaSeparatedPaths Comma separated paths to be set as
   *        the list of inputs for the map-reduce job.
   */
  public static void setInputPaths(JobConf conf, String commaSeparatedPaths) {
    setInputPaths(conf, StringUtils.stringToPath(
                        getPathStrings(commaSeparatedPaths)));
  }

  /**
   * Add the given comma separated paths to the list of inputs for
   * the map-reduce job.
   *
   * @param conf The configuration of the job
   * @param commaSeparatedPaths Comma separated paths to be added to
   *        the list of inputs for the map-reduce job.
   */
  public static void addInputPaths(JobConf conf, String commaSeparatedPaths) {
    for (String str : getPathStrings(commaSeparatedPaths)) {
      addInputPath(conf, new Path(str));
    }
  }

  /**
   * Set the array of {@link Path}s as the list of inputs
   * for the map-reduce job.
   *
   * @param conf Configuration of the job.
   * @param inputPaths the {@link Path}s of the input directories/files
   * for the map-reduce job.
   */
  public static void setInputPaths(JobConf conf, Path... inputPaths) {
    Path path = new Path(conf.getWorkingDirectory(), inputPaths[0]);
    StringBuffer str = new StringBuffer(StringUtils.escapeString(path.toString()));
    for(int i = 1; i < inputPaths.length;i++) {
      str.append(StringUtils.COMMA_STR);
      path = new Path(conf.getWorkingDirectory(), inputPaths[i]);
      str.append(StringUtils.escapeString(path.toString()));
    }
    conf.set(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.INPUT_DIR, str.toString());
  }

  /**
   * Add a {@link Path} to the list of inputs for the map-reduce job.
   *
   * @param conf The configuration of the job
   * @param path {@link Path} to be added to the list of inputs for
   *            the map-reduce job.
   */
  public static void addInputPath(JobConf conf, Path path ) {
    path = new Path(conf.getWorkingDirectory(), path);
    String dirStr = StringUtils.escapeString(path.toString());
    String dirs = conf.get(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.INPUT_DIR);
    conf.set(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.INPUT_DIR, dirs == null ? dirStr :
      dirs + StringUtils.COMMA_STR + dirStr);
  }

  // This method escapes commas in the glob pattern of the given paths.
  // Commas inside a {...} glob group are part of the pattern, not separators.
  private static String[] getPathStrings(String commaSeparatedPaths) {
    int length = commaSeparatedPaths.length();
    int curlyOpen = 0;
    int pathStart = 0;
    boolean globPattern = false;
    List<String> pathStrings = new ArrayList<String>();

    for (int i=0; i<length; i++) {
      char ch = commaSeparatedPaths.charAt(i);
      switch(ch) {
        case '{' : {
          curlyOpen++;
          if (!globPattern) {
            globPattern = true;
          }
          break;
        }
        case '}' : {
          curlyOpen--;
          // only leave glob mode when all nested braces are closed
          if (curlyOpen == 0 && globPattern) {
            globPattern = false;
          }
          break;
        }
        case ',' : {
          if (!globPattern) {
            pathStrings.add(commaSeparatedPaths.substring(pathStart, i));
            pathStart = i + 1 ;
          }
          break;
        }
      }
    }
    pathStrings.add(commaSeparatedPaths.substring(pathStart, length));

    return pathStrings.toArray(new String[0]);
  }

  /**
   * Get the list of input {@link Path}s for the map-reduce job.
   *
   * @param conf The configuration of the job
   * @return the list of input {@link Path}s for the map-reduce job.
   */
  public static Path[] getInputPaths(JobConf conf) {
    String dirs = conf.get(org.apache.hadoop.mapreduce.lib.input.
      FileInputFormat.INPUT_DIR, "");
    String [] list = StringUtils.split(dirs);
    Path[] result = new Path[list.length];
    for (int i = 0; i < list.length; i++) {
      result[i] = new Path(StringUtils.unEscapeString(list[i]));
    }
    return result;
  }

  // Sorts NodeInfos by descending byte contribution.
  // NOTE(review): the comparator returns -1 whenever either argument is null,
  // which is not symmetric; kept as-is since changing it would alter ordering.
  private void sortInDescendingOrder(List<NodeInfo> mylist) {
    Collections.sort(mylist, new Comparator<NodeInfo> () {
      public int compare(NodeInfo obj1, NodeInfo obj2) {

        if (obj1 == null || obj2 == null)
          return -1;

        if (obj1.getValue() == obj2.getValue()) {
          return 0;
        }
        else {
          return ((obj1.getValue() < obj2.getValue()) ? 1 : -1);
        }
      }
    }
    );
  }

  /**
   * This function identifies and returns the hosts that contribute
   * most for a given split. For calculating the contribution, rack
   * locality is treated on par with host locality, so hosts from racks
   * that contribute the most are preferred over hosts on racks that
   * contribute less
   * @param blkLocations The list of block locations
   * @param offset
   * @param splitSize
   * @return array of hosts that contribute most to this split
   * @throws IOException
   */
  protected String[] getSplitHosts(BlockLocation[] blkLocations,
      long offset, long splitSize, NetworkTopology clusterMap)
  throws IOException {

    // bytes of the split that fall into the first block
    int startIndex = getBlockIndex(blkLocations, offset);

    long bytesInThisBlock = blkLocations[startIndex].getOffset() +
                          blkLocations[startIndex].getLength() - offset;

    //If this is the only block, just return
    if (bytesInThisBlock >= splitSize) {
      return blkLocations[startIndex].getHosts();
    }

    long bytesInFirstBlock = bytesInThisBlock;
    int index = startIndex + 1;
    splitSize -= bytesInThisBlock;

    // walk forward until the whole split is accounted for
    while (splitSize > 0) {
      bytesInThisBlock =
        Math.min(splitSize, blkLocations[index++].getLength());
      splitSize -= bytesInThisBlock;
    }

    long bytesInLastBlock = bytesInThisBlock;
    int endIndex = index - 1;

    Map <Node,NodeInfo> hostsMap = new IdentityHashMap<Node,NodeInfo>();
    Map <Node,NodeInfo> racksMap = new IdentityHashMap<Node,NodeInfo>();
    String [] allTopos = new String[0];

    // Build the hierarchy and aggregate the contribution of
    // bytes at each level. See TestGetSplitHosts.java

    for (index = startIndex; index <= endIndex; index++) {

      // Establish the bytes in this block
      if (index == startIndex) {
        bytesInThisBlock = bytesInFirstBlock;
      }
      else if (index == endIndex) {
        bytesInThisBlock = bytesInLastBlock;
      }
      else {
        bytesInThisBlock = blkLocations[index].getLength();
      }

      allTopos = blkLocations[index].getTopologyPaths();

      // If no topology information is available, just
      // prefix a fakeRack
      if (allTopos.length == 0) {
        allTopos = fakeRacks(blkLocations, index);
      }

      // NOTE: This code currently works only for one level of
      // hierarchy (rack/host). However, it is relatively easy
      // to extend this to support aggregation at different
      // levels

      for (String topo: allTopos) {

        Node node, parentNode;
        NodeInfo nodeInfo, parentNodeInfo;

        node = clusterMap.getNode(topo);

        if (node == null) {
          node = new NodeBase(topo);
          clusterMap.add(node);
        }

        nodeInfo = hostsMap.get(node);

        if (nodeInfo == null) {
          nodeInfo = new NodeInfo(node);
          hostsMap.put(node,nodeInfo);
          parentNode = node.getParent();
          parentNodeInfo = racksMap.get(parentNode);
          if (parentNodeInfo == null) {
            parentNodeInfo = new NodeInfo(parentNode);
            racksMap.put(parentNode,parentNodeInfo);
          }
          parentNodeInfo.addLeaf(nodeInfo);
        }
        else {
          nodeInfo = hostsMap.get(node);
          parentNode = node.getParent();
          parentNodeInfo = racksMap.get(parentNode);
        }

        // credit this block's bytes to both the host and its rack
        nodeInfo.addValue(index, bytesInThisBlock);
        parentNodeInfo.addValue(index, bytesInThisBlock);

      } // for all topos

    } // for all indices

    return identifyHosts(allTopos.length, racksMap);
  }

  /**
   * Picks up to {@code replicationFactor} host names, preferring hosts on the
   * racks that contribute the most bytes to the split, then the biggest
   * contributors within each such rack.
   */
  private String[] identifyHosts(int replicationFactor,
                                 Map<Node,NodeInfo> racksMap) {

    String [] retVal = new String[replicationFactor];

    List <NodeInfo> rackList = new LinkedList<NodeInfo>();

    rackList.addAll(racksMap.values());

    // Sort the racks based on their contribution to this split
    sortInDescendingOrder(rackList);

    boolean done = false;
    int index = 0;

    // Get the host list for all our aggregated items, sort
    // them and return the top entries
    for (NodeInfo ni: rackList) {

      Set<NodeInfo> hostSet = ni.getLeaves();

      List<NodeInfo>hostList = new LinkedList<NodeInfo>();
      hostList.addAll(hostSet);

      // Sort the hosts in this rack based on their contribution
      sortInDescendingOrder(hostList);

      for (NodeInfo host: hostList) {
        // Strip out the port number from the host name
        retVal[index++] = host.node.getName().split(":")[0];
        if (index == replicationFactor) {
          done = true;
          break;
        }
      }

      if (done == true) {
        break;
      }
    }
    return retVal;
  }

  /**
   * Synthesizes topology paths for a block that has no topology information,
   * by placing each of its hosts under the default rack.
   */
  private String[] fakeRacks(BlockLocation[] blkLocations, int index)
  throws IOException {
    String[] allHosts = blkLocations[index].getHosts();
    String[] allTopos = new String[allHosts.length];
    for (int i = 0; i < allHosts.length; i++) {
      allTopos[i] = NetworkTopology.DEFAULT_RACK + "/" + allHosts[i];
    }
    return allTopos;
  }

  // Per-node (host or rack) accumulator of the bytes it contributes to a split.
  private static class NodeInfo {
    final Node node;
    // Block indices already counted, so one block is never credited twice.
    final Set<Integer> blockIds;
    // Child hosts when this NodeInfo represents a rack.
    final Set<NodeInfo> leaves;

    private long value;

    NodeInfo(Node node) {
      this.node = node;
      blockIds = new HashSet<Integer>();
      leaves = new HashSet<NodeInfo>();
    }

    long getValue() {return value;}

    // Adds the given byte count only the first time this block index is seen.
    void addValue(int blockIndex, long value) {
      if (blockIds.add(blockIndex) == true) {
        this.value += value;
      }
    }

    Set<NodeInfo> getLeaves() { return leaves;}

    void addLeaf(NodeInfo nodeInfo) {
      leaves.add(nodeInfo);
    }
  }
}
| apache-2.0 |
smmribeiro/intellij-community | platform/platform-impl/src/com/intellij/ide/plugins/PluginManagerMain.java | 16238 | // Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.plugins;
import com.intellij.CommonBundle;
import com.intellij.core.CoreBundle;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.plugins.marketplace.statistics.PluginManagerUsageCollector;
import com.intellij.ide.plugins.marketplace.statistics.enums.DialogAcceptanceResultEnum;
import com.intellij.idea.Main;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationAction;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ex.ApplicationEx;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.extensions.PluginId;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageDialogBuilder;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.updateSettings.impl.UpdateChecker;
import com.intellij.openapi.updateSettings.impl.UpdateSettings;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.IdeFrame;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.ui.HyperlinkAdapter;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.text.html.HTMLDocument;
import javax.swing.text.html.HTMLFrameHyperlinkEvent;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
 * Static utilities behind the IntelliJ plugin manager UI: downloading plugins inside a
 * backgroundable progress task, prompting the user to re-enable disabled dependant plugins,
 * posting the "restart required" notification after updates, and enforcing the third-party
 * plugin privacy note.
 * <p>
 * Utility holder only; all members are static and the class is never instantiated.
 */
public final class PluginManagerMain {
  // Not instantiable: static-only utility class.
  private PluginManagerMain() { }

  /**
   * @deprecated Please migrate to either {@link #downloadPluginsAndCleanup(List, Collection, Runnable, com.intellij.ide.plugins.PluginEnabler, ModalityState, Runnable)}
   * or {@link #downloadPlugins(List, Collection, boolean, Runnable, com.intellij.ide.plugins.PluginEnabler, Consumer)}.
   */
  @Deprecated(since = "2020.2", forRemoval = true)
  public static boolean downloadPlugins(@NotNull List<PluginNode> plugins,
                                        @NotNull List<? extends IdeaPluginDescriptor> customPlugins,
                                        @Nullable Runnable onSuccess,
                                        @NotNull PluginEnabler pluginEnabler,
                                        @Nullable Runnable cleanup) throws IOException {
    // Legacy adapter: keeps only the PluginNode entries of the wider descriptor list and
    // defaults the modality state expected by the replacement API.
    return downloadPluginsAndCleanup(plugins, ContainerUtil.filterIsInstance(customPlugins, PluginNode.class), onSuccess, pluginEnabler, ModalityState.any(), cleanup);
  }

  /**
   * Downloads {@code plugins} (restart-requiring mode) and invokes {@code cleanup} once the
   * operation finishes, whether it succeeded or not (see the {@code finally} block of the
   * main overload below).
   */
  public static boolean downloadPluginsAndCleanup(@NotNull List<PluginNode> plugins,
                                                  @NotNull Collection<PluginNode> customPlugins,
                                                  @Nullable Runnable onSuccess,
                                                  @NotNull com.intellij.ide.plugins.PluginEnabler pluginEnabler,
                                                  @NotNull ModalityState modalityState,
                                                  @Nullable Runnable cleanup) throws IOException {
    // The cleanup Runnable is adapted to the Consumer<Boolean> completion callback; the flag is ignored.
    return downloadPlugins(plugins, customPlugins, false, onSuccess, pluginEnabler, modalityState, cleanup != null ? __ -> cleanup.run() : null);
  }

  /**
   * @deprecated Please use the overload with explicitly passed modality state
   */
  @Deprecated
  public static boolean downloadPlugins(@NotNull List<PluginNode> plugins,
                                        @NotNull Collection<PluginNode> customPlugins,
                                        boolean allowInstallWithoutRestart,
                                        @Nullable Runnable onSuccess,
                                        @NotNull com.intellij.ide.plugins.PluginEnabler pluginEnabler,
                                        @Nullable Consumer<? super Boolean> function) throws IOException {
    return downloadPlugins(plugins, customPlugins, allowInstallWithoutRestart, onSuccess, pluginEnabler, ModalityState.any(), function);
  }

  /**
   * Runs a {@link PluginInstallOperation} for {@code plugins} inside a cancellable
   * {@link Task.Backgroundable}.
   *
   * @param plugins                    plugins to download and install
   * @param customPlugins              descriptors from custom repositories, passed through to the operation
   * @param allowInstallWithoutRestart if {@code true}, pending dynamic plugins are loaded right away on success
   * @param onSuccess                  invoked on the EDT under {@code modalityState} when the operation succeeds
   * @param function                   receives the success flag after the operation completes, successful or not
   * @return the operation's success flag. NOTE(review): if the task runs asynchronously relative to the
   *         caller, {@code result[0]} may be read before {@code run()} completes and then report
   *         {@code false} — confirm against {@code ProgressManager.run} semantics for the calling thread.
   * @throws IOException unwrapped from a {@code RuntimeException} thrown by the operation
   */
  public static boolean downloadPlugins(
    @NotNull List<PluginNode> plugins,
    @NotNull Collection<PluginNode> customPlugins,
    boolean allowInstallWithoutRestart,
    @Nullable Runnable onSuccess,
    @NotNull com.intellij.ide.plugins.PluginEnabler pluginEnabler,
    @NotNull final ModalityState modalityState,
    @Nullable Consumer<? super Boolean> function) throws IOException {
    try {
      // Single-slot holder mutated from the task's run() and read after ProgressManager.run returns.
      boolean[] result = new boolean[1];
      ProgressManager.getInstance().run(new Task.Backgroundable(null, IdeBundle.message("progress.download.plugins"), true, PluginManagerUISettings.getInstance()) {
        @Override
        public void run(@NotNull ProgressIndicator indicator) {
          try {
            //TODO: `PluginInstallOperation` expects only `customPlugins`, but it can take `allPlugins` too
            PluginInstallOperation operation = new PluginInstallOperation(plugins, customPlugins, pluginEnabler, indicator);
            operation.setAllowInstallWithoutRestart(allowInstallWithoutRestart);
            operation.run();
            boolean success = operation.isSuccess();
            result[0] = success;
            if (success) {
              // Dynamic-plugin loading and the success callback must run on the EDT.
              ApplicationManager.getApplication().invokeLater(() -> {
                if (allowInstallWithoutRestart) {
                  for (PendingDynamicPluginInstall install : operation.getPendingDynamicPluginInstalls()) {
                    PluginInstaller.installAndLoadDynamicPlugin(install.getFile(), install.getPluginDescriptor());
                  }
                }
                if (onSuccess != null) {
                  onSuccess.run();
                }
              }, modalityState);
            }
          }
          finally {
            // Always report completion to the caller's callback, even if the operation threw.
            if (function != null) {
              ApplicationManager.getApplication().invokeLater(() -> function.accept(result[0]), ModalityState.any());
            }
          }
        }
      });
      return result[0];
    }
    catch (RuntimeException e) {
      // The operation wraps checked IOExceptions in RuntimeException; restore the original type for callers.
      Throwable cause = e.getCause();
      if (cause instanceof IOException) {
        throw (IOException)cause;
      }
      else {
        throw e;
      }
    }
  }

  /**
   * Hyperlink listener for plugin description panes: HTML frame navigation is handled
   * inside the document, any other link opens in the external browser.
   */
  public static class MyHyperlinkListener extends HyperlinkAdapter {
    @Override
    protected void hyperlinkActivated(HyperlinkEvent e) {
      JEditorPane pane = (JEditorPane)e.getSource();
      if (e instanceof HTMLFrameHyperlinkEvent) {
        // Frame navigation stays inside the editor pane's document.
        HTMLDocument doc = (HTMLDocument)pane.getDocument();
        doc.processHTMLFrameHyperlinkEvent((HTMLFrameHyperlinkEvent)e);
      }
      else {
        URL url = e.getURL();
        if (url != null) {
          BrowserUtil.browse(url);
        }
      }
    }
  }

  /**
   * After installing/updating the plugins in {@code list}, collects those that are disabled plus
   * the disabled plugins they non-optionally depend on, asks the user which group(s) to enable,
   * and enables them via {@code pluginEnabler}.
   *
   * @return {@code true} if a dialog was shown and not cancelled; {@code false} when the user
   *         cancelled or when nothing relevant is disabled
   */
  public static boolean suggestToEnableInstalledDependantPlugins(@NotNull com.intellij.ide.plugins.PluginEnabler pluginEnabler,
                                                                 @NotNull List<? extends IdeaPluginDescriptor> list) {
    Set<IdeaPluginDescriptor> disabled = new HashSet<>();
    Set<IdeaPluginDescriptor> disabledDependants = new HashSet<>();
    for (IdeaPluginDescriptor node : list) {
      PluginId pluginId = node.getPluginId();
      if (pluginEnabler.isDisabled(pluginId)) {
        disabled.add(node);
      }
      for (IdeaPluginDependency dependency : node.getDependencies()) {
        if (dependency.isOptional()) {
          continue;
        }
        PluginId dependantId = dependency.getPluginId();
        // If there is no installed plugin implementing the module, then it can only be a platform module which cannot be disabled
        if (PluginManagerCore.isModuleDependency(dependantId) &&
            PluginManagerCore.findPluginByModuleDependency(dependantId) == null) {
          continue;
        }
        IdeaPluginDescriptor pluginDescriptor = PluginManagerCore.getPlugin(dependantId);
        if (pluginDescriptor != null && pluginEnabler.isDisabled(dependantId)) {
          disabledDependants.add(pluginDescriptor);
        }
      }
    }
    if (!disabled.isEmpty() || !disabledDependants.isEmpty()) {
      // Compose the prompt HTML piecewise from bundle message parts.
      String message = "";
      if (disabled.size() == 1) {
        message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part1", disabled.iterator().next().getName());
      }
      else if (!disabled.isEmpty()) {
        message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part2", StringUtil.join(disabled, pluginDescriptor -> pluginDescriptor.getName(), ", "));
      }
      if (!disabledDependants.isEmpty()) {
        message += "<br>";
        message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part3", list.size());
        message += " ";
        if (disabledDependants.size() == 1) {
          message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part4", disabledDependants.iterator().next().getName());
        }
        else {
          message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part5", StringUtil.join(disabledDependants, pluginDescriptor -> pluginDescriptor.getName(), ", "));
        }
      }
      message += " ";
      message += IdeBundle.message(disabled.isEmpty() ? "plugin.manager.main.suggest.to.enable.message.part6" : "plugin.manager.main.suggest.to.enable.message.part7");
      boolean result;
      if (!disabled.isEmpty() && !disabledDependants.isEmpty()) {
        // Both groups present: three-way dialog (enable all / enable updated only / cancel).
        int code =
          MessageDialogBuilder.yesNoCancel(IdeBundle.message("dialog.title.dependent.plugins.found"), XmlStringUtil.wrapInHtml(message))
            .yesText(IdeBundle.message("button.enable.all"))
            .noText(IdeBundle.message("button.enable.updated.plugins", disabled.size()))
            .guessWindowAndAsk();
        if (code == Messages.CANCEL) {
          return false;
        }
        result = code == Messages.YES;
      }
      else {
        // Only one group present: plain yes/no dialog.
        message += "<br>";
        if (!disabled.isEmpty()) {
          message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part8", disabled.size());
        }
        else {
          message += IdeBundle.message("plugin.manager.main.suggest.to.enable.message.part9", disabledDependants.size());
        }
        message += "?";
        result = MessageDialogBuilder.yesNo(IdeBundle.message("dialog.title.dependent.plugins.found"), XmlStringUtil.wrapInHtml(message)).guessWindowAndAsk();
        if (!result) {
          return false;
        }
      }
      if (result) {
        // "Yes" / "Enable all": enable the updated plugins and their dependants.
        disabled.addAll(disabledDependants);
        pluginEnabler.enable(disabled);
      }
      else if (!disabled.isEmpty()) {
        // "No" in the three-way dialog: enable only the updated plugins themselves.
        pluginEnabler.enable(disabled);
      }
      return true;
    }
    return false;
  }

  /** @deprecated Please use {@link com.intellij.ide.plugins.PluginEnabler} directly. */
  @Deprecated
  public interface PluginEnabler extends com.intellij.ide.plugins.PluginEnabler {
    // All operations delegate to the shared HEADLESS instance inherited from the parent interface.
    @Override
    default boolean isDisabled(@NotNull PluginId pluginId) {
      return HEADLESS.isDisabled(pluginId);
    }
    @Override
    default boolean enableById(@NotNull Set<PluginId> pluginIds) {
      return HEADLESS.enableById(pluginIds);
    }
    @Override
    default boolean enable(@NotNull Collection<? extends IdeaPluginDescriptor> descriptors) {
      return HEADLESS.enable(descriptors);
    }
    @Override
    default boolean disableById(@NotNull Set<PluginId> pluginIds) {
      return HEADLESS.disableById(pluginIds);
    }
    @Override
    default boolean disable(@NotNull Collection<? extends IdeaPluginDescriptor> descriptors) {
      return HEADLESS.disable(descriptors);
    }
    // Kept for binary compatibility with the old nested-class name.
    final class HEADLESS implements PluginEnabler { }
  }

  /**
   * Dispatches a plugin-management event by its string description
   * ({@code PluginManagerCore.DISABLE} / {@code ENABLE} / {@code EDIT}); unknown values are ignored.
   */
  @ApiStatus.Internal
  public static void onEvent(String description) {
    switch (description) {
      case PluginManagerCore.DISABLE:
        PluginManagerCore.onEnable(false);
        break;
      case PluginManagerCore.ENABLE:
        if (PluginManagerCore.onEnable(true)) {
          notifyPluginsUpdated(null);
        }
        break;
      case PluginManagerCore.EDIT:
        IdeFrame frame = WindowManagerEx.getInstanceEx().findFrameFor(null);
        PluginManagerConfigurable.showPluginConfigurable(frame != null ? frame.getComponent() : null, null, List.of());
        break;
    }
  }

  /**
   * Shows an informational notification that a restart (or shutdown, when the app cannot
   * restart itself) is required to apply updated plugins, with an action performing it.
   */
  public static void notifyPluginsUpdated(@Nullable Project project) {
    ApplicationEx app = ApplicationManagerEx.getApplicationEx();
    String title = IdeBundle.message("updates.notification.title", ApplicationNamesInfo.getInstance().getFullProductName());
    // Choice-format bundle message: the flag selects restart vs. shutdown wording.
    String action = IdeBundle.message("ide.restart.required.notification", app.isRestartCapable() ? 1 : 0);
    UpdateChecker.getNotificationGroup()
      .createNotification(title, NotificationType.INFORMATION)
      .setDisplayId("plugins.updated.suggest.restart")
      .addAction(new NotificationAction(action) {
        @Override
        public void actionPerformed(@NotNull AnActionEvent e, @NotNull Notification notification) {
          if (PluginManagerConfigurable.showRestartDialog() == Messages.YES) {
            notification.expire();
            ApplicationManagerEx.getApplicationEx().restart(true);
          }
        }
      })
      .notify(project);
  }

  /**
   * Ensures the user has accepted the third-party plugin privacy note before installing
   * {@code descriptors}. Bundled and JetBrains-developed plugins never trigger the prompt;
   * in headless mode the decision is persisted and deferred to the next start.
   *
   * @return {@code true} if installation may proceed, {@code false} if the user declined
   */
  public static boolean checkThirdPartyPluginsAllowed(@NotNull Collection<? extends IdeaPluginDescriptor> descriptors) {
    // "Aliens" = plugins neither bundled nor developed by JetBrains.
    @SuppressWarnings("SSBasedInspection") Collection<? extends IdeaPluginDescriptor> aliens = descriptors.stream()
      .filter(descriptor -> !(descriptor.isBundled() || PluginManagerCore.isDevelopedByJetBrains(descriptor)))
      .collect(Collectors.toList());
    if (aliens.isEmpty()) return true;
    UpdateSettings updateSettings = UpdateSettings.getInstance();
    if (updateSettings.isThirdPartyPluginsAllowed()) {
      PluginManagerUsageCollector.thirdPartyAcceptanceCheck(DialogAcceptanceResultEnum.AUTO_ACCEPTED);
      return true;
    }
    if (Main.isHeadless()) {
      // postponing the dialog till the next start
      PluginManagerCore.write3rdPartyPlugins(aliens);
      return true;
    }
    String title = CoreBundle.message("third.party.plugins.privacy.note.title");
    String pluginList = aliens.stream()
      .map(descriptor -> "&#32;&#32;" + descriptor.getName() + " (" + descriptor.getVendor() + ')')
      .collect(Collectors.joining("<br>"));
    String message = CoreBundle.message("third.party.plugins.privacy.note.text", pluginList);
    String yesText = CoreBundle.message("third.party.plugins.privacy.note.accept"), noText = CommonBundle.getCancelButtonText();
    if (Messages.showYesNoDialog(message, title, yesText, noText, Messages.getWarningIcon()) == Messages.YES) {
      updateSettings.setThirdPartyPluginsAllowed(true);
      PluginManagerUsageCollector.thirdPartyAcceptanceCheck(DialogAcceptanceResultEnum.ACCEPTED);
      return true;
    }
    else {
      PluginManagerUsageCollector.thirdPartyAcceptanceCheck(DialogAcceptanceResultEnum.DECLINED);
      return false;
    }
  }

  /**
   * Applies the privacy-note decision recorded during a previous (headless) start,
   * if any, and reports it to the usage collector. A {@code null} answer means no
   * pending decision and is ignored.
   */
  @ApiStatus.Internal
  public static void checkThirdPartyPluginsAllowed() {
    // isThirdPartyPluginsNoteAccepted() returns the canonical Boolean constants (or null),
    // so reference comparison with Boolean.TRUE/FALSE is intentional here.
    Boolean noteAccepted = PluginManagerCore.isThirdPartyPluginsNoteAccepted();
    if (noteAccepted == Boolean.TRUE) {
      UpdateSettings.getInstance().setThirdPartyPluginsAllowed(true);
      PluginManagerUsageCollector.thirdPartyAcceptanceCheck(DialogAcceptanceResultEnum.ACCEPTED);
    }
    else if (noteAccepted == Boolean.FALSE) {
      PluginManagerUsageCollector.thirdPartyAcceptanceCheck(DialogAcceptanceResultEnum.DECLINED);
    }
  }
}
| apache-2.0 |
Kotlin/dokka | core/testdata/java/typeParameter.java | 64 | class Foo<T extends Comparable<T>> {
    // Dokka test fixture: a class with a bounded, self-comparable type parameter <T>
    // and a method declaring its own type parameter <E>.
    // NOTE(review): foo() has no body, so this is parse-only test data, not compilable Java.
    public <E> E foo();
}
| apache-2.0 |
prateekm/samza | samza-core/src/main/java/org/apache/samza/serializers/model/JsonTaskModelMixIn.java | 1864 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.serializers.model;
import java.util.Set;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.samza.Partition;
import org.apache.samza.container.TaskName;
import org.apache.samza.system.SystemStreamPartition;
/**
 * A mix-in Jackson class to convert Samza's TaskModel to/from JSON.
 * <p>
 * Declares the JSON field names ("task-name", "system-stream-partitions",
 * "changelog-partition") used instead of the bean property names; unknown
 * fields in the input are ignored for forward compatibility.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public abstract class JsonTaskModelMixIn {
  /** Binds TaskModel's constructor parameters to their JSON field names for deserialization. */
  @JsonCreator
  public JsonTaskModelMixIn(@JsonProperty("task-name") TaskName taskName, @JsonProperty("system-stream-partitions") Set<SystemStreamPartition> systemStreamPartitions, @JsonProperty("changelog-partition") Partition changelogPartition) {
  }
  /** Serialized as JSON field "task-name". */
  @JsonProperty("task-name")
  abstract TaskName getTaskName();
  /** Serialized as JSON field "system-stream-partitions". */
  @JsonProperty("system-stream-partitions")
  abstract Set<SystemStreamPartition> getSystemStreamPartitions();
  /** Serialized as JSON field "changelog-partition". */
  @JsonProperty("changelog-partition")
  abstract Partition getChangelogPartition();
} | apache-2.0 |
cyongli/palo | fe/test/com/baidu/palo/mysql/MysqlServerTest.java | 5362 | // Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.baidu.palo.mysql;
import com.baidu.palo.qe.ConnectContext;
import com.baidu.palo.qe.ConnectScheduler;
import org.junit.Assert;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.nio.channels.SocketChannel;
/**
 * Tests for MysqlServer start/stop and connection submission to the ConnectScheduler.
 * <p>
 * NOTE(review): these tests rely on fixed sleeps and on re-binding an ephemeral port
 * that was just closed, both of which can be racy on loaded machines.
 */
public class MysqlServerTest {
    private static final Logger LOG = LoggerFactory.getLogger(MysqlServerTest.class);
    // Counters bumped by the mocked schedulers; guarded by synchronized(this).
    private int submitNum;
    private int submitFailNum;
    // Mock scheduler whose submit() always succeeds.
    private ConnectScheduler scheduler;
    // Mock scheduler whose submit() always fails.
    private ConnectScheduler badScheduler;
    @Before
    public void setUp() {
        submitNum = 0;
        submitFailNum = 0;
        // submit() -> counts the call and reports success.
        scheduler = EasyMock.createMock(ConnectScheduler.class);
        EasyMock.expect(scheduler.submit(EasyMock.anyObject(ConnectContext.class)))
                .andAnswer(new IAnswer<Boolean>() {
                    @Override
                    public Boolean answer() throws Throwable {
                        LOG.info("answer.");
                        synchronized (MysqlServerTest.this) {
                            submitNum++;
                        }
                        return Boolean.TRUE;
                    }
                }).anyTimes();
        EasyMock.replay(scheduler);
        // submit() -> counts the call and reports failure.
        badScheduler = EasyMock.createMock(ConnectScheduler.class);
        EasyMock.expect(badScheduler.submit(EasyMock.anyObject(ConnectContext.class)))
                .andAnswer(new IAnswer<Boolean>() {
                    @Override
                    public Boolean answer() throws Throwable {
                        LOG.info("answer.");
                        synchronized (MysqlServerTest.this) {
                            submitFailNum++;
                        }
                        return Boolean.FALSE;
                    }
                }).anyTimes();
        EasyMock.replay(badScheduler);
    }
    /** Two client connections are both submitted to the scheduler, then the server shuts down cleanly. */
    @Test
    public void testNormal() throws IOException, InterruptedException {
        // Grab a free ephemeral port, then release it for the server to bind.
        ServerSocket socket = new ServerSocket(0);
        int port = socket.getLocalPort();
        socket.close();
        MysqlServer server = new MysqlServer(port, scheduler);
        Assert.assertTrue(server.start());
        // submit
        SocketChannel channel = SocketChannel.open();
        channel.connect(new InetSocketAddress("127.0.0.1", port));
        // sleep to wait mock process
        Thread.sleep(2000);
        channel.close();
        // submit twice
        channel = SocketChannel.open();
        channel.connect(new InetSocketAddress("127.0.0.1", port));
        // sleep to wait mock process
        Thread.sleep(2000);
        channel.close();
        // stop and join
        server.stop();
        server.join();
        Assert.assertEquals(2, submitNum);
    }
    /** A null scheduler is rejected: start() must fail. */
    @Test
    public void testInvalidParam() throws IOException {
        ServerSocket socket = new ServerSocket(0);
        int port = socket.getLocalPort();
        socket.close();
        MysqlServer server = new MysqlServer(port, null);
        Assert.assertFalse(server.start());
    }
    /** Binding a second server to an already-bound port must fail. */
    @Test
    public void testBindFail() throws IOException {
        ServerSocket socket = new ServerSocket(0);
        int port = socket.getLocalPort();
        socket.close();
        MysqlServer server = new MysqlServer(port, scheduler);
        Assert.assertTrue(server.start());
        MysqlServer server1 = new MysqlServer(port, scheduler);
        Assert.assertFalse(server1.start());
        server.stop();
        server.join();
    }
    /** Failed submissions are counted but do not prevent a clean server shutdown. */
    @Test
    public void testSubFail() throws IOException, InterruptedException {
        ServerSocket socket = new ServerSocket(0);
        int port = socket.getLocalPort();
        socket.close();
        MysqlServer server = new MysqlServer(port, badScheduler);
        Assert.assertTrue(server.start());
        // submit
        SocketChannel channel = SocketChannel.open();
        channel.connect(new InetSocketAddress(port));
        // sleep to wait mock process
        Thread.sleep(100);
        channel.close();
        // submit twice
        channel = SocketChannel.open();
        channel.connect(new InetSocketAddress(port));
        // sleep to wait mock process
        Thread.sleep(100);
        channel.close();
        // stop and join
        server.stop();
        server.join();
        Assert.assertEquals(2, submitFailNum);
    }
} | apache-2.0 |
jerrinot/Aeron | aeron-system-tests/src/test/java/uk/co/real_logic/aeron/MultiDriverTest.java | 7145 | /*
* Copyright 2014 - 2015 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.aeron;
import org.junit.After;
import org.junit.Test;
import uk.co.real_logic.aeron.logbuffer.FragmentHandler;
import uk.co.real_logic.aeron.logbuffer.Header;
import uk.co.real_logic.aeron.protocol.DataHeaderFlyweight;
import uk.co.real_logic.aeron.driver.MediaDriver;
import uk.co.real_logic.aeron.driver.ThreadingMode;
import uk.co.real_logic.agrona.IoUtil;
import uk.co.real_logic.agrona.concurrent.UnsafeBuffer;
import java.io.File;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
/**
 * Tests requiring multiple embedded drivers: one publisher client on driver A and a
 * second subscriber client on an independent driver B, sharing a multicast channel.
 */
public class MultiDriverTest
{
    public static final String MULTICAST_URI = "udp://localhost@224.20.30.39:54326";
    private static final int STREAM_ID = 1;
    private static final ThreadingMode THREADING_MODE = ThreadingMode.SHARED;
    private static final int TERM_BUFFER_SIZE = 64 * 1024;
    private static final int NUM_MESSAGES_PER_TERM = 64;
    // Payload sized so that exactly NUM_MESSAGES_PER_TERM framed messages fill one term buffer.
    private static final int MESSAGE_LENGTH =
        (TERM_BUFFER_SIZE / NUM_MESSAGES_PER_TERM) - DataHeaderFlyweight.HEADER_LENGTH;
    // Unique per-run directory root so parallel or crashed runs do not collide on disk.
    private static final String ROOT_DIR =
        IoUtil.tmpDirName() + "aeron-system-tests-" + UUID.randomUUID().toString() + File.separator;
    private final MediaDriver.Context driverAContext = new MediaDriver.Context();
    private final MediaDriver.Context driverBContext = new MediaDriver.Context();
    private final Aeron.Context aeronAContext = new Aeron.Context();
    private final Aeron.Context aeronBContext = new Aeron.Context();
    private Aeron clientA;
    private Aeron clientB;
    private MediaDriver driverA;
    private MediaDriver driverB;
    private Publication publication;
    private Subscription subscriptionA;
    private Subscription subscriptionB;
    private UnsafeBuffer buffer = new UnsafeBuffer(new byte[MESSAGE_LENGTH]);
    private FragmentHandler fragmentHandlerA = mock(FragmentHandler.class);
    private FragmentHandler fragmentHandlerB = mock(FragmentHandler.class);
    /** Starts two embedded media drivers in separate Aeron directories and connects one client to each. */
    private void launch()
    {
        final String baseDirA = ROOT_DIR + "A";
        final String baseDirB = ROOT_DIR + "B";
        buffer.putInt(0, 1);
        driverAContext.termBufferLength(TERM_BUFFER_SIZE);
        driverAContext.dirName(baseDirA);
        driverAContext.threadingMode(THREADING_MODE);
        aeronAContext.dirName(driverAContext.dirName());
        driverBContext.termBufferLength(TERM_BUFFER_SIZE);
        driverBContext.dirName(baseDirB);
        driverBContext.threadingMode(THREADING_MODE);
        aeronBContext.dirName(driverBContext.dirName());
        driverA = MediaDriver.launch(driverAContext);
        driverB = MediaDriver.launch(driverBContext);
        clientA = Aeron.connect(aeronAContext);
        clientB = Aeron.connect(aeronBContext);
    }
    // Teardown order: publication/subscriptions, then clients, then drivers, then the temp dirs.
    @After
    public void closeEverything()
    {
        publication.close();
        subscriptionA.close();
        subscriptionB.close();
        clientB.close();
        clientA.close();
        driverB.close();
        driverA.close();
        IoUtil.delete(new File(ROOT_DIR), true);
    }
    /** Smoke test: both drivers and clients come up, endpoints are created, and teardown succeeds. */
    @Test(timeout = 10000)
    public void shouldSpinUpAndShutdown() throws Exception
    {
        launch();
        publication = clientA.addPublication(MULTICAST_URI, STREAM_ID);
        subscriptionA = clientA.addSubscription(MULTICAST_URI, STREAM_ID);
        subscriptionB = clientB.addSubscription(MULTICAST_URI, STREAM_ID);
        Thread.sleep(20); // allow for images to be established
    }
    /**
     * A late-joining subscriber (on driver B) should receive only the messages published
     * after it joined, while the original subscriber receives everything. Publishing and
     * polling proceed in lock step, one message at a time.
     */
    @Test(timeout = 10000)
    public void shouldJoinExistingStreamWithLockStepSendingReceiving() throws Exception
    {
        final int numMessagesToSendPreJoin = NUM_MESSAGES_PER_TERM / 2;
        final int numMessagesToSendPostJoin = NUM_MESSAGES_PER_TERM;
        final CountDownLatch newImageLatch = new CountDownLatch(1);
        // Signals when client B's subscription sees its first image of the stream.
        aeronBContext.availableImageHandler((image, subscription, position, info) -> newImageLatch.countDown());
        launch();
        publication = clientA.addPublication(MULTICAST_URI, STREAM_ID);
        subscriptionA = clientA.addSubscription(MULTICAST_URI, STREAM_ID);
        for (int i = 0; i < numMessagesToSendPreJoin; i++)
        {
            // Spin until the publication accepts the message (back pressure may reject it).
            while (publication.offer(buffer, 0, buffer.capacity()) < 0L)
            {
                Thread.yield();
            }
            final int fragmentsRead[] = new int[1];
            SystemTestHelper.executeUntil(
                () -> fragmentsRead[0] > 0,
                (j) ->
                {
                    fragmentsRead[0] += subscriptionA.poll(fragmentHandlerA, 10);
                    Thread.yield();
                },
                Integer.MAX_VALUE,
                TimeUnit.MILLISECONDS.toNanos(500));
        }
        subscriptionB = clientB.addSubscription(MULTICAST_URI, STREAM_ID);
        // wait until new subscriber gets new image indication
        newImageLatch.await();
        for (int i = 0; i < numMessagesToSendPostJoin; i++)
        {
            while (publication.offer(buffer, 0, buffer.capacity()) < 0L)
            {
                Thread.yield();
            }
            final int fragmentsRead[] = new int[1];
            SystemTestHelper.executeUntil(
                () -> fragmentsRead[0] > 0,
                (j) ->
                {
                    fragmentsRead[0] += subscriptionA.poll(fragmentHandlerA, 10);
                    Thread.yield();
                },
                Integer.MAX_VALUE,
                TimeUnit.MILLISECONDS.toNanos(500));
            // Reuse the counter for the late subscriber's poll of the same message.
            fragmentsRead[0] = 0;
            SystemTestHelper.executeUntil(
                () -> fragmentsRead[0] > 0,
                (j) ->
                {
                    fragmentsRead[0] += subscriptionB.poll(fragmentHandlerB, 10);
                    Thread.yield();
                },
                Integer.MAX_VALUE,
                TimeUnit.MILLISECONDS.toNanos(500));
        }
        // A saw every message; B saw only the post-join ones.
        verify(fragmentHandlerA, times(numMessagesToSendPreJoin + numMessagesToSendPostJoin)).onFragment(
            any(UnsafeBuffer.class),
            anyInt(),
            eq(MESSAGE_LENGTH),
            any(Header.class));
        verify(fragmentHandlerB, times(numMessagesToSendPostJoin)).onFragment(
            any(UnsafeBuffer.class),
            anyInt(),
            eq(MESSAGE_LENGTH),
            any(Header.class));
    }
}
| apache-2.0 |
immutables/immutables | gson/test/org/immutables/gson/packg/sub/C.java | 978 | /*
Copyright 2015 Immutables Authors and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.immutables.gson.packg.sub;
import java.util.List;
import org.immutables.gson.Gson;
import org.immutables.gson.packg.ImmutableA;
import org.immutables.gson.packg.ImmutableB;
import org.immutables.value.Value;
// Yet to be generated ImmutableA or ImmutableB
/**
 * Package-private value interface referencing Immutable* types generated for a sibling
 * package; presumably exercises annotation processing across packages — TODO confirm.
 */
@Gson.TypeAdapters
@Value.Immutable
interface C {
  /** A generated-immutable attribute from the parent package. */
  ImmutableA a();
  /** A list attribute of generated immutables. */
  List<ImmutableB> b();
}
| apache-2.0 |
nunezro2/cassandra_cs597 | test/unit/org/apache/cassandra/db/RemoveColumnFamilyTest.java | 2145 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.db;
import java.io.IOException;
import java.util.concurrent.ExecutionException;

import org.junit.Test;

import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.utils.ByteBufferUtil;

import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;
public class RemoveColumnFamilyTest extends SchemaLoader
{
    /**
     * Verifies that deleting a whole column family (a row-level tombstone) hides previously
     * written columns: after the deletion the CF is marked for delete, the old column is no
     * longer visible, and purging deleted data leaves nothing behind.
     */
    @Test
    public void testRemoveColumnFamily() throws IOException, ExecutionException, InterruptedException
    {
        Table table = Table.open("Keyspace1");
        ColumnFamilyStore store = table.getColumnFamilyStore("Standard1");
        RowMutation rm;
        DecoratedKey dk = Util.dk("key1");
        // add data at timestamp 0
        rm = new RowMutation("Keyspace1", dk.key);
        rm.add("Standard1", ByteBufferUtil.bytes("Column1"), ByteBufferUtil.bytes("asdf"), 0);
        rm.apply();
        // remove the entire row at the later timestamp 1, shadowing the insert above
        rm = new RowMutation("Keyspace1", dk.key);
        rm.delete("Standard1", 1);
        rm.apply();
        ColumnFamily retrieved = store.getColumnFamily(QueryFilter.getIdentityFilter(dk, "Standard1"));
        // Use a JUnit assertion instead of the bare `assert` keyword so the check also
        // runs when the JVM is started without -ea (bare asserts are silently skipped).
        assertTrue(retrieved.isMarkedForDelete());
        assertNull(retrieved.getColumn(ByteBufferUtil.bytes("Column1")));
        // Purging tombstoned data entirely should leave no column family at all.
        assertNull(Util.cloneAndRemoveDeleted(retrieved, Integer.MAX_VALUE));
    }
}
| apache-2.0 |
asedunov/intellij-community | java/debugger/impl/src/com/intellij/debugger/ui/tree/render/OnDemandRenderer.java | 3643 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.ui.tree.render;
import com.intellij.debugger.engine.DebugProcessImpl;
import com.intellij.debugger.engine.FullValueEvaluatorProvider;
import com.intellij.debugger.engine.JavaValue;
import com.intellij.debugger.engine.evaluation.EvaluationContext;
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl;
import com.intellij.debugger.ui.impl.watch.ValueDescriptorImpl;
import com.intellij.debugger.ui.tree.ValueDescriptor;
import com.intellij.debugger.ui.tree.actions.ForceOnDemandRenderersAction;
import com.intellij.openapi.util.Key;
import com.intellij.xdebugger.frame.XFullValueEvaluator;
import com.intellij.xdebugger.frame.XValuePlace;
import com.intellij.xdebugger.impl.XDebugSessionImpl;
import com.intellij.xdebugger.impl.ui.tree.nodes.HeadlessValueEvaluationCallback;
import com.intellij.xdebugger.impl.ui.tree.nodes.XValueNodeImpl;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * A renderer mix-in that defers value rendering until the user explicitly requests it
 * via a link in the debugger tree. Until calculated, the node shows a clickable link
 * ({@link #getLinkText()}); activating it marks the descriptor as calculated and
 * recomputes the node's presentation.
 *
 * @author egor
 */
public interface OnDemandRenderer extends FullValueEvaluatorProvider {
  /** Returns the "evaluate" link for on-demand values that are not yet calculated; {@code null} otherwise. */
  @Nullable
  @Override
  default XFullValueEvaluator getFullValueEvaluator(EvaluationContextImpl evaluationContext,
                                                    ValueDescriptorImpl valueDescriptor) {
    if (isOnDemand(evaluationContext, valueDescriptor) && !isCalculated(valueDescriptor)) {
      return createFullValueEvaluator(getLinkText());
    }
    return null;
  }
  /** Text of the link shown in place of the value before it is calculated. */
  String getLinkText();
  /** Whether rendering of this value should be deferred; by default only when forced for the whole session. */
  default boolean isOnDemand(EvaluationContext evaluationContext, ValueDescriptor valueDescriptor) {
    return isOnDemandForced(evaluationContext);
  }
  /** Whether the value itself (rather than the link) should be shown. */
  default boolean isShowValue(ValueDescriptor valueDescriptor, EvaluationContext evaluationContext) {
    return !isOnDemand(evaluationContext, valueDescriptor) || isCalculated(valueDescriptor);
  }
  /** Creates the link evaluator: activating it marks the node's descriptor calculated and recomputes the presentation. */
  static XFullValueEvaluator createFullValueEvaluator(String text) {
    return new XFullValueEvaluator(text) {
      @Override
      public void startEvaluation(@NotNull XFullValueEvaluationCallback callback) {
        if (callback instanceof HeadlessValueEvaluationCallback) {
          XValueNodeImpl node = ((HeadlessValueEvaluationCallback)callback).getNode();
          // Remove the link and flag the descriptor so the next presentation shows the real value.
          node.clearFullValueEvaluator();
          setCalculated(((JavaValue)node.getValueContainer()).getDescriptor());
          node.getValueContainer().computePresentation(node, XValuePlace.TREE);
        }
        callback.evaluated("");
      }
    }.setShowValuePopup(false);
  }
  /** Per-descriptor flag set once the user has requested calculation. */
  Key<Boolean> ON_DEMAND_CALCULATED = Key.create("ON_DEMAND_CALCULATED");
  static boolean isCalculated(ValueDescriptor descriptor) {
    return ON_DEMAND_CALCULATED.get(descriptor, false);
  }
  static void setCalculated(ValueDescriptor descriptor) {
    ON_DEMAND_CALCULATED.set(descriptor, true);
  }
  /** True when on-demand rendering is force-enabled for the whole debug session via ForceOnDemandRenderersAction. */
  static boolean isOnDemandForced(EvaluationContext evaluationContext) {
    return ForceOnDemandRenderersAction.isForcedOnDemand(
      (XDebugSessionImpl)((DebugProcessImpl)evaluationContext.getDebugProcess()).getXdebugProcess().getSession());
  }
}
| apache-2.0 |
xuanaiwu/wechatcms | src/org/sword/wechat4j/oauth/protocol/valid_access_token/ValidAccessTokenResponse.java | 863 | package org.sword.wechat4j.oauth.protocol.valid_access_token;
/**
 * Response of the WeChat OAuth endpoint that checks whether an authorization
 * credential (access_token) is still valid.
 * <p>
 * Created by xuwen on 2015-12-11.
 */
public class ValidAccessTokenResponse {
    // Error code reported by the API, kept verbatim as a string.
    private String errcode;
    // Error message reported by the API; the literal "ok" signals success.
    private String errmsg;
    public String getErrcode() {
        return errcode;
    }
    public void setErrcode(String errcode) {
        this.errcode = errcode;
    }
    public String getErrmsg() {
        return errmsg;
    }
    public void setErrmsg(String errmsg) {
        this.errmsg = errmsg;
    }
    /**
     * @return {@code true} iff the API reported success, i.e. {@code errmsg} equals {@code "ok"}
     */
    public boolean ok(){
        // "ok".equals(null) is already false, so the former explicit null check was redundant.
        return "ok".equals(this.errmsg);
    }
    @Override
    public String toString() {
        return "ValidAccessTokenResponse{" +
                "errcode='" + errcode + '\'' +
                ", errmsg='" + errmsg + '\'' +
                '}';
    }
}
| apache-2.0 |
ilantukh/ignite | modules/core/src/main/java/org/apache/ignite/internal/jdbc/thin/JdbcThinPreparedStatement.java | 18238 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.jdbc.thin;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Calendar;
import org.apache.ignite.internal.processors.odbc.SqlListenerUtils;
import org.apache.ignite.internal.processors.odbc.SqlStateCode;
import org.apache.ignite.internal.processors.odbc.jdbc.JdbcMetaParamsRequest;
import org.apache.ignite.internal.processors.odbc.jdbc.JdbcMetaParamsResult;
import org.apache.ignite.internal.processors.odbc.jdbc.JdbcQuery;
import org.apache.ignite.internal.processors.odbc.jdbc.JdbcStatementType;
/**
* JDBC prepared statement implementation.
*/
public class JdbcThinPreparedStatement extends JdbcThinStatement implements PreparedStatement {
/** SQL query. */
private final String sql;
/** Query arguments. */
protected ArrayList<Object> args;
/** Parameters metadata. */
private JdbcThinParameterMetadata metaData;
/**
* Creates new prepared statement.
*
* @param conn Connection.
* @param sql SQL query.
* @param resHoldability Result set holdability.
* @param schema Schema name.
*/
JdbcThinPreparedStatement(JdbcThinConnection conn, String sql, int resHoldability, String schema) {
super(conn, resHoldability, schema);
this.sql = sql;
}
/** {@inheritDoc} */
@Override public ResultSet executeQuery() throws SQLException {
executeWithArguments(JdbcStatementType.SELECT_STATEMENT_TYPE);
ResultSet rs = getResultSet();
if (rs == null)
throw new SQLException("The query isn't SELECT query: " + sql, SqlStateCode.PARSING_EXCEPTION);
return rs;
}
/** {@inheritDoc} */
@Override public ResultSet executeQuery(String sql) throws SQLException {
throw new SQLException("The method 'executeQuery(String)' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public int executeUpdate() throws SQLException {
executeWithArguments(JdbcStatementType.UPDATE_STMT_TYPE);
int res = getUpdateCount();
if (res == -1)
throw new SQLException("The query is not DML statement: " + sql, SqlStateCode.PARSING_EXCEPTION);
return res;
}
/** {@inheritDoc} */
@Override public int executeUpdate(String sql) throws SQLException {
throw new SQLException("The method 'executeUpdate(String)' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
throw new SQLException("The method 'executeUpdate(String, int)' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public int executeUpdate(String sql, int columnIndexes[]) throws SQLException {
throw new SQLException("The method 'executeUpdate(String, int[])' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public int executeUpdate(String sql, String columnNames[]) throws SQLException {
throw new SQLException("The method 'executeUpdate(String, String[])' is called on PreparedStatement " +
"instance.", SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public void setNull(int paramIdx, int sqlType) throws SQLException {
setArgument(paramIdx, null);
}
/** {@inheritDoc} */
@Override public void setBoolean(int paramIdx, boolean x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setByte(int paramIdx, byte x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setShort(int paramIdx, short x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setInt(int paramIdx, int x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setLong(int paramIdx, long x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setFloat(int paramIdx, float x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setDouble(int paramIdx, double x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setBigDecimal(int paramIdx, BigDecimal x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setString(int paramIdx, String x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setBytes(int paramIdx, byte[] x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setDate(int paramIdx, Date x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setTime(int paramIdx, Time x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setTimestamp(int paramIdx, Timestamp x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setAsciiStream(int paramIdx, InputStream x, int length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setUnicodeStream(int paramIdx, InputStream x, int length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setBinaryStream(int paramIdx, InputStream x, int length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void clearParameters() throws SQLException {
ensureNotClosed();
args = null;
}
/** {@inheritDoc} */
@Override public void setObject(int paramIdx, Object x, int targetSqlType) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setObject(int paramIdx, Object x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public boolean execute() throws SQLException {
executeWithArguments(JdbcStatementType.ANY_STATEMENT_TYPE);
return resultSets.get(0).isQuery();
}
/**
* Execute query with arguments and nullify them afterwards.
*
* @param stmtType Expected statement type.
* @throws SQLException If failed.
*/
private void executeWithArguments(JdbcStatementType stmtType) throws SQLException {
execute0(stmtType, sql, args);
}
/** {@inheritDoc} */
@Override public boolean execute(String sql) throws SQLException {
throw new SQLException("The method 'execute(String)' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public void addBatch() throws SQLException {
ensureNotClosed();
checkStatementEligibleForBatching(sql);
checkStatementBatchEmpty();
batchSize++;
if (conn.isStream())
conn.addBatch(sql, args);
else {
if (batch == null) {
batch = new ArrayList<>();
batch.add(new JdbcQuery(sql, args.toArray(new Object[args.size()])));
}
else
batch.add(new JdbcQuery(null, args.toArray(new Object[args.size()])));
}
args = null;
}
/** {@inheritDoc} */
@Override public void addBatch(String sql) throws SQLException {
throw new SQLException("The method 'addBatch(String)' is called on PreparedStatement instance.",
SqlStateCode.UNSUPPORTED_OPERATION);
}
/** {@inheritDoc} */
@Override public void setCharacterStream(int paramIdx, Reader x, int length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setRef(int paramIdx, Ref x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setBlob(int paramIdx, Blob x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setClob(int paramIdx, Clob x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setArray(int paramIdx, Array x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public ResultSetMetaData getMetaData() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Meta data for prepared statement is not supported.");
}
/** {@inheritDoc} */
@Override public void setDate(int paramIdx, Date x, Calendar cal) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setTime(int paramIdx, Time x, Calendar cal) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setTimestamp(int paramIdx, Timestamp x, Calendar cal) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setNull(int paramIdx, int sqlType, String typeName) throws SQLException {
if (!JdbcThinUtils.isPlainJdbcType(sqlType)) {
throw new SQLFeatureNotSupportedException("The SQL type is unsupported. [type=" + sqlType + "typeName="
+ typeName + ']');
}
setNull(paramIdx, sqlType);
}
/** {@inheritDoc} */
@Override public void setURL(int paramIdx, URL x) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public ParameterMetaData getParameterMetaData() throws SQLException {
ensureNotClosed();
if (metaData != null)
return metaData;
JdbcMetaParamsResult res = conn.sendRequest(new JdbcMetaParamsRequest(conn.getSchema(), sql)).
response();
metaData = new JdbcThinParameterMetadata(res.meta());
return metaData;
}
/** {@inheritDoc} */
@Override public void setRowId(int paramIdx, RowId x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setNString(int paramIdx, String val) throws SQLException {
ensureNotClosed();
setString(paramIdx, val);
}
/** {@inheritDoc} */
@Override public void setNCharacterStream(int paramIdx, Reader val, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setNClob(int paramIdx, NClob val) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setClob(int paramIdx, Reader reader, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setBlob(int paramIdx, InputStream inputStream, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setNClob(int paramIdx, Reader reader, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setSQLXML(int paramIdx, SQLXML xmlObj) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setObject(int paramIdx, Object x, int targetSqlType, int scaleOrLength) throws SQLException {
setArgument(paramIdx, x);
}
/** {@inheritDoc} */
@Override public void setAsciiStream(int paramIdx, InputStream x, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setBinaryStream(int paramIdx, InputStream x, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setCharacterStream(int paramIdx, Reader x, long length) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setAsciiStream(int paramIdx, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setBinaryStream(int paramIdx, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setCharacterStream(int paramIdx, Reader x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public void setNCharacterStream(int paramIdx, Reader val) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setClob(int paramIdx, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setBlob(int paramIdx, InputStream inputStream) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void setNClob(int paramIdx, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public <T> T unwrap(Class<T> iface) throws SQLException {
if (!isWrapperFor(iface))
throw new SQLException("Prepared statement is not a wrapper for " + iface.getName());
return (T)this;
}
/** {@inheritDoc} */
@Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
return iface != null && iface.isAssignableFrom(JdbcThinPreparedStatement.class);
}
/**
* Sets query argument value.
*
* @param paramIdx Index.
* @param val Value.
* @throws SQLException If index is invalid.
*/
private void setArgument(int paramIdx, Object val) throws SQLException {
ensureNotClosed();
if (val != null && !SqlListenerUtils.isPlainType(val.getClass()))
throw new SQLException("Parameter type is unsupported. [cls=" + val.getClass() + ']',
SqlStateCode.INVALID_PARAMETER_VALUE);
if (paramIdx < 1)
throw new SQLException("Parameter index is invalid: " + paramIdx);
if (args == null)
args = new ArrayList<>(paramIdx);
while (args.size() < paramIdx)
args.add(null);
args.set(paramIdx - 1, val);
}
}
| apache-2.0 |
marcosemiao/selma | processor/src/test/java/fr/xebia/extras/selma/beans/CityOut.java | 1357 | /*
* Copyright 2013 Xebia and Séven Le Mesle
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fr.xebia.extras.selma.beans;
/**
 * Mutable JavaBean carrying mapped city data on the output side of a Selma mapping.
 *
 * <p>Holds the city name, its population count and a flag telling whether the
 * city is a capital. All properties follow standard getter/setter conventions.
 */
public class CityOut {

    /** City name. */
    private String name;

    /** Number of inhabitants. */
    private long population;

    /** Whether this city is a capital. */
    private boolean capital;

    /** @return the city name, or {@code null} if unset */
    public String getName() {
        return name;
    }

    /** @param cityName city name to store */
    public void setName(String cityName) {
        name = cityName;
    }

    /** @return the population count ({@code 0} if unset) */
    public long getPopulation() {
        return population;
    }

    /** @param inhabitants population count to store */
    public void setPopulation(long inhabitants) {
        population = inhabitants;
    }

    /** @return {@code true} if this city is a capital */
    public boolean isCapital() {
        return capital;
    }

    /** @param isCapital capital flag to store */
    public void setCapital(boolean isCapital) {
        capital = isCapital;
    }
}
| apache-2.0 |
janpolowinski/dswarm | controller/src/main/java/org/dswarm/controller/resources/job/FiltersResource.java | 7515 | /**
* Copyright (C) 2013 – 2015 SLUB Dresden & Avantgarde Labs GmbH (<code@dswarm.org>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dswarm.controller.resources.job;
import javax.inject.Inject;
import javax.inject.Provider;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.servlet.RequestScoped;
import com.wordnik.swagger.annotations.Api;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiParam;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;
import org.dswarm.controller.DMPControllerException;
import org.dswarm.controller.resources.BasicDMPResource;
import org.dswarm.persistence.model.job.Filter;
import org.dswarm.persistence.model.job.proxy.ProxyFilter;
import org.dswarm.persistence.service.job.FilterService;
/**
 * A resource (controller service) for {@link Filter}s.
 *
 * <p>Exposes CRUD endpoints under {@code /filters}; all operations delegate to the generic
 * {@link BasicDMPResource} implementation, with filter-specific behavior limited to
 * {@link #prepareObjectForUpdate(Filter, Filter)}.
 *
 * @author tgaengler
 * @author fniederlein
 */
@RequestScoped
@Api(value = "/filters", description = "Operations about filters.")
@Path("filters")
public class FiltersResource extends BasicDMPResource<FilterService, ProxyFilter, Filter> {

    /**
     * Creates a new resource (controller service) for {@link Filter}s with the provider of the filter persistence
     * service and the object mapper provider.
     *
     * @param persistenceServiceProviderArg provider of the filter persistence service
     * @param objectMapperProviderArg provider of the JSON object mapper
     * @throws DMPControllerException if the resource couldn't be initialised
     */
    @Inject
    public FiltersResource(final Provider<FilterService> persistenceServiceProviderArg,
            final Provider<ObjectMapper> objectMapperProviderArg) throws DMPControllerException {

        super(Filter.class, persistenceServiceProviderArg, objectMapperProviderArg);
    }

    /**
     * This endpoint returns a filter as JSON representation for the provided filter identifier.
     *
     * @param id a filter identifier
     * @return a JSON representation of a filter
     * @throws DMPControllerException if retrieval fails
     */
    @ApiOperation(value = "get the filter that matches the given id", notes = "Returns the Filter object that matches the given id.")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "returns the filter (as JSON) that matches the given id"),
            @ApiResponse(code = 404, message = "could not find a filter for the given id"),
            @ApiResponse(code = 500, message = "internal processing error (see body for details)") })
    @GET
    @Path("/{id}")
    @Produces(MediaType.APPLICATION_JSON)
    @Override
    public Response getObject(@ApiParam(value = "filter identifier", required = true) @PathParam("id") final String id)
            throws DMPControllerException {

        return super.getObject(id);
    }

    /**
     * This endpoint consumes a filter as JSON representation and persists this filter in the database.
     *
     * @param jsonObjectString a JSON representation of one filter
     * @return the persisted filter as JSON representation
     * @throws DMPControllerException if persisting fails
     */
    @ApiOperation(value = "create a new filter", notes = "Returns a new Filter object.", response = Filter.class)
    @ApiResponses(value = { @ApiResponse(code = 201, message = "filter was successfully persisted"),
            @ApiResponse(code = 500, message = "internal processing error (see body for details)") })
    @POST
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    @Override
    public Response createObject(@ApiParam(value = "filter (as JSON)", required = true) final String jsonObjectString) throws DMPControllerException {

        return super.createObject(jsonObjectString);
    }

    /**
     * This endpoint returns a list of all filters as JSON representation.
     *
     * @return a list of all filters as JSON representation
     * @throws DMPControllerException if retrieval fails
     */
    @ApiOperation(value = "get all filters ", notes = "Returns a list of Filter objects.")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "returns all available filters (as JSON)"),
            @ApiResponse(code = 404, message = "could not find any filter, i.e., there are no filters available"),
            @ApiResponse(code = 500, message = "internal processing error (see body for details)") })
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @Override
    public Response getObjects() throws DMPControllerException {

        return super.getObjects();
    }

    /**
     * This endpoint consumes a filter as JSON representation and updates this filter in the database.
     *
     * @param jsonObjectString a JSON representation of one filter
     * @param uuid a filter identifier
     * @return the updated filter as JSON representation
     * @throws DMPControllerException if the update fails
     */
    @Override
    @ApiOperation(value = "update filter with given id ", notes = "Returns an updated Filter object.")
    @ApiResponses(value = { @ApiResponse(code = 200, message = "filter was successfully updated"),
            @ApiResponse(code = 404, message = "could not find a filter for the given id"),
            @ApiResponse(code = 500, message = "internal processing error (see body for details)") })
    @PUT
    @Path("/{id}")
    @Consumes(MediaType.APPLICATION_JSON)
    @Produces(MediaType.APPLICATION_JSON)
    public Response updateObject(@ApiParam(value = "filter (as JSON)", required = true) final String jsonObjectString,
            @ApiParam(value = "filter identifier", required = true) @PathParam("id") final String uuid) throws DMPControllerException {

        return super.updateObject(jsonObjectString, uuid);
    }

    /**
     * This endpoint deletes a filter that matches the given id.
     *
     * @param id a filter identifier
     * @return status 204 if removal was successful, 404 if id not found, 409 if it couldn't be removed, or 500 if something else
     *         went wrong
     * @throws DMPControllerException if deletion fails
     */
    @ApiOperation(value = "delete filter that matches the given id", notes = "Returns status 204 if removal was successful, 404 if id not found, 409 if it couldn't be removed, or 500 if something else went wrong.")
    @ApiResponses(value = { @ApiResponse(code = 204, message = "filter was successfully deleted"),
            @ApiResponse(code = 404, message = "could not find a filter for the given id"),
            @ApiResponse(code = 409, message = "filter couldn't be deleted (maybe there are some existing constraints to related objects)"),
            @ApiResponse(code = 500, message = "internal processing error (see body for details)") })
    @DELETE
    @Path("/{id}")
    @Override
    public Response deleteObject(@ApiParam(value = "filter identifier", required = true) @PathParam("id") final String id)
            throws DMPControllerException {

        return super.deleteObject(id);
    }

    /**
     * {@inheritDoc}<br/>
     * Updates the name (via the super implementation) and the expression of the filter.
     */
    @Override
    protected Filter prepareObjectForUpdate(final Filter objectFromJSON, final Filter object) {

        super.prepareObjectForUpdate(objectFromJSON, object);

        // copy over the filter-specific attribute; the generic name handling happens in super
        object.setExpression(objectFromJSON.getExpression());

        return object;
    }
}
| apache-2.0 |
minwoo-jung/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/monitor/collector/jvmgc/DetailedJvmGcMetricCollector.java | 4195 | /*
* Copyright 2018 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.monitor.collector.jvmgc;
import com.navercorp.pinpoint.common.util.Assert;
import com.navercorp.pinpoint.profiler.monitor.collector.AgentStatMetricCollector;
import com.navercorp.pinpoint.profiler.monitor.metric.JvmGcDetailedMetricSnapshot;
import com.navercorp.pinpoint.profiler.monitor.metric.JvmGcMetricSnapshot;
import com.navercorp.pinpoint.profiler.monitor.metric.gc.DetailedGarbageCollectorMetric;
import com.navercorp.pinpoint.profiler.monitor.metric.gc.DetailedGarbageCollectorMetricSnapshot;
import com.navercorp.pinpoint.profiler.monitor.metric.memory.DetailedMemoryMetric;
import com.navercorp.pinpoint.profiler.monitor.metric.memory.DetailedMemoryMetricSnapshot;
/**
* @author dawidmalina
* @author HyunGil Jeong
*/
public class DetailedJvmGcMetricCollector implements AgentStatMetricCollector<JvmGcMetricSnapshot> {
private final BasicJvmGcMetricCollector jvmGcMetricCollector;
private final DetailedMemoryMetric detailedMemoryMetric;
private final DetailedGarbageCollectorMetric detailedGarbageCollectorMetric;
public DetailedJvmGcMetricCollector(
BasicJvmGcMetricCollector jvmGcMetricCollector,
DetailedMemoryMetric detailedMemoryMetric,
DetailedGarbageCollectorMetric detailedGarbageCollectorMetric) {
this.jvmGcMetricCollector = Assert.requireNonNull(jvmGcMetricCollector, "jvmGcMetricCollector");
this.detailedMemoryMetric = Assert.requireNonNull(detailedMemoryMetric, "detailedMemoryMetric");
this.detailedGarbageCollectorMetric = Assert.requireNonNull(detailedGarbageCollectorMetric, "detailedGarbageCollectorMetric");
}
@Override
public JvmGcMetricSnapshot collect() {
final JvmGcMetricSnapshot jvmGcMetricSnapshot = jvmGcMetricCollector.collect();
final DetailedMemoryMetricSnapshot detailedMemoryMetricSnapshot = detailedMemoryMetric.getSnapshot();
final DetailedGarbageCollectorMetricSnapshot detailedGarbageCollectorMetricSnapshot = detailedGarbageCollectorMetric.getSnapshot();
final JvmGcDetailedMetricSnapshot jvmGcDetailedMetricSnapshot = new JvmGcDetailedMetricSnapshot();
jvmGcDetailedMetricSnapshot.setJvmPoolNewGenUsed(detailedMemoryMetricSnapshot.getNewGenUsage());
jvmGcDetailedMetricSnapshot.setJvmPoolOldGenUsed(detailedMemoryMetricSnapshot.getOldGenUsage());
jvmGcDetailedMetricSnapshot.setJvmPoolSurvivorSpaceUsed(detailedMemoryMetricSnapshot.getSurvivorSpaceUsage());
jvmGcDetailedMetricSnapshot.setJvmPoolCodeCacheUsed(detailedMemoryMetricSnapshot.getCodeCacheUsage());
jvmGcDetailedMetricSnapshot.setJvmPoolPermGenUsed(detailedMemoryMetricSnapshot.getPermGenUsage());
jvmGcDetailedMetricSnapshot.setJvmPoolMetaspaceUsed(detailedMemoryMetricSnapshot.getMetaspaceUsage());
jvmGcDetailedMetricSnapshot.setJvmGcNewCount(detailedGarbageCollectorMetricSnapshot.getGcNewCount());
jvmGcDetailedMetricSnapshot.setJvmGcNewTime(detailedGarbageCollectorMetricSnapshot.getGcNewTime());
jvmGcMetricSnapshot.setJvmGcDetailed(jvmGcDetailedMetricSnapshot);
return jvmGcMetricSnapshot;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("DetailedJvmGcMetricCollector{");
sb.append("jvmGcMetricCollector=").append(jvmGcMetricCollector);
sb.append(", detailedMemoryMetric=").append(detailedMemoryMetric);
sb.append(", detailedGarbageCollectorMetric=").append(detailedGarbageCollectorMetric);
sb.append('}');
return sb.toString();
}
}
| apache-2.0 |
codymarcel/phoenix | phoenix-core/src/main/java/org/apache/phoenix/coprocessor/generated/MetaDataProtos.java | 447043 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: MetaDataService.proto
package org.apache.phoenix.coprocessor.generated;
public final class MetaDataProtos {
private MetaDataProtos() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf enum {@code MutationCode}
*/
public enum MutationCode
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>TABLE_ALREADY_EXISTS = 0;</code>
*/
TABLE_ALREADY_EXISTS(0, 0),
/**
* <code>TABLE_NOT_FOUND = 1;</code>
*/
TABLE_NOT_FOUND(1, 1),
/**
* <code>COLUMN_NOT_FOUND = 2;</code>
*/
COLUMN_NOT_FOUND(2, 2),
/**
* <code>COLUMN_ALREADY_EXISTS = 3;</code>
*/
COLUMN_ALREADY_EXISTS(3, 3),
/**
* <code>CONCURRENT_TABLE_MUTATION = 4;</code>
*/
CONCURRENT_TABLE_MUTATION(4, 4),
/**
* <code>TABLE_NOT_IN_REGION = 5;</code>
*/
TABLE_NOT_IN_REGION(5, 5),
/**
* <code>NEWER_TABLE_FOUND = 6;</code>
*/
NEWER_TABLE_FOUND(6, 6),
/**
* <code>UNALLOWED_TABLE_MUTATION = 7;</code>
*/
UNALLOWED_TABLE_MUTATION(7, 7),
/**
* <code>NO_PK_COLUMNS = 8;</code>
*/
NO_PK_COLUMNS(8, 8),
/**
* <code>PARENT_TABLE_NOT_FOUND = 9;</code>
*/
PARENT_TABLE_NOT_FOUND(9, 9),
/**
* <code>FUNCTION_ALREADY_EXISTS = 10;</code>
*/
FUNCTION_ALREADY_EXISTS(10, 10),
/**
* <code>FUNCTION_NOT_FOUND = 11;</code>
*/
FUNCTION_NOT_FOUND(11, 11),
/**
* <code>NEWER_FUNCTION_FOUND = 12;</code>
*/
NEWER_FUNCTION_FOUND(12, 12),
/**
* <code>FUNCTION_NOT_IN_REGION = 13;</code>
*/
FUNCTION_NOT_IN_REGION(13, 13),
;
/**
* <code>TABLE_ALREADY_EXISTS = 0;</code>
*/
public static final int TABLE_ALREADY_EXISTS_VALUE = 0;
/**
* <code>TABLE_NOT_FOUND = 1;</code>
*/
public static final int TABLE_NOT_FOUND_VALUE = 1;
/**
* <code>COLUMN_NOT_FOUND = 2;</code>
*/
public static final int COLUMN_NOT_FOUND_VALUE = 2;
/**
* <code>COLUMN_ALREADY_EXISTS = 3;</code>
*/
public static final int COLUMN_ALREADY_EXISTS_VALUE = 3;
/**
* <code>CONCURRENT_TABLE_MUTATION = 4;</code>
*/
public static final int CONCURRENT_TABLE_MUTATION_VALUE = 4;
/**
* <code>TABLE_NOT_IN_REGION = 5;</code>
*/
public static final int TABLE_NOT_IN_REGION_VALUE = 5;
/**
* <code>NEWER_TABLE_FOUND = 6;</code>
*/
public static final int NEWER_TABLE_FOUND_VALUE = 6;
/**
* <code>UNALLOWED_TABLE_MUTATION = 7;</code>
*/
public static final int UNALLOWED_TABLE_MUTATION_VALUE = 7;
/**
* <code>NO_PK_COLUMNS = 8;</code>
*/
public static final int NO_PK_COLUMNS_VALUE = 8;
/**
* <code>PARENT_TABLE_NOT_FOUND = 9;</code>
*/
public static final int PARENT_TABLE_NOT_FOUND_VALUE = 9;
/**
* <code>FUNCTION_ALREADY_EXISTS = 10;</code>
*/
public static final int FUNCTION_ALREADY_EXISTS_VALUE = 10;
/**
* <code>FUNCTION_NOT_FOUND = 11;</code>
*/
public static final int FUNCTION_NOT_FOUND_VALUE = 11;
/**
* <code>NEWER_FUNCTION_FOUND = 12;</code>
*/
public static final int NEWER_FUNCTION_FOUND_VALUE = 12;
/**
* <code>FUNCTION_NOT_IN_REGION = 13;</code>
*/
public static final int FUNCTION_NOT_IN_REGION_VALUE = 13;
public final int getNumber() { return value; }
public static MutationCode valueOf(int value) {
switch (value) {
case 0: return TABLE_ALREADY_EXISTS;
case 1: return TABLE_NOT_FOUND;
case 2: return COLUMN_NOT_FOUND;
case 3: return COLUMN_ALREADY_EXISTS;
case 4: return CONCURRENT_TABLE_MUTATION;
case 5: return TABLE_NOT_IN_REGION;
case 6: return NEWER_TABLE_FOUND;
case 7: return UNALLOWED_TABLE_MUTATION;
case 8: return NO_PK_COLUMNS;
case 9: return PARENT_TABLE_NOT_FOUND;
case 10: return FUNCTION_ALREADY_EXISTS;
case 11: return FUNCTION_NOT_FOUND;
case 12: return NEWER_FUNCTION_FOUND;
case 13: return FUNCTION_NOT_IN_REGION;
default: return null;
}
}
    // Used by the protobuf runtime to resolve wire numbers to constants.
    public static com.google.protobuf.Internal.EnumLiteMap<MutationCode>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static com.google.protobuf.Internal.EnumLiteMap<MutationCode>
        internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<MutationCode>() {
            public MutationCode findValueByNumber(int number) {
              return MutationCode.valueOf(number);
            }
          };
    // Descriptor of this particular constant within the enum's descriptor.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(index);
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    // MutationCode is the first (index 0) enum type declared in the .proto file.
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.getDescriptor().getEnumTypes().get(0);
    }
    // Snapshot of values() so descriptor lookups avoid re-cloning the array.
    private static final MutationCode[] VALUES = values();
    // Resolves a constant from its EnumValueDescriptor (reflection-based APIs).
    public static MutationCode valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    private final int index;  // position within the descriptor's value list
    private final int value;  // wire-format number
    private MutationCode(int index, int value) {
      this.index = index;
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:MutationCode)
}
  /**
   * Read-only accessor contract shared by {@code MetaDataResponse} and its
   * {@code Builder}: one has/get pair per optional field and list-style
   * accessors per repeated field, as emitted by the protobuf compiler.
   */
  public interface MetaDataResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .MutationCode returnCode = 1;
    /**
     * <code>optional .MutationCode returnCode = 1;</code>
     */
    boolean hasReturnCode();
    /**
     * <code>optional .MutationCode returnCode = 1;</code>
     */
    org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode getReturnCode();

    // optional int64 mutationTime = 2;
    /**
     * <code>optional int64 mutationTime = 2;</code>
     */
    boolean hasMutationTime();
    /**
     * <code>optional int64 mutationTime = 2;</code>
     */
    long getMutationTime();

    // optional .PTable table = 3;
    /**
     * <code>optional .PTable table = 3;</code>
     */
    boolean hasTable();
    /**
     * <code>optional .PTable table = 3;</code>
     */
    org.apache.phoenix.coprocessor.generated.PTableProtos.PTable getTable();
    /**
     * <code>optional .PTable table = 3;</code>
     */
    org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder getTableOrBuilder();

    // repeated bytes tablesToDelete = 4;
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getTablesToDeleteList();
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    int getTablesToDeleteCount();
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    com.google.protobuf.ByteString getTablesToDelete(int index);

    // optional bytes columnName = 5;
    /**
     * <code>optional bytes columnName = 5;</code>
     */
    boolean hasColumnName();
    /**
     * <code>optional bytes columnName = 5;</code>
     */
    com.google.protobuf.ByteString getColumnName();

    // optional bytes familyName = 6;
    /**
     * <code>optional bytes familyName = 6;</code>
     */
    boolean hasFamilyName();
    /**
     * <code>optional bytes familyName = 6;</code>
     */
    com.google.protobuf.ByteString getFamilyName();

    // optional bytes functionName = 7;
    /**
     * <code>optional bytes functionName = 7;</code>
     */
    boolean hasFunctionName();
    /**
     * <code>optional bytes functionName = 7;</code>
     */
    com.google.protobuf.ByteString getFunctionName();

    // repeated .PFunction function = 8;
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction>
        getFunctionList();
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction getFunction(int index);
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    int getFunctionCount();
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    java.util.List<? extends org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder>
        getFunctionOrBuilderList();
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder getFunctionOrBuilder(
        int index);
  }
/**
* Protobuf type {@code MetaDataResponse}
*/
public static final class MetaDataResponse extends
com.google.protobuf.GeneratedMessage
implements MetaDataResponseOrBuilder {
    // Use MetaDataResponse.newBuilder() to construct.
    private MetaDataResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor reserved for building the shared immutable default instance.
    private MetaDataResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Process-wide singleton returned for "empty" messages.
    private static final MetaDataResponse defaultInstance;
    public static MetaDataResponse getDefaultInstance() {
      return defaultInstance;
    }

    public MetaDataResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in this schema version.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor used by PARSER: consumes tag/value pairs
    // until tag 0 (end of stream) or until an unknown field signals end of
    // message.  NOTE(review): protoc-generated code — regenerate, don't hand-edit.
    private MetaDataResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;  // tracks which repeated lists were allocated
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of stream / end of message.
              done = true;
              break;
            default: {
              // Preserve fields this schema version does not recognize.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // Field 1 (returnCode): unrecognized enum numbers are kept as
              // unknown varint fields rather than silently dropped.
              int rawValue = input.readEnum();
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode value = org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode.valueOf(rawValue);
              if (value == null) {
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                returnCode_ = value;
              }
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              mutationTime_ = input.readInt64();
              break;
            }
            case 26: {
              // Field 3 (table): if a table was already read, merge the new
              // bytes into it (protobuf last-field merge semantics).
              org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = table_.toBuilder();
              }
              table_ = input.readMessage(org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(table_);
                table_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              // Field 4 (tablesToDelete): repeated; allocate list on first use.
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                tablesToDelete_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000008;
              }
              tablesToDelete_.add(input.readBytes());
              break;
            }
            case 42: {
              bitField0_ |= 0x00000008;
              columnName_ = input.readBytes();
              break;
            }
            case 50: {
              bitField0_ |= 0x00000010;
              familyName_ = input.readBytes();
              break;
            }
            case 58: {
              bitField0_ |= 0x00000020;
              functionName_ = input.readBytes();
              break;
            }
            case 66: {
              // Field 8 (function): repeated message; allocate list on first use.
              if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
                function_ = new java.util.ArrayList<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction>();
                mutable_bitField0_ |= 0x00000080;
              }
              function_.add(input.readMessage(org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Repeated lists are frozen even when parsing fails part-way, so the
        // partial message attached via setUnfinishedMessage is safe to expose.
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          tablesToDelete_ = java.util.Collections.unmodifiableList(tablesToDelete_);
        }
        if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) {
          function_ = java.util.Collections.unmodifiableList(function_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor / field-accessor plumbing used by the reflection APIs.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_MetaDataResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_MetaDataResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.Builder.class);
    }

    // Stream parser; delegates to the wire-format parsing constructor above.
    public static com.google.protobuf.Parser<MetaDataResponse> PARSER =
        new com.google.protobuf.AbstractParser<MetaDataResponse>() {
      public MetaDataResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MetaDataResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MetaDataResponse> getParserForType() {
      return PARSER;
    }
    // Presence bits for the optional fields; repeated fields have no presence
    // bit (an empty list simply means "not set").
    private int bitField0_;
    // optional .MutationCode returnCode = 1;  (presence bit 0x00000001)
    public static final int RETURNCODE_FIELD_NUMBER = 1;
    private org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode returnCode_;
    /**
     * <code>optional .MutationCode returnCode = 1;</code>
     */
    public boolean hasReturnCode() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .MutationCode returnCode = 1;</code>
     */
    public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode getReturnCode() {
      return returnCode_;
    }

    // optional int64 mutationTime = 2;  (presence bit 0x00000002)
    public static final int MUTATIONTIME_FIELD_NUMBER = 2;
    private long mutationTime_;
    /**
     * <code>optional int64 mutationTime = 2;</code>
     */
    public boolean hasMutationTime() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int64 mutationTime = 2;</code>
     */
    public long getMutationTime() {
      return mutationTime_;
    }

    // optional .PTable table = 3;  (presence bit 0x00000004)
    public static final int TABLE_FIELD_NUMBER = 3;
    private org.apache.phoenix.coprocessor.generated.PTableProtos.PTable table_;
    /**
     * <code>optional .PTable table = 3;</code>
     */
    public boolean hasTable() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .PTable table = 3;</code>
     */
    public org.apache.phoenix.coprocessor.generated.PTableProtos.PTable getTable() {
      return table_;
    }
    /**
     * <code>optional .PTable table = 3;</code>
     */
    public org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder getTableOrBuilder() {
      return table_;
    }

    // repeated bytes tablesToDelete = 4;  (no presence bit; immutable list)
    public static final int TABLESTODELETE_FIELD_NUMBER = 4;
    private java.util.List<com.google.protobuf.ByteString> tablesToDelete_;
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getTablesToDeleteList() {
      return tablesToDelete_;
    }
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    public int getTablesToDeleteCount() {
      return tablesToDelete_.size();
    }
    /**
     * <code>repeated bytes tablesToDelete = 4;</code>
     */
    public com.google.protobuf.ByteString getTablesToDelete(int index) {
      return tablesToDelete_.get(index);
    }

    // optional bytes columnName = 5;  (presence bit 0x00000008)
    public static final int COLUMNNAME_FIELD_NUMBER = 5;
    private com.google.protobuf.ByteString columnName_;
    /**
     * <code>optional bytes columnName = 5;</code>
     */
    public boolean hasColumnName() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bytes columnName = 5;</code>
     */
    public com.google.protobuf.ByteString getColumnName() {
      return columnName_;
    }

    // optional bytes familyName = 6;  (presence bit 0x00000010)
    public static final int FAMILYNAME_FIELD_NUMBER = 6;
    private com.google.protobuf.ByteString familyName_;
    /**
     * <code>optional bytes familyName = 6;</code>
     */
    public boolean hasFamilyName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional bytes familyName = 6;</code>
     */
    public com.google.protobuf.ByteString getFamilyName() {
      return familyName_;
    }

    // optional bytes functionName = 7;  (presence bit 0x00000020)
    public static final int FUNCTIONNAME_FIELD_NUMBER = 7;
    private com.google.protobuf.ByteString functionName_;
    /**
     * <code>optional bytes functionName = 7;</code>
     */
    public boolean hasFunctionName() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional bytes functionName = 7;</code>
     */
    public com.google.protobuf.ByteString getFunctionName() {
      return functionName_;
    }

    // repeated .PFunction function = 8;  (no presence bit; immutable list)
    public static final int FUNCTION_FIELD_NUMBER = 8;
    private java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction> function_;
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    public java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction> getFunctionList() {
      return function_;
    }
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    public java.util.List<? extends org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder>
        getFunctionOrBuilderList() {
      return function_;
    }
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    public int getFunctionCount() {
      return function_.size();
    }
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction getFunction(int index) {
      return function_.get(index);
    }
    /**
     * <code>repeated .PFunction function = 8;</code>
     */
    public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder getFunctionOrBuilder(
        int index) {
      return function_.get(index);
    }
    // Assigns proto2 defaults; called by the parsing constructor before any
    // wire data is applied, so absent fields read as their defaults.
    private void initFields() {
      returnCode_ = org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode.TABLE_ALREADY_EXISTS;
      mutationTime_ = 0L;
      table_ = org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.getDefaultInstance();
      tablesToDelete_ = java.util.Collections.emptyList();
      columnName_ = com.google.protobuf.ByteString.EMPTY;
      familyName_ = com.google.protobuf.ByteString.EMPTY;
      functionName_ = com.google.protobuf.ByteString.EMPTY;
      function_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // This message has no required fields of its own; it is initialized
      // iff every nested message (table, each function) is initialized.
      if (hasTable()) {
        if (!getTable().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getFunctionCount(); i++) {
        if (!getFunction(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes present fields in ascending field-number order, then any
    // unknown fields carried over from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // ensures memoizedSerializedSize is populated first
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, returnCode_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt64(2, mutationTime_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, table_);
      }
      for (int i = 0; i < tablesToDelete_.size(); i++) {
        output.writeBytes(4, tablesToDelete_.get(i));
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(5, columnName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(6, familyName_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(7, functionName_);
      }
      for (int i = 0; i < function_.size(); i++) {
        output.writeMessage(8, function_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized byte size of the serialized form; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, returnCode_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(2, mutationTime_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, table_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < tablesToDelete_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(tablesToDelete_.get(i));
        }
        size += dataSize;
        // Field number 4 encodes to a single-byte tag, hence 1 byte per element.
        size += 1 * getTablesToDeleteList().size();
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, columnName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, familyName_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(7, functionName_);
      }
      for (int i = 0; i < function_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(8, function_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    // Java serialization delegates to the superclass's writeReplace, which
    // substitutes a protobuf-backed serialized proxy for this object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) obj;
boolean result = true;
result = result && (hasReturnCode() == other.hasReturnCode());
if (hasReturnCode()) {
result = result &&
(getReturnCode() == other.getReturnCode());
}
result = result && (hasMutationTime() == other.hasMutationTime());
if (hasMutationTime()) {
result = result && (getMutationTime()
== other.getMutationTime());
}
result = result && (hasTable() == other.hasTable());
if (hasTable()) {
result = result && getTable()
.equals(other.getTable());
}
result = result && getTablesToDeleteList()
.equals(other.getTablesToDeleteList());
result = result && (hasColumnName() == other.hasColumnName());
if (hasColumnName()) {
result = result && getColumnName()
.equals(other.getColumnName());
}
result = result && (hasFamilyName() == other.hasFamilyName());
if (hasFamilyName()) {
result = result && getFamilyName()
.equals(other.getFamilyName());
}
result = result && (hasFunctionName() == other.hasFunctionName());
if (hasFunctionName()) {
result = result && getFunctionName()
.equals(other.getFunctionName());
}
result = result && getFunctionList()
.equals(other.getFunctionList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
    private int memoizedHashCode = 0;  // 0 means "not yet computed"
    // Mixes each present field's number and value; mirrors equals() so equal
    // messages hash equally.  hashEnum/hashLong are protobuf-runtime helpers
    // inherited from the generated-message superclass.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasReturnCode()) {
        hash = (37 * hash) + RETURNCODE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getReturnCode());
      }
      if (hasMutationTime()) {
        hash = (37 * hash) + MUTATIONTIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getMutationTime());
      }
      if (hasTable()) {
        hash = (37 * hash) + TABLE_FIELD_NUMBER;
        hash = (53 * hash) + getTable().hashCode();
      }
      if (getTablesToDeleteCount() > 0) {
        hash = (37 * hash) + TABLESTODELETE_FIELD_NUMBER;
        hash = (53 * hash) + getTablesToDeleteList().hashCode();
      }
      if (hasColumnName()) {
        hash = (37 * hash) + COLUMNNAME_FIELD_NUMBER;
        hash = (53 * hash) + getColumnName().hashCode();
      }
      if (hasFamilyName()) {
        hash = (37 * hash) + FAMILYNAME_FIELD_NUMBER;
        hash = (53 * hash) + getFamilyName().hashCode();
      }
      if (hasFunctionName()) {
        hash = (37 * hash) + FUNCTIONNAME_FIELD_NUMBER;
        hash = (53 * hash) + getFunctionName().hashCode();
      }
      if (getFunctionCount() > 0) {
        hash = (37 * hash) + FUNCTION_FIELD_NUMBER;
        hash = (53 * hash) + getFunctionList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points for every supported input source; all of them
    // delegate to PARSER (and thus to the parsing constructor).
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories: a fresh builder, a builder pre-populated from a
    // prototype, and a builder seeded from this instance (toBuilder).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code MetaDataResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponseOrBuilder {
      // Descriptor / field-accessor plumbing (same tables as the message class).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_MetaDataResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_MetaDataResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.Builder.class);
      }

      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates nested-field builders only in reflection mode.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getTableFieldBuilder();
          getFunctionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets every field to its proto2 default and clears all presence bits.
      public Builder clear() {
        super.clear();
        returnCode_ = org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode.TABLE_ALREADY_EXISTS;
        bitField0_ = (bitField0_ & ~0x00000001);
        mutationTime_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (tableBuilder_ == null) {
          table_ = org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.getDefaultInstance();
        } else {
          tableBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        tablesToDelete_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000008);
        columnName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000010);
        familyName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000020);
        functionName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000040);
        if (functionBuilder_ == null) {
          function_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
        } else {
          functionBuilder_.clear();
        }
        return this;
      }

      // Deep snapshot: a new builder seeded from this builder's current state.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_MetaDataResponse_descriptor;
      }

      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
      }

      // Like buildPartial() but rejects messages whose nested messages are
      // missing required fields.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into an immutable message.  Builder presence bits
      // differ from message bits: builder bit 0x08 tracks the repeated
      // tablesToDelete list (which has no presence bit in the message), so
      // columnName / familyName / functionName shift from builder bits
      // 0x10 / 0x20 / 0x40 down to message bits 0x08 / 0x10 / 0x20.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.returnCode_ = returnCode_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.mutationTime_ = mutationTime_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (tableBuilder_ == null) {
          result.table_ = table_;
        } else {
          result.table_ = tableBuilder_.build();
        }
        // Repeated list is frozen and handed to the message; the builder's
        // bit is cleared so a later add re-copies into a fresh mutable list.
        if (((bitField0_ & 0x00000008) == 0x00000008)) {
          tablesToDelete_ = java.util.Collections.unmodifiableList(tablesToDelete_);
          bitField0_ = (bitField0_ & ~0x00000008);
        }
        result.tablesToDelete_ = tablesToDelete_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000008;
        }
        result.columnName_ = columnName_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000010;
        }
        result.familyName_ = familyName_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000020;
        }
        result.functionName_ = functionName_;
        if (functionBuilder_ == null) {
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            function_ = java.util.Collections.unmodifiableList(function_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.function_ = function_;
        } else {
          result.function_ = functionBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Type-dispatching merge: fast path for a same-type message, otherwise
      // the reflection-based merge in the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-by-field merge.  Scalars: other's value wins when present.
      // Repeated fields: other's elements are appended; when this builder's
      // list is empty it adopts other's immutable list directly (copy-on-write
      // — ensure*IsMutable() copies before any later mutation).
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()) return this;
        if (other.hasReturnCode()) {
          setReturnCode(other.getReturnCode());
        }
        if (other.hasMutationTime()) {
          setMutationTime(other.getMutationTime());
        }
        if (other.hasTable()) {
          mergeTable(other.getTable());
        }
        if (!other.tablesToDelete_.isEmpty()) {
          if (tablesToDelete_.isEmpty()) {
            tablesToDelete_ = other.tablesToDelete_;
            bitField0_ = (bitField0_ & ~0x00000008);
          } else {
            ensureTablesToDeleteIsMutable();
            tablesToDelete_.addAll(other.tablesToDelete_);
          }
          onChanged();
        }
        if (other.hasColumnName()) {
          setColumnName(other.getColumnName());
        }
        if (other.hasFamilyName()) {
          setFamilyName(other.getFamilyName());
        }
        if (other.hasFunctionName()) {
          setFunctionName(other.getFunctionName());
        }
        if (functionBuilder_ == null) {
          if (!other.function_.isEmpty()) {
            if (function_.isEmpty()) {
              function_ = other.function_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureFunctionIsMutable();
              function_.addAll(other.function_);
            }
            onChanged();
          }
        } else {
          if (!other.function_.isEmpty()) {
            if (functionBuilder_.isEmpty()) {
              // Discard the empty nested builder and adopt other's list;
              // recreate the builder only in always-use-field-builders mode.
              functionBuilder_.dispose();
              functionBuilder_ = null;
              function_ = other.function_;
              bitField0_ = (bitField0_ & ~0x00000080);
              functionBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getFunctionFieldBuilder() : null;
            } else {
              functionBuilder_.addAllMessages(other.function_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // Unmemoized variant of the message's isInitialized: nested messages
      // must themselves be initialized.
      public final boolean isInitialized() {
        if (hasTable()) {
          if (!getTable().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getFunctionCount(); i++) {
          if (!getFunction(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      // Parses from a stream and merges the result in.  The finally block
      // merges whatever was parsed even when an exception is propagated, so
      // partial data is not silently lost.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Builder presence bits; note the layout differs from the message class
      // (see buildPartial): 0x01 returnCode, 0x02 mutationTime, 0x04 table,
      // 0x08 tablesToDelete, 0x10 columnName, 0x20 familyName,
      // 0x40 functionName, 0x80 function.
      private int bitField0_;

      // optional .MutationCode returnCode = 1;
      private org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode returnCode_ = org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode.TABLE_ALREADY_EXISTS;
      /**
       * <code>optional .MutationCode returnCode = 1;</code>
       */
      public boolean hasReturnCode() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .MutationCode returnCode = 1;</code>
       */
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode getReturnCode() {
        return returnCode_;
      }
      /**
       * <code>optional .MutationCode returnCode = 1;</code>
       */
      public Builder setReturnCode(org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        returnCode_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional .MutationCode returnCode = 1;</code>
       */
      public Builder clearReturnCode() {
        bitField0_ = (bitField0_ & ~0x00000001);
        returnCode_ = org.apache.phoenix.coprocessor.generated.MetaDataProtos.MutationCode.TABLE_ALREADY_EXISTS;
        onChanged();
        return this;
      }

      // optional int64 mutationTime = 2;
      private long mutationTime_ ;
      /**
       * <code>optional int64 mutationTime = 2;</code>
       */
      public boolean hasMutationTime() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional int64 mutationTime = 2;</code>
       */
      public long getMutationTime() {
        return mutationTime_;
      }
      /**
       * <code>optional int64 mutationTime = 2;</code>
       */
      public Builder setMutationTime(long value) {
        bitField0_ |= 0x00000002;
        mutationTime_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 mutationTime = 2;</code>
       */
      public Builder clearMutationTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        mutationTime_ = 0L;
        onChanged();
        return this;
      }
// optional .PTable table = 3;
private org.apache.phoenix.coprocessor.generated.PTableProtos.PTable table_ = org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.phoenix.coprocessor.generated.PTableProtos.PTable, org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder> tableBuilder_;
/**
* <code>optional .PTable table = 3;</code>
*/
public boolean hasTable() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .PTable table = 3;</code>
*/
public org.apache.phoenix.coprocessor.generated.PTableProtos.PTable getTable() {
if (tableBuilder_ == null) {
return table_;
} else {
return tableBuilder_.getMessage();
}
}
/**
* <code>optional .PTable table = 3;</code>
*/
public Builder setTable(org.apache.phoenix.coprocessor.generated.PTableProtos.PTable value) {
if (tableBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
table_ = value;
onChanged();
} else {
tableBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .PTable table = 3;</code>
*/
public Builder setTable(
org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder builderForValue) {
if (tableBuilder_ == null) {
table_ = builderForValue.build();
onChanged();
} else {
tableBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .PTable table = 3;</code>
*/
public Builder mergeTable(org.apache.phoenix.coprocessor.generated.PTableProtos.PTable value) {
if (tableBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004) &&
table_ != org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.getDefaultInstance()) {
table_ =
org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.newBuilder(table_).mergeFrom(value).buildPartial();
} else {
table_ = value;
}
onChanged();
} else {
tableBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000004;
return this;
}
/**
* <code>optional .PTable table = 3;</code>
*/
public Builder clearTable() {
if (tableBuilder_ == null) {
table_ = org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.getDefaultInstance();
onChanged();
} else {
tableBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
/**
* <code>optional .PTable table = 3;</code>
*/
public org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder getTableBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTableFieldBuilder().getBuilder();
}
/**
* <code>optional .PTable table = 3;</code>
*/
public org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder getTableOrBuilder() {
if (tableBuilder_ != null) {
return tableBuilder_.getMessageOrBuilder();
} else {
return table_;
}
}
/**
* <code>optional .PTable table = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.phoenix.coprocessor.generated.PTableProtos.PTable, org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder>
getTableFieldBuilder() {
if (tableBuilder_ == null) {
tableBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.phoenix.coprocessor.generated.PTableProtos.PTable, org.apache.phoenix.coprocessor.generated.PTableProtos.PTable.Builder, org.apache.phoenix.coprocessor.generated.PTableProtos.PTableOrBuilder>(
table_,
getParentForChildren(),
isClean());
table_ = null;
}
return tableBuilder_;
}
// repeated bytes tablesToDelete = 4;
// Copy-on-write: starts as the shared immutable empty list and is replaced by a
// private ArrayList the first time a mutation is attempted.
private java.util.List<com.google.protobuf.ByteString> tablesToDelete_ = java.util.Collections.emptyList();
private void ensureTablesToDeleteIsMutable() {
  if (!((bitField0_ & 0x00000008) == 0x00000008)) {
    tablesToDelete_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tablesToDelete_);
    bitField0_ |= 0x00000008;
  }
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 * @return an unmodifiable view of the current elements
 */
public java.util.List<com.google.protobuf.ByteString>
    getTablesToDeleteList() {
  return java.util.Collections.unmodifiableList(tablesToDelete_);
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 */
public int getTablesToDeleteCount() {
  return tablesToDelete_.size();
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 */
public com.google.protobuf.ByteString getTablesToDelete(int index) {
  return tablesToDelete_.get(index);
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 * Replaces the element at index; null is rejected.
 */
public Builder setTablesToDelete(
    int index, com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTablesToDeleteIsMutable();
  tablesToDelete_.set(index, value);
  onChanged();
  return this;
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 * Appends one element; null is rejected.
 */
public Builder addTablesToDelete(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  ensureTablesToDeleteIsMutable();
  tablesToDelete_.add(value);
  onChanged();
  return this;
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 * Appends all given elements via GeneratedMessage.Builder.addAll.
 */
public Builder addAllTablesToDelete(
    java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
  ensureTablesToDeleteIsMutable();
  super.addAll(values, tablesToDelete_);
  onChanged();
  return this;
}
/**
 * <code>repeated bytes tablesToDelete = 4;</code>
 * Resets to the shared empty list and drops the mutability bit.
 */
public Builder clearTablesToDelete() {
  tablesToDelete_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
// optional bytes columnName = 5;
private com.google.protobuf.ByteString columnName_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes columnName = 5;</code>
 * @return true if columnName has been explicitly set on this builder
 */
public boolean hasColumnName() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional bytes columnName = 5;</code>
 * @return the current value (empty ByteString when unset)
 */
public com.google.protobuf.ByteString getColumnName() {
  return columnName_;
}
/**
 * <code>optional bytes columnName = 5;</code>
 * Sets the field; null is rejected.
 */
public Builder setColumnName(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
  columnName_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes columnName = 5;</code>
 * Clears the has-bit and restores the default from the message's default instance.
 */
public Builder clearColumnName() {
  bitField0_ = (bitField0_ & ~0x00000010);
  columnName_ = getDefaultInstance().getColumnName();
  onChanged();
  return this;
}
// optional bytes familyName = 6;
private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes familyName = 6;</code>
 * @return true if familyName has been explicitly set on this builder
 */
public boolean hasFamilyName() {
  return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional bytes familyName = 6;</code>
 * @return the current value (empty ByteString when unset)
 */
public com.google.protobuf.ByteString getFamilyName() {
  return familyName_;
}
/**
 * <code>optional bytes familyName = 6;</code>
 * Sets the field; null is rejected.
 */
public Builder setFamilyName(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
  familyName_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes familyName = 6;</code>
 * Clears the has-bit and restores the default from the message's default instance.
 */
public Builder clearFamilyName() {
  bitField0_ = (bitField0_ & ~0x00000020);
  familyName_ = getDefaultInstance().getFamilyName();
  onChanged();
  return this;
}
// optional bytes functionName = 7;
private com.google.protobuf.ByteString functionName_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes functionName = 7;</code>
 * @return true if functionName has been explicitly set on this builder
 */
public boolean hasFunctionName() {
  return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional bytes functionName = 7;</code>
 * @return the current value (empty ByteString when unset)
 */
public com.google.protobuf.ByteString getFunctionName() {
  return functionName_;
}
/**
 * <code>optional bytes functionName = 7;</code>
 * Sets the field; null is rejected.
 */
public Builder setFunctionName(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000040;
  functionName_ = value;
  onChanged();
  return this;
}
/**
 * <code>optional bytes functionName = 7;</code>
 * Clears the has-bit and restores the default from the message's default instance.
 */
public Builder clearFunctionName() {
  bitField0_ = (bitField0_ & ~0x00000040);
  functionName_ = getDefaultInstance().getFunctionName();
  onChanged();
  return this;
}
// repeated .PFunction function = 8;
// Repeated message field. Two ownership modes, mirroring the singular `table` field:
// until functionBuilder_ exists the elements live in function_ (copy-on-write list);
// after the first call to getFunctionFieldBuilder() the RepeatedFieldBuilder owns them.
private java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction> function_ =
  java.util.Collections.emptyList();
private void ensureFunctionIsMutable() {
  if (!((bitField0_ & 0x00000080) == 0x00000080)) {
    function_ = new java.util.ArrayList<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction>(function_);
    bitField0_ |= 0x00000080;
  }
}
private com.google.protobuf.RepeatedFieldBuilder<
    org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder> functionBuilder_;
/**
 * <code>repeated .PFunction function = 8;</code>
 * @return an unmodifiable view of the current elements
 */
public java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction> getFunctionList() {
  if (functionBuilder_ == null) {
    return java.util.Collections.unmodifiableList(function_);
  } else {
    return functionBuilder_.getMessageList();
  }
}
/**
 * <code>repeated .PFunction function = 8;</code>
 */
public int getFunctionCount() {
  if (functionBuilder_ == null) {
    return function_.size();
  } else {
    return functionBuilder_.getCount();
  }
}
/**
 * <code>repeated .PFunction function = 8;</code>
 */
public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction getFunction(int index) {
  if (functionBuilder_ == null) {
    return function_.get(index);
  } else {
    return functionBuilder_.getMessage(index);
  }
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Replaces the element at index; null is rejected.
 */
public Builder setFunction(
    int index, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction value) {
  if (functionBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureFunctionIsMutable();
    function_.set(index, value);
    onChanged();
  } else {
    functionBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Overload taking a nested builder; its build() result is stored.
 */
public Builder setFunction(
    int index, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder builderForValue) {
  if (functionBuilder_ == null) {
    ensureFunctionIsMutable();
    function_.set(index, builderForValue.build());
    onChanged();
  } else {
    functionBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Appends one element; null is rejected.
 */
public Builder addFunction(org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction value) {
  if (functionBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureFunctionIsMutable();
    function_.add(value);
    onChanged();
  } else {
    functionBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Inserts one element at index; null is rejected.
 */
public Builder addFunction(
    int index, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction value) {
  if (functionBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureFunctionIsMutable();
    function_.add(index, value);
    onChanged();
  } else {
    functionBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Appends the built value of the given nested builder.
 */
public Builder addFunction(
    org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder builderForValue) {
  if (functionBuilder_ == null) {
    ensureFunctionIsMutable();
    function_.add(builderForValue.build());
    onChanged();
  } else {
    functionBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Inserts the built value of the given nested builder at index.
 */
public Builder addFunction(
    int index, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder builderForValue) {
  if (functionBuilder_ == null) {
    ensureFunctionIsMutable();
    function_.add(index, builderForValue.build());
    onChanged();
  } else {
    functionBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Appends all given elements.
 */
public Builder addAllFunction(
    java.lang.Iterable<? extends org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction> values) {
  if (functionBuilder_ == null) {
    ensureFunctionIsMutable();
    super.addAll(values, function_);
    onChanged();
  } else {
    functionBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Resets the field to empty.
 */
public Builder clearFunction() {
  if (functionBuilder_ == null) {
    function_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000080);
    onChanged();
  } else {
    functionBuilder_.clear();
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Removes the element at index.
 */
public Builder removeFunction(int index) {
  if (functionBuilder_ == null) {
    ensureFunctionIsMutable();
    function_.remove(index);
    onChanged();
  } else {
    functionBuilder_.remove(index);
  }
  return this;
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Returns a mutable nested builder for the element at index (forces builder mode).
 */
public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder getFunctionBuilder(
    int index) {
  return getFunctionFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Read-only view of the element at index without forcing builder mode.
 */
public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder getFunctionOrBuilder(
    int index) {
  if (functionBuilder_ == null) {
    return function_.get(index); } else {
    return functionBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Read-only view of all elements without forcing builder mode.
 */
public java.util.List<? extends org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder>
     getFunctionOrBuilderList() {
  if (functionBuilder_ != null) {
    return functionBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(function_);
  }
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Appends a new default-valued element and returns its builder for in-place editing.
 */
public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder addFunctionBuilder() {
  return getFunctionFieldBuilder().addBuilder(
      org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.getDefaultInstance());
}
/**
 * <code>repeated .PFunction function = 8;</code>
 * Inserts a new default-valued element at index and returns its builder.
 */
public org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder addFunctionBuilder(
    int index) {
  return getFunctionFieldBuilder().addBuilder(
      index, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.getDefaultInstance());
}
/**
 * <code>repeated .PFunction function = 8;</code>
 */
public java.util.List<org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder>
     getFunctionBuilderList() {
  return getFunctionFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder, seeding it with the current list;
// function_ is nulled afterwards because the builder becomes the single owner.
private com.google.protobuf.RepeatedFieldBuilder<
    org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder>
    getFunctionFieldBuilder() {
  if (functionBuilder_ == null) {
    functionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
        org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunction.Builder, org.apache.phoenix.coprocessor.generated.PFunctionProtos.PFunctionOrBuilder>(
            function_,
            ((bitField0_ & 0x00000080) == 0x00000080),
            getParentForChildren(),
            isClean());
    function_ = null;
  }
  return functionBuilder_;
}
// @@protoc_insertion_point(builder_scope:MetaDataResponse)
}
// Eagerly creates the singleton default instance used by getDefaultInstance().
static {
  defaultInstance = new MetaDataResponse(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MetaDataResponse)
}
/**
 * Read-only accessor interface implemented by both {@code GetTableRequest}
 * and its {@code Builder}: a has-check plus a getter for each of the five
 * required fields.
 */
public interface GetTableRequestOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // required bytes tenantId = 1;
  /**
   * <code>required bytes tenantId = 1;</code>
   */
  boolean hasTenantId();
  /**
   * <code>required bytes tenantId = 1;</code>
   */
  com.google.protobuf.ByteString getTenantId();
  // required bytes schemaName = 2;
  /**
   * <code>required bytes schemaName = 2;</code>
   */
  boolean hasSchemaName();
  /**
   * <code>required bytes schemaName = 2;</code>
   */
  com.google.protobuf.ByteString getSchemaName();
  // required bytes tableName = 3;
  /**
   * <code>required bytes tableName = 3;</code>
   */
  boolean hasTableName();
  /**
   * <code>required bytes tableName = 3;</code>
   */
  com.google.protobuf.ByteString getTableName();
  // required int64 tableTimestamp = 4;
  /**
   * <code>required int64 tableTimestamp = 4;</code>
   */
  boolean hasTableTimestamp();
  /**
   * <code>required int64 tableTimestamp = 4;</code>
   */
  long getTableTimestamp();
  // required int64 clientTimestamp = 5;
  /**
   * <code>required int64 clientTimestamp = 5;</code>
   */
  boolean hasClientTimestamp();
  /**
   * <code>required int64 clientTimestamp = 5;</code>
   */
  long getClientTimestamp();
}
/**
* Protobuf type {@code GetTableRequest}
*/
public static final class GetTableRequest extends
com.google.protobuf.GeneratedMessage
implements GetTableRequestOrBuilder {
// Use GetTableRequest.newBuilder() to construct.
// Use GetTableRequest.newBuilder() to construct.
private GetTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
// noInit constructor: used only for the singleton default instance (see static init).
private GetTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetTableRequest defaultInstance;
/** @return the shared immutable default instance of this message */
public static GetTableRequest getDefaultInstance() {
  return defaultInstance;
}
public GetTableRequest getDefaultInstanceForType() {
  return defaultInstance;
}
// Fields present on the wire that this schema version does not recognize.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor: reads tag/value pairs until end of stream,
 * storing recognized fields and routing anything else to unknownFields.
 * Invoked by PARSER.parsePartialFrom.
 */
private GetTableRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 means end of stream.
          done = true;
          break;
        default: {
          // Unrecognized tag: preserve in unknownFields; stop if it can't be parsed.
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        // Each case below is (field_number << 3) | wire_type.
        case 10: {
          bitField0_ |= 0x00000001;
          tenantId_ = input.readBytes();
          break;
        }
        case 18: {
          bitField0_ |= 0x00000002;
          schemaName_ = input.readBytes();
          break;
        }
        case 26: {
          bitField0_ |= 0x00000004;
          tableName_ = input.readBytes();
          break;
        }
        case 32: {
          bitField0_ |= 0x00000008;
          tableTimestamp_ = input.readInt64();
          break;
        }
        case 40: {
          bitField0_ |= 0x00000010;
          clientTimestamp_ = input.readInt64();
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    // Attach the half-built message so callers can inspect what parsed so far.
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Descriptor/reflection plumbing: exposes this message's schema metadata.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetTableRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.Builder.class);
}
// Stateless parser singleton; delegates to the wire-format parsing constructor.
public static com.google.protobuf.Parser<GetTableRequest> PARSER =
    new com.google.protobuf.AbstractParser<GetTableRequest>() {
  public GetTableRequest parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new GetTableRequest(input, extensionRegistry);
  }
};
@java.lang.Override
public com.google.protobuf.Parser<GetTableRequest> getParserForType() {
  return PARSER;
}
// Bit i of bitField0_ records whether field i+1 was present on the wire.
private int bitField0_;
// required bytes tenantId = 1;
public static final int TENANTID_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString tenantId_;
/**
 * <code>required bytes tenantId = 1;</code>
 */
public boolean hasTenantId() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required bytes tenantId = 1;</code>
 */
public com.google.protobuf.ByteString getTenantId() {
  return tenantId_;
}
// required bytes schemaName = 2;
public static final int SCHEMANAME_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString schemaName_;
/**
 * <code>required bytes schemaName = 2;</code>
 */
public boolean hasSchemaName() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required bytes schemaName = 2;</code>
 */
public com.google.protobuf.ByteString getSchemaName() {
  return schemaName_;
}
// required bytes tableName = 3;
public static final int TABLENAME_FIELD_NUMBER = 3;
private com.google.protobuf.ByteString tableName_;
/**
 * <code>required bytes tableName = 3;</code>
 */
public boolean hasTableName() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>required bytes tableName = 3;</code>
 */
public com.google.protobuf.ByteString getTableName() {
  return tableName_;
}
// required int64 tableTimestamp = 4;
public static final int TABLETIMESTAMP_FIELD_NUMBER = 4;
private long tableTimestamp_;
/**
 * <code>required int64 tableTimestamp = 4;</code>
 */
public boolean hasTableTimestamp() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>required int64 tableTimestamp = 4;</code>
 */
public long getTableTimestamp() {
  return tableTimestamp_;
}
// required int64 clientTimestamp = 5;
public static final int CLIENTTIMESTAMP_FIELD_NUMBER = 5;
private long clientTimestamp_;
/**
 * <code>required int64 clientTimestamp = 5;</code>
 */
public boolean hasClientTimestamp() {
  return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>required int64 clientTimestamp = 5;</code>
 */
public long getClientTimestamp() {
  return clientTimestamp_;
}
// Resets every field to its protobuf default; called before wire parsing.
private void initFields() {
  tenantId_ = com.google.protobuf.ByteString.EMPTY;
  schemaName_ = com.google.protobuf.ByteString.EMPTY;
  tableName_ = com.google.protobuf.ByteString.EMPTY;
  tableTimestamp_ = 0L;
  clientTimestamp_ = 0L;
}
// Memoized initialization state: -1 unknown, 0 missing required fields, 1 complete.
private byte memoizedIsInitialized = -1;
/**
 * Returns true only when all five required fields are present; the result is
 * cached because the message is immutable.
 */
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;
  if (!hasTenantId()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasSchemaName()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasTableName()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasTableTimestamp()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasClientTimestamp()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to the wire: each field is written only if its
 * has-bit is set, followed by any unknown fields carried through from parsing.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  getSerializedSize();  // primes the memoized size, which some streams rely on
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, tenantId_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, schemaName_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeBytes(3, tableName_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeInt64(4, tableTimestamp_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    output.writeInt64(5, clientTimestamp_);
  }
  getUnknownFields().writeTo(output);
}
// Cached wire size; -1 means not yet computed (safe: the message is immutable).
private int memoizedSerializedSize = -1;
/**
 * Computes (once) and returns the exact encoded byte size of this message,
 * summing only fields whose has-bit is set plus unknown fields.
 */
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(1, tenantId_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(2, schemaName_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(3, tableName_);
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt64Size(4, tableTimestamp_);
  }
  if (((bitField0_ & 0x00000010) == 0x00000010)) {
    size += com.google.protobuf.CodedOutputStream
      .computeInt64Size(5, clientTimestamp_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's serialized proxy.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
/**
 * Value equality: two GetTableRequests are equal when each field has the same
 * presence state and (if present) the same value, and unknown fields match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest)) {
    return super.equals(obj);
  }
  org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest) obj;
  boolean result = true;
  result = result && (hasTenantId() == other.hasTenantId());
  if (hasTenantId()) {
    result = result && getTenantId()
        .equals(other.getTenantId());
  }
  result = result && (hasSchemaName() == other.hasSchemaName());
  if (hasSchemaName()) {
    result = result && getSchemaName()
        .equals(other.getSchemaName());
  }
  result = result && (hasTableName() == other.hasTableName());
  if (hasTableName()) {
    result = result && getTableName()
        .equals(other.getTableName());
  }
  result = result && (hasTableTimestamp() == other.hasTableTimestamp());
  if (hasTableTimestamp()) {
    result = result && (getTableTimestamp()
        == other.getTableTimestamp());
  }
  result = result && (hasClientTimestamp() == other.hasClientTimestamp());
  if (hasClientTimestamp()) {
    result = result && (getClientTimestamp()
        == other.getClientTimestamp());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
// Cached hash; 0 means not yet computed (safe: the message is immutable).
private int memoizedHashCode = 0;
/**
 * Hash consistent with equals(): folds in each present field tagged by its
 * field number, plus the descriptor and unknown fields.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasTenantId()) {
    hash = (37 * hash) + TENANTID_FIELD_NUMBER;
    hash = (53 * hash) + getTenantId().hashCode();
  }
  if (hasSchemaName()) {
    hash = (37 * hash) + SCHEMANAME_FIELD_NUMBER;
    hash = (53 * hash) + getSchemaName().hashCode();
  }
  if (hasTableName()) {
    hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
    hash = (53 * hash) + getTableName().hashCode();
  }
  if (hasTableTimestamp()) {
    hash = (37 * hash) + TABLETIMESTAMP_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getTableTimestamp());
  }
  if (hasClientTimestamp()) {
    hash = (37 * hash) + CLIENTTIMESTAMP_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getClientTimestamp());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard static parse entry points; all delegate to PARSER.
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message bytes.
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
/** @return a fresh, empty builder for this message type */
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
/** @return a builder pre-populated from the given prototype message */
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest prototype) {
  return newBuilder().mergeFrom(prototype);
}
/** @return a builder pre-populated from this instance */
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* Protobuf type {@code GetTableRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequestOrBuilder {
// Descriptor/reflection plumbing for the builder (same metadata as the message).
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetTableRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
// Parent-aware constructor used for nested-builder change propagation.
private Builder(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// No message-typed fields in GetTableRequest, so nothing to pre-initialize.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
  }
}
private static Builder create() {
  return new Builder();
}
/** Resets every field to its default and clears all has-bits. */
public Builder clear() {
  super.clear();
  tenantId_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000001);
  schemaName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000002);
  tableName_ = com.google.protobuf.ByteString.EMPTY;
  bitField0_ = (bitField0_ & ~0x00000004);
  tableTimestamp_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000008);
  clientTimestamp_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000010);
  return this;
}
/** Deep copy via round-trip through buildPartial(). */
public Builder clone() {
  return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetTableRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest getDefaultInstanceForType() {
  return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.getDefaultInstance();
}
/**
 * Builds the message, throwing if any required field is missing;
 * use buildPartial() to skip that check.
 */
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest build() {
  org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
/**
 * Copies the builder state into a new immutable message without validating
 * required fields; has-bits are translated from the builder's bitField0_.
 */
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest buildPartial() {
  org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.tenantId_ = tenantId_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.schemaName_ = schemaName_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.tableName_ = tableName_;
  if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
    to_bitField0_ |= 0x00000008;
  }
  result.tableTimestamp_ = tableTimestamp_;
  if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
    to_bitField0_ |= 0x00000010;
  }
  result.clientTimestamp_ = clientTimestamp_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.getDefaultInstance()) return this;
if (other.hasTenantId()) {
setTenantId(other.getTenantId());
}
if (other.hasSchemaName()) {
setSchemaName(other.getSchemaName());
}
if (other.hasTableName()) {
setTableName(other.getTableName());
}
if (other.hasTableTimestamp()) {
setTableTimestamp(other.getTableTimestamp());
}
if (other.hasClientTimestamp()) {
setClientTimestamp(other.getClientTimestamp());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasTenantId()) {
return false;
}
if (!hasSchemaName()) {
return false;
}
if (!hasTableName()) {
return false;
}
if (!hasTableTimestamp()) {
return false;
}
if (!hasClientTimestamp()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required bytes tenantId = 1;
private com.google.protobuf.ByteString tenantId_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes tenantId = 1;</code>
*/
public boolean hasTenantId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public com.google.protobuf.ByteString getTenantId() {
return tenantId_;
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public Builder setTenantId(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
tenantId_ = value;
onChanged();
return this;
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public Builder clearTenantId() {
bitField0_ = (bitField0_ & ~0x00000001);
tenantId_ = getDefaultInstance().getTenantId();
onChanged();
return this;
}
// required bytes schemaName = 2;
private com.google.protobuf.ByteString schemaName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes schemaName = 2;</code>
*/
public boolean hasSchemaName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required bytes schemaName = 2;</code>
*/
public com.google.protobuf.ByteString getSchemaName() {
return schemaName_;
}
/**
* <code>required bytes schemaName = 2;</code>
*/
public Builder setSchemaName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
schemaName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes schemaName = 2;</code>
*/
public Builder clearSchemaName() {
bitField0_ = (bitField0_ & ~0x00000002);
schemaName_ = getDefaultInstance().getSchemaName();
onChanged();
return this;
}
// required bytes tableName = 3;
private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes tableName = 3;</code>
*/
public boolean hasTableName() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>required bytes tableName = 3;</code>
*/
public com.google.protobuf.ByteString getTableName() {
return tableName_;
}
/**
* <code>required bytes tableName = 3;</code>
*/
public Builder setTableName(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
tableName_ = value;
onChanged();
return this;
}
/**
* <code>required bytes tableName = 3;</code>
*/
public Builder clearTableName() {
bitField0_ = (bitField0_ & ~0x00000004);
tableName_ = getDefaultInstance().getTableName();
onChanged();
return this;
}
// required int64 tableTimestamp = 4;
private long tableTimestamp_ ;
/**
* <code>required int64 tableTimestamp = 4;</code>
*/
public boolean hasTableTimestamp() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required int64 tableTimestamp = 4;</code>
*/
public long getTableTimestamp() {
return tableTimestamp_;
}
/**
* <code>required int64 tableTimestamp = 4;</code>
*/
public Builder setTableTimestamp(long value) {
bitField0_ |= 0x00000008;
tableTimestamp_ = value;
onChanged();
return this;
}
/**
* <code>required int64 tableTimestamp = 4;</code>
*/
public Builder clearTableTimestamp() {
bitField0_ = (bitField0_ & ~0x00000008);
tableTimestamp_ = 0L;
onChanged();
return this;
}
// required int64 clientTimestamp = 5;
private long clientTimestamp_ ;
/**
* <code>required int64 clientTimestamp = 5;</code>
*/
public boolean hasClientTimestamp() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>required int64 clientTimestamp = 5;</code>
*/
public long getClientTimestamp() {
return clientTimestamp_;
}
/**
* <code>required int64 clientTimestamp = 5;</code>
*/
public Builder setClientTimestamp(long value) {
bitField0_ |= 0x00000010;
clientTimestamp_ = value;
onChanged();
return this;
}
/**
* <code>required int64 clientTimestamp = 5;</code>
*/
public Builder clearClientTimestamp() {
bitField0_ = (bitField0_ & ~0x00000010);
clientTimestamp_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:GetTableRequest)
}
// Eagerly creates the singleton default instance (noInit ctor skips field init,
// so initFields() must be called explicitly afterwards).
static {
  defaultInstance = new GetTableRequest(true);
  defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:GetTableRequest)
}
/**
 * Read-only accessor interface for {@code GetFunctionsRequest}, implemented by
 * both the immutable message and its Builder (protoc-generated; do not edit).
 */
public interface GetFunctionsRequestOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
  // required bytes tenantId = 1;
  /**
   * <code>required bytes tenantId = 1;</code>
   */
  boolean hasTenantId();
  /**
   * <code>required bytes tenantId = 1;</code>
   */
  com.google.protobuf.ByteString getTenantId();
  // repeated bytes functionNames = 2;
  /**
   * <code>repeated bytes functionNames = 2;</code>
   */
  java.util.List<com.google.protobuf.ByteString> getFunctionNamesList();
  /**
   * <code>repeated bytes functionNames = 2;</code>
   */
  int getFunctionNamesCount();
  /**
   * <code>repeated bytes functionNames = 2;</code>
   */
  com.google.protobuf.ByteString getFunctionNames(int index);
  // repeated int64 functionTimestamps = 3;
  /**
   * <code>repeated int64 functionTimestamps = 3;</code>
   */
  java.util.List<java.lang.Long> getFunctionTimestampsList();
  /**
   * <code>repeated int64 functionTimestamps = 3;</code>
   */
  int getFunctionTimestampsCount();
  /**
   * <code>repeated int64 functionTimestamps = 3;</code>
   */
  long getFunctionTimestamps(int index);
  // required int64 clientTimestamp = 4;
  /**
   * <code>required int64 clientTimestamp = 4;</code>
   */
  boolean hasClientTimestamp();
  /**
   * <code>required int64 clientTimestamp = 4;</code>
   */
  long getClientTimestamp();
}
/**
* Protobuf type {@code GetFunctionsRequest}
*/
public static final class GetFunctionsRequest extends
com.google.protobuf.GeneratedMessage
implements GetFunctionsRequestOrBuilder {
// Use GetFunctionsRequest.newBuilder() to construct.
private GetFunctionsRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private GetFunctionsRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final GetFunctionsRequest defaultInstance;
public static GetFunctionsRequest getDefaultInstance() {
return defaultInstance;
}
public GetFunctionsRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private GetFunctionsRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
tenantId_ = input.readBytes();
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
functionNames_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000002;
}
functionNames_.add(input.readBytes());
break;
}
case 24: {
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
functionTimestamps_ = new java.util.ArrayList<java.lang.Long>();
mutable_bitField0_ |= 0x00000004;
}
functionTimestamps_.add(input.readInt64());
break;
}
case 26: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00000004) == 0x00000004) && input.getBytesUntilLimit() > 0) {
functionTimestamps_ = new java.util.ArrayList<java.lang.Long>();
mutable_bitField0_ |= 0x00000004;
}
while (input.getBytesUntilLimit() > 0) {
functionTimestamps_.add(input.readInt64());
}
input.popLimit(limit);
break;
}
case 32: {
bitField0_ |= 0x00000002;
clientTimestamp_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
functionNames_ = java.util.Collections.unmodifiableList(functionNames_);
}
if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
functionTimestamps_ = java.util.Collections.unmodifiableList(functionTimestamps_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetFunctionsRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetFunctionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.Builder.class);
}
public static com.google.protobuf.Parser<GetFunctionsRequest> PARSER =
new com.google.protobuf.AbstractParser<GetFunctionsRequest>() {
public GetFunctionsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new GetFunctionsRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<GetFunctionsRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bytes tenantId = 1;
public static final int TENANTID_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString tenantId_;
/**
* <code>required bytes tenantId = 1;</code>
*/
public boolean hasTenantId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public com.google.protobuf.ByteString getTenantId() {
return tenantId_;
}
// repeated bytes functionNames = 2;
public static final int FUNCTIONNAMES_FIELD_NUMBER = 2;
private java.util.List<com.google.protobuf.ByteString> functionNames_;
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getFunctionNamesList() {
return functionNames_;
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public int getFunctionNamesCount() {
return functionNames_.size();
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public com.google.protobuf.ByteString getFunctionNames(int index) {
return functionNames_.get(index);
}
// repeated int64 functionTimestamps = 3;
public static final int FUNCTIONTIMESTAMPS_FIELD_NUMBER = 3;
private java.util.List<java.lang.Long> functionTimestamps_;
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public java.util.List<java.lang.Long>
getFunctionTimestampsList() {
return functionTimestamps_;
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public int getFunctionTimestampsCount() {
return functionTimestamps_.size();
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public long getFunctionTimestamps(int index) {
return functionTimestamps_.get(index);
}
// required int64 clientTimestamp = 4;
public static final int CLIENTTIMESTAMP_FIELD_NUMBER = 4;
private long clientTimestamp_;
/**
* <code>required int64 clientTimestamp = 4;</code>
*/
public boolean hasClientTimestamp() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required int64 clientTimestamp = 4;</code>
*/
public long getClientTimestamp() {
return clientTimestamp_;
}
private void initFields() {
tenantId_ = com.google.protobuf.ByteString.EMPTY;
functionNames_ = java.util.Collections.emptyList();
functionTimestamps_ = java.util.Collections.emptyList();
clientTimestamp_ = 0L;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasTenantId()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasClientTimestamp()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(1, tenantId_);
}
for (int i = 0; i < functionNames_.size(); i++) {
output.writeBytes(2, functionNames_.get(i));
}
for (int i = 0; i < functionTimestamps_.size(); i++) {
output.writeInt64(3, functionTimestamps_.get(i));
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(4, clientTimestamp_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(1, tenantId_);
}
{
int dataSize = 0;
for (int i = 0; i < functionNames_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(functionNames_.get(i));
}
size += dataSize;
size += 1 * getFunctionNamesList().size();
}
{
int dataSize = 0;
for (int i = 0; i < functionTimestamps_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeInt64SizeNoTag(functionTimestamps_.get(i));
}
size += dataSize;
size += 1 * getFunctionTimestampsList().size();
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(4, clientTimestamp_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest) obj;
boolean result = true;
result = result && (hasTenantId() == other.hasTenantId());
if (hasTenantId()) {
result = result && getTenantId()
.equals(other.getTenantId());
}
result = result && getFunctionNamesList()
.equals(other.getFunctionNamesList());
result = result && getFunctionTimestampsList()
.equals(other.getFunctionTimestampsList());
result = result && (hasClientTimestamp() == other.hasClientTimestamp());
if (hasClientTimestamp()) {
result = result && (getClientTimestamp()
== other.getClientTimestamp());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTenantId()) {
hash = (37 * hash) + TENANTID_FIELD_NUMBER;
hash = (53 * hash) + getTenantId().hashCode();
}
if (getFunctionNamesCount() > 0) {
hash = (37 * hash) + FUNCTIONNAMES_FIELD_NUMBER;
hash = (53 * hash) + getFunctionNamesList().hashCode();
}
if (getFunctionTimestampsCount() > 0) {
hash = (37 * hash) + FUNCTIONTIMESTAMPS_FIELD_NUMBER;
hash = (53 * hash) + getFunctionTimestampsList().hashCode();
}
if (hasClientTimestamp()) {
hash = (37 * hash) + CLIENTTIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getClientTimestamp());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code GetFunctionsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetFunctionsRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetFunctionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
tenantId_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
functionNames_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
functionTimestamps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
clientTimestamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetFunctionsRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest getDefaultInstanceForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.getDefaultInstance();
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest build() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest buildPartial() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.tenantId_ = tenantId_;
if (((bitField0_ & 0x00000002) == 0x00000002)) {
functionNames_ = java.util.Collections.unmodifiableList(functionNames_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.functionNames_ = functionNames_;
if (((bitField0_ & 0x00000004) == 0x00000004)) {
functionTimestamps_ = java.util.Collections.unmodifiableList(functionTimestamps_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.functionTimestamps_ = functionTimestamps_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000002;
}
result.clientTimestamp_ = clientTimestamp_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.getDefaultInstance()) return this;
if (other.hasTenantId()) {
setTenantId(other.getTenantId());
}
if (!other.functionNames_.isEmpty()) {
if (functionNames_.isEmpty()) {
functionNames_ = other.functionNames_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureFunctionNamesIsMutable();
functionNames_.addAll(other.functionNames_);
}
onChanged();
}
if (!other.functionTimestamps_.isEmpty()) {
if (functionTimestamps_.isEmpty()) {
functionTimestamps_ = other.functionTimestamps_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureFunctionTimestampsIsMutable();
functionTimestamps_.addAll(other.functionTimestamps_);
}
onChanged();
}
if (other.hasClientTimestamp()) {
setClientTimestamp(other.getClientTimestamp());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasTenantId()) {
return false;
}
if (!hasClientTimestamp()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required bytes tenantId = 1;
private com.google.protobuf.ByteString tenantId_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes tenantId = 1;</code>
*/
public boolean hasTenantId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public com.google.protobuf.ByteString getTenantId() {
return tenantId_;
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public Builder setTenantId(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
tenantId_ = value;
onChanged();
return this;
}
/**
* <code>required bytes tenantId = 1;</code>
*/
public Builder clearTenantId() {
bitField0_ = (bitField0_ & ~0x00000001);
tenantId_ = getDefaultInstance().getTenantId();
onChanged();
return this;
}
// repeated bytes functionNames = 2;
private java.util.List<com.google.protobuf.ByteString> functionNames_ = java.util.Collections.emptyList();
private void ensureFunctionNamesIsMutable() {
if (!((bitField0_ & 0x00000002) == 0x00000002)) {
functionNames_ = new java.util.ArrayList<com.google.protobuf.ByteString>(functionNames_);
bitField0_ |= 0x00000002;
}
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getFunctionNamesList() {
return java.util.Collections.unmodifiableList(functionNames_);
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public int getFunctionNamesCount() {
return functionNames_.size();
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public com.google.protobuf.ByteString getFunctionNames(int index) {
return functionNames_.get(index);
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public Builder setFunctionNames(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureFunctionNamesIsMutable();
functionNames_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public Builder addFunctionNames(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureFunctionNamesIsMutable();
functionNames_.add(value);
onChanged();
return this;
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public Builder addAllFunctionNames(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureFunctionNamesIsMutable();
super.addAll(values, functionNames_);
onChanged();
return this;
}
/**
* <code>repeated bytes functionNames = 2;</code>
*/
public Builder clearFunctionNames() {
functionNames_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
// repeated int64 functionTimestamps = 3;
private java.util.List<java.lang.Long> functionTimestamps_ = java.util.Collections.emptyList();
private void ensureFunctionTimestampsIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
functionTimestamps_ = new java.util.ArrayList<java.lang.Long>(functionTimestamps_);
bitField0_ |= 0x00000004;
}
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public java.util.List<java.lang.Long>
getFunctionTimestampsList() {
return java.util.Collections.unmodifiableList(functionTimestamps_);
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public int getFunctionTimestampsCount() {
return functionTimestamps_.size();
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public long getFunctionTimestamps(int index) {
return functionTimestamps_.get(index);
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public Builder setFunctionTimestamps(
int index, long value) {
ensureFunctionTimestampsIsMutable();
functionTimestamps_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public Builder addFunctionTimestamps(long value) {
ensureFunctionTimestampsIsMutable();
functionTimestamps_.add(value);
onChanged();
return this;
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public Builder addAllFunctionTimestamps(
java.lang.Iterable<? extends java.lang.Long> values) {
ensureFunctionTimestampsIsMutable();
super.addAll(values, functionTimestamps_);
onChanged();
return this;
}
/**
* <code>repeated int64 functionTimestamps = 3;</code>
*/
public Builder clearFunctionTimestamps() {
functionTimestamps_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
// required int64 clientTimestamp = 4;
private long clientTimestamp_ ;
/**
* <code>required int64 clientTimestamp = 4;</code>
*/
public boolean hasClientTimestamp() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>required int64 clientTimestamp = 4;</code>
*/
public long getClientTimestamp() {
return clientTimestamp_;
}
/**
* <code>required int64 clientTimestamp = 4;</code>
*
* Sets the value and marks the field present (bit 0x00000008) before
* notifying parent builders.
*/
public Builder setClientTimestamp(long value) {
bitField0_ |= 0x00000008;
clientTimestamp_ = value;
onChanged();
return this;
}
/**
* <code>required int64 clientTimestamp = 4;</code>
*
* Clears the presence bit and restores the proto default (0L).
*/
public Builder clearClientTimestamp() {
bitField0_ = (bitField0_ & ~0x00000008);
clientTimestamp_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:GetFunctionsRequest)
}
static {
defaultInstance = new GetFunctionsRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:GetFunctionsRequest)
}
// Read-only accessor contract implemented by both CreateTableRequest and its
// Builder; lets callers inspect a message without caring which form they hold.
public interface CreateTableRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated bytes tableMetadataMutations = 1;
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*
* Each element is a serialized MutationProto (see the message's javadoc).
*/
java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
int getTableMetadataMutationsCount();
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
com.google.protobuf.ByteString getTableMetadataMutations(int index);
}
/**
* Protobuf type {@code CreateTableRequest}
*
* <pre>
* each byte array represents a MutationProto instance
* </pre>
*
* Generated by the protocol buffer compiler -- do not edit by hand.
* Immutable message with a single repeated bytes field.
*/
public static final class CreateTableRequest extends
com.google.protobuf.GeneratedMessage
implements CreateTableRequestOrBuilder {
// Use CreateTableRequest.newBuilder() to construct.
private CreateTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit path: used only for the shared defaultInstance singleton below.
private CreateTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CreateTableRequest defaultInstance;
public static CreateTableRequest getDefaultInstance() {
return defaultInstance;
}
public CreateTableRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tags until EOF (tag 0), collecting
// field 1 (wire tag 10 = field 1, length-delimited) and preserving anything
// unrecognized in unknownFields for round-tripping.
private CreateTableRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Lazily allocate the list on first occurrence of field 1.
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
tableMetadataMutations_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Freeze the repeated field even when parsing failed part-way.
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateTableRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.Builder.class);
}
// Parser singleton used by all the static parseFrom(...) helpers below.
public static com.google.protobuf.Parser<CreateTableRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateTableRequest>() {
public CreateTableRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CreateTableRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CreateTableRequest> getParserForType() {
return PARSER;
}
// repeated bytes tableMetadataMutations = 1;
public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*
* The returned list is already unmodifiable (see the parsing constructor
* and buildPartial), so it is exposed directly.
*/
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return tableMetadataMutations_;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
// Sets field defaults; called from both constructors before parsing.
private void initFields() {
tableMetadataMutations_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 unknown, 0 missing required fields, 1 initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// No required fields in this message, so always initialized.
memoizedIsInitialized = 1;
return true;
}
// Serializes field 1 then any unknown fields; getSerializedSize() is called
// first to populate memoized sizes needed by nested writes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
output.writeBytes(1, tableMetadataMutations_.get(i));
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the exact wire size: per-element payload sizes
// plus one 1-byte tag per element, plus unknown fields.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(tableMetadataMutations_.get(i));
}
size += dataSize;
size += 1 * getTableMetadataMutationsList().size();
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Value equality over the repeated field and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest) obj;
boolean result = true;
result = result && getTableMetadataMutationsList()
.equals(other.getTableMetadataMutationsList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
// Memoized hash consistent with equals(); 0 doubles as "not yet computed".
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getTableMetadataMutationsCount() > 0) {
hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---- Static parse helpers: all delegate to PARSER. ----
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code CreateTableRequest}
*
* <pre>
* each byte array represents a MutationProto instance
* </pre>
*
* Mutable builder; bitField0_ bit 0x00000001 tracks whether the repeated
* list is a private mutable copy (copy-on-write).
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateTableRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields here, so nothing to pre-build even when
// alwaysUseFieldBuilders is set.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateTableRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest getDefaultInstanceForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.getDefaultInstance();
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest build() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Transfers state into a new message; freezes the list and hands ownership
// to the message (clearing the mutable bit so later builder edits re-copy).
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest buildPartial() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest(this);
int from_bitField0_ = bitField0_;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableMetadataMutations_ = tableMetadataMutations_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merge semantics for repeated fields: concatenation. If this builder's
// list is still empty it can share other's immutable list directly.
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.getDefaultInstance()) return this;
if (!other.tableMetadataMutations_.isEmpty()) {
if (tableMetadataMutations_.isEmpty()) {
tableMetadataMutations_ = other.tableMetadataMutations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.addAll(other.tableMetadataMutations_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from a stream, merging whatever was read even on failure so the
// partially-parsed data is not lost.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
// Copy-on-write guard; see the mutable bit 0x00000001 above.
private void ensureTableMetadataMutationsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*
* Returns an unmodifiable view of the builder's current list.
*/
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*
* Null elements are rejected eagerly; ByteString itself is immutable.
*/
public Builder setTableMetadataMutations(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.add(value);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder addAllTableMetadataMutations(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureTableMetadataMutationsIsMutable();
super.addAll(values, tableMetadataMutations_);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder clearTableMetadataMutations() {
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:CreateTableRequest)
}
static {
defaultInstance = new CreateTableRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:CreateTableRequest)
}
// Read-only accessor contract shared by CreateFunctionRequest and its Builder.
public interface CreateFunctionRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated bytes tableMetadataMutations = 1;
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*
* Each element is a serialized MutationProto (see the message's javadoc).
*/
java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
int getTableMetadataMutationsCount();
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
com.google.protobuf.ByteString getTableMetadataMutations(int index);
// required bool temporary = 2;
/**
* <code>required bool temporary = 2;</code>
*
* Presence check for the required flag; the message is uninitialized
* until it has been set.
*/
boolean hasTemporary();
/**
* <code>required bool temporary = 2;</code>
*/
boolean getTemporary();
// optional bool replace = 3;
/**
* <code>optional bool replace = 3;</code>
*/
boolean hasReplace();
/**
* <code>optional bool replace = 3;</code>
*
* Defaults to false when unset.
*/
boolean getReplace();
}
/**
* Protobuf type {@code CreateFunctionRequest}
*
* <pre>
* each byte array represents a MutationProto instance
* </pre>
*/
public static final class CreateFunctionRequest extends
com.google.protobuf.GeneratedMessage
implements CreateFunctionRequestOrBuilder {
// Use CreateFunctionRequest.newBuilder() to construct.
private CreateFunctionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private CreateFunctionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final CreateFunctionRequest defaultInstance;
public static CreateFunctionRequest getDefaultInstance() {
return defaultInstance;
}
public CreateFunctionRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private CreateFunctionRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
tableMetadataMutations_.add(input.readBytes());
break;
}
case 16: {
bitField0_ |= 0x00000001;
temporary_ = input.readBool();
break;
}
case 24: {
bitField0_ |= 0x00000002;
replace_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateFunctionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.Builder.class);
}
public static com.google.protobuf.Parser<CreateFunctionRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateFunctionRequest>() {
public CreateFunctionRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CreateFunctionRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<CreateFunctionRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return tableMetadataMutations_;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
// required bool temporary = 2;
public static final int TEMPORARY_FIELD_NUMBER = 2;
private boolean temporary_;
/**
* <code>required bool temporary = 2;</code>
*/
public boolean hasTemporary() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool temporary = 2;</code>
*/
public boolean getTemporary() {
return temporary_;
}
// optional bool replace = 3;
public static final int REPLACE_FIELD_NUMBER = 3;
private boolean replace_;
/**
* <code>optional bool replace = 3;</code>
*/
public boolean hasReplace() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool replace = 3;</code>
*/
public boolean getReplace() {
return replace_;
}
private void initFields() {
tableMetadataMutations_ = java.util.Collections.emptyList();
temporary_ = false;
replace_ = false;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasTemporary()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
output.writeBytes(1, tableMetadataMutations_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(2, temporary_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, replace_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(tableMetadataMutations_.get(i));
}
size += dataSize;
size += 1 * getTableMetadataMutationsList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, temporary_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, replace_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest) obj;
boolean result = true;
result = result && getTableMetadataMutationsList()
.equals(other.getTableMetadataMutationsList());
result = result && (hasTemporary() == other.hasTemporary());
if (hasTemporary()) {
result = result && (getTemporary()
== other.getTemporary());
}
result = result && (hasReplace() == other.hasReplace());
if (hasReplace()) {
result = result && (getReplace()
== other.getReplace());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getTableMetadataMutationsCount() > 0) {
hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
}
if (hasTemporary()) {
hash = (37 * hash) + TEMPORARY_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getTemporary());
}
if (hasReplace()) {
hash = (37 * hash) + REPLACE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getReplace());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code CreateFunctionRequest}
*
* <pre>
* each byte array represents a MutationProto instance
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateFunctionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
temporary_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
replace_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_CreateFunctionRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest getDefaultInstanceForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.getDefaultInstance();
}
// Builds the message, throwing if the required 'temporary' field is unset
// (see isInitialized()); use buildPartial() to skip that check.
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest build() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without checking required fields.
// Builder presence bits are remapped to the message's (denser) bit layout,
// and the repeated list is frozen via unmodifiableList before handing it over.
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest buildPartial() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Freeze the list and clear the mutability bit so later builder use re-copies it.
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableMetadataMutations_ = tableMetadataMutations_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.temporary_ = temporary_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.replace_ = replace_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Type-dispatching merge: uses the strongly typed overload for
// CreateFunctionRequest, otherwise falls back to reflective field merging.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another CreateFunctionRequest into this builder: repeated mutations
// are appended (or aliased directly when this builder's list is empty, since
// the other message's list is already immutable), and set scalar fields win.
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.getDefaultInstance()) return this;
if (!other.tableMetadataMutations_.isEmpty()) {
if (tableMetadataMutations_.isEmpty()) {
tableMetadataMutations_ = other.tableMetadataMutations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.addAll(other.tableMetadataMutations_);
}
onChanged();
}
if (other.hasTemporary()) {
setTemporary(other.getTemporary());
}
if (other.hasReplace()) {
setReplace(other.getReplace());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
// The only required field in this message is 'temporary'.
public final boolean isInitialized() {
if (!hasTemporary()) {
return false;
}
return true;
}
// Parses a CreateFunctionRequest from the wire and merges it into this builder.
// On parse failure the partially read message (if any) is still merged in the
// finally block before the exception propagates.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence/mutability bits for this builder's fields (bit 0x1 doubles as
// "tableMetadataMutations_ is a private mutable copy").
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// before the first mutation, then records that via bit 0x1.
private void ensureTableMetadataMutationsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
bitField0_ |= 0x00000001;
}
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @return an unmodifiable view of the mutation list currently in this builder
 */
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @return the number of serialized mutations currently in this builder
 */
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @param index zero-based position in the repeated field
 * @return the serialized mutation bytes at {@code index}
 */
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @param index zero-based position to overwrite
 * @param value replacement bytes; must not be null
 * @throws NullPointerException if {@code value} is null
 */
public Builder setTableMetadataMutations(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @param value bytes to append; must not be null
 * @throws NullPointerException if {@code value} is null
 */
public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * @param values elements to append in iteration order
 */
public Builder addAllTableMetadataMutations(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureTableMetadataMutationsIsMutable();
// GeneratedMessage.Builder.addAll also null-checks each element.
super.addAll(values, tableMetadataMutations_);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 *
 * Resets the repeated field to empty and drops the mutability bit.
 */
public Builder clearTableMetadataMutations() {
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// required bool temporary = 2;
private boolean temporary_ ;
/**
 * <code>required bool temporary = 2;</code>
 *
 * @return whether the required 'temporary' field has been explicitly set
 */
public boolean hasTemporary() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required bool temporary = 2;</code>
 *
 * @return the field value ({@code false} if unset)
 */
public boolean getTemporary() {
return temporary_;
}
/**
 * <code>required bool temporary = 2;</code>
 *
 * @param value new field value; also marks the field as present
 */
public Builder setTemporary(boolean value) {
bitField0_ |= 0x00000002;
temporary_ = value;
onChanged();
return this;
}
/**
 * <code>required bool temporary = 2;</code>
 *
 * Clears the presence bit and restores the default ({@code false}).
 */
public Builder clearTemporary() {
bitField0_ = (bitField0_ & ~0x00000002);
temporary_ = false;
onChanged();
return this;
}
// optional bool replace = 3;
private boolean replace_ ;
/**
 * <code>optional bool replace = 3;</code>
 *
 * @return whether the optional 'replace' field has been explicitly set
 */
public boolean hasReplace() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bool replace = 3;</code>
 *
 * @return the field value ({@code false} if unset)
 */
public boolean getReplace() {
return replace_;
}
/**
 * <code>optional bool replace = 3;</code>
 *
 * @param value new field value; also marks the field as present
 */
public Builder setReplace(boolean value) {
bitField0_ |= 0x00000004;
replace_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool replace = 3;</code>
 *
 * Clears the presence bit and restores the default ({@code false}).
 */
public Builder clearReplace() {
bitField0_ = (bitField0_ & ~0x00000004);
replace_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:CreateFunctionRequest)
}
// Eagerly creates the shared default instance used by getDefaultInstance()
// and by merge/parse code to detect no-op merges.
static {
defaultInstance = new CreateFunctionRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:CreateFunctionRequest)
}
/**
 * Read-only accessor contract shared by the {@code DropTableRequest} message
 * and its builder: repeated serialized metadata mutations, the required
 * table type string, and the optional cascade flag.
 */
public interface DropTableRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated bytes tableMetadataMutations = 1;
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
int getTableMetadataMutationsCount();
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
com.google.protobuf.ByteString getTableMetadataMutations(int index);
// required string tableType = 2;
/**
 * <code>required string tableType = 2;</code>
 */
boolean hasTableType();
/**
 * <code>required string tableType = 2;</code>
 */
java.lang.String getTableType();
/**
 * <code>required string tableType = 2;</code>
 */
com.google.protobuf.ByteString
getTableTypeBytes();
// optional bool cascade = 3;
/**
 * <code>optional bool cascade = 3;</code>
 */
boolean hasCascade();
/**
 * <code>optional bool cascade = 3;</code>
 */
boolean getCascade();
}
/**
 * Protobuf type {@code DropTableRequest}
 *
 * <p>Immutable request message carrying serialized metadata mutations, the
 * required table type, and an optional cascade flag. This is protocol buffer
 * compiler output (note the protoc insertion-point markers); regenerate from
 * the .proto definition instead of editing by hand.
 */
public static final class DropTableRequest extends
com.google.protobuf.GeneratedMessage
implements DropTableRequestOrBuilder {
// Use DropTableRequest.newBuilder() to construct.
private DropTableRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private DropTableRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final DropTableRequest defaultInstance;
public static DropTableRequest getDefaultInstance() {
return defaultInstance;
}
public DropTableRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tagged fields until EOF (tag 0),
// collecting unknown fields, and freezes the repeated list in the finally
// block so it is immutable even if parsing aborts mid-stream.
private DropTableRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
tableMetadataMutations_.add(input.readBytes());
break;
}
case 18: {
bitField0_ |= 0x00000001;
tableType_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000002;
cascade_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropTableRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.Builder.class);
}
// Stateless parser delegating to the wire-format constructor above.
public static com.google.protobuf.Parser<DropTableRequest> PARSER =
new com.google.protobuf.AbstractParser<DropTableRequest>() {
public DropTableRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DropTableRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<DropTableRequest> getParserForType() {
return PARSER;
}
// Presence bits: 0x1 = tableType, 0x2 = cascade (repeated fields need no bit).
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return tableMetadataMutations_;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
// required string tableType = 2;
public static final int TABLETYPE_FIELD_NUMBER = 2;
// Either a String or a ByteString; lazily decoded and cached (see getTableType).
private java.lang.Object tableType_;
/**
 * <code>required string tableType = 2;</code>
 */
public boolean hasTableType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <code>required string tableType = 2;</code>
 */
public java.lang.String getTableType() {
java.lang.Object ref = tableType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String only when the bytes are valid UTF-8, so the
// original bytes survive a round trip of malformed input.
if (bs.isValidUtf8()) {
tableType_ = s;
}
return s;
}
}
/**
 * <code>required string tableType = 2;</code>
 */
public com.google.protobuf.ByteString
getTableTypeBytes() {
java.lang.Object ref = tableType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// optional bool cascade = 3;
public static final int CASCADE_FIELD_NUMBER = 3;
private boolean cascade_;
/**
 * <code>optional bool cascade = 3;</code>
 */
public boolean hasCascade() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional bool cascade = 3;</code>
 */
public boolean getCascade() {
return cascade_;
}
// Sets all fields to their proto defaults; called before parsing.
private void initFields() {
tableMetadataMutations_ = java.util.Collections.emptyList();
tableType_ = "";
cascade_ = false;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
// The only required field in this message is tableType.
if (!hasTableType()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
output.writeBytes(1, tableMetadataMutations_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBytes(2, getTableTypeBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBool(3, cascade_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
// Computes (and memoizes) the byte size of the wire encoding.
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(tableMetadataMutations_.get(i));
}
size += dataSize;
// One tag byte per repeated element (field number 1 fits in one byte).
size += 1 * getTableMetadataMutationsList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getTableTypeBytes());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(3, cascade_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
// Field-by-field structural equality, including presence bits and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest) obj;
boolean result = true;
result = result && getTableMetadataMutationsList()
.equals(other.getTableMetadataMutationsList());
result = result && (hasTableType() == other.hasTableType());
if (hasTableType()) {
result = result && getTableType()
.equals(other.getTableType());
}
result = result && (hasCascade() == other.hasCascade());
if (hasCascade()) {
result = result && (getCascade()
== other.getCascade());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
// Memoized hash consistent with equals(); mixes only fields that are set.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getTableMetadataMutationsCount() > 0) {
hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
}
if (hasTableType()) {
hash = (37 * hash) + TABLETYPE_FIELD_NUMBER;
hash = (53 * hash) + getTableType().hashCode();
}
if (hasCascade()) {
hash = (37 * hash) + CASCADE_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getCascade());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points; all delegate to PARSER.
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Protobuf type {@code DropTableRequest}
 *
 * <p>Mutable builder companion; presence bits here are 0x1 = repeated-list
 * mutability, 0x2 = tableType, 0x4 = cascade, remapped in buildPartial().
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropTableRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropTableRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message-typed fields, so there are no nested field builders to create.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
// Resets all fields to defaults and clears their presence bits.
public Builder clear() {
super.clear();
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
tableType_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
cascade_ = false;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropTableRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest getDefaultInstanceForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.getDefaultInstance();
}
// Builds, throwing if the required tableType field is unset.
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest build() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new message without required-field checks,
// remapping builder presence bits to the message's bit layout.
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest buildPartial() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableMetadataMutations_ = tableMetadataMutations_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.tableType_ = tableType_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000002;
}
result.cascade_ = cascade_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges: repeated mutations appended (or aliased when this list is empty),
// set scalar fields from 'other' win, unknown fields are merged last.
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.getDefaultInstance()) return this;
if (!other.tableMetadataMutations_.isEmpty()) {
if (tableMetadataMutations_.isEmpty()) {
tableMetadataMutations_ = other.tableMetadataMutations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.addAll(other.tableMetadataMutations_);
}
onChanged();
}
if (other.hasTableType()) {
bitField0_ |= 0x00000002;
// Share the String/ByteString reference directly rather than re-encoding.
tableType_ = other.tableType_;
onChanged();
}
if (other.hasCascade()) {
setCascade(other.getCascade());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasTableType()) {
return false;
}
return true;
}
// Parses from the wire and merges; partially read data is merged even on failure.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
// Copy-on-write guard for the repeated list (bit 0x1 marks a private copy).
private void ensureTableMetadataMutationsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
bitField0_ |= 0x00000001;
}
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public Builder setTableMetadataMutations(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.set(index, value);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.add(value);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public Builder addAllTableMetadataMutations(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureTableMetadataMutationsIsMutable();
super.addAll(values, tableMetadataMutations_);
onChanged();
return this;
}
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
public Builder clearTableMetadataMutations() {
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// required string tableType = 2;
private java.lang.Object tableType_ = "";
/**
 * <code>required string tableType = 2;</code>
 */
public boolean hasTableType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>required string tableType = 2;</code>
 */
public java.lang.String getTableType() {
java.lang.Object ref = tableType_;
if (!(ref instanceof java.lang.String)) {
// Builder caches the decoded String unconditionally (unlike the message).
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
tableType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>required string tableType = 2;</code>
 */
public com.google.protobuf.ByteString
getTableTypeBytes() {
java.lang.Object ref = tableType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
tableType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>required string tableType = 2;</code>
 */
public Builder setTableType(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
tableType_ = value;
onChanged();
return this;
}
/**
 * <code>required string tableType = 2;</code>
 */
public Builder clearTableType() {
bitField0_ = (bitField0_ & ~0x00000002);
tableType_ = getDefaultInstance().getTableType();
onChanged();
return this;
}
/**
 * <code>required string tableType = 2;</code>
 */
public Builder setTableTypeBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
tableType_ = value;
onChanged();
return this;
}
// optional bool cascade = 3;
private boolean cascade_ ;
/**
 * <code>optional bool cascade = 3;</code>
 */
public boolean hasCascade() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional bool cascade = 3;</code>
 */
public boolean getCascade() {
return cascade_;
}
/**
 * <code>optional bool cascade = 3;</code>
 */
public Builder setCascade(boolean value) {
bitField0_ |= 0x00000004;
cascade_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool cascade = 3;</code>
 */
public Builder clearCascade() {
bitField0_ = (bitField0_ & ~0x00000004);
cascade_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:DropTableRequest)
}
// Eagerly creates the shared default instance returned by getDefaultInstance().
static {
defaultInstance = new DropTableRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:DropTableRequest)
}
/**
 * Read-only accessor contract shared by the {@code AddColumnRequest} message
 * and its builder: a single repeated field of serialized metadata mutations.
 */
public interface AddColumnRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated bytes tableMetadataMutations = 1;
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
int getTableMetadataMutationsCount();
/**
 * <code>repeated bytes tableMetadataMutations = 1;</code>
 */
com.google.protobuf.ByteString getTableMetadataMutations(int index);
}
/**
* Protobuf type {@code AddColumnRequest}
*/
public static final class AddColumnRequest extends
com.google.protobuf.GeneratedMessage
implements AddColumnRequestOrBuilder {
// Use AddColumnRequest.newBuilder() to construct.
private AddColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private AddColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final AddColumnRequest defaultInstance;
public static AddColumnRequest getDefaultInstance() {
return defaultInstance;
}
public AddColumnRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private AddColumnRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
tableMetadataMutations_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
    // Reflection support: descriptor and field-accessor table for this type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_AddColumnRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_AddColumnRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.Builder.class);
    }
public static com.google.protobuf.Parser<AddColumnRequest> PARSER =
new com.google.protobuf.AbstractParser<AddColumnRequest>() {
public AddColumnRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new AddColumnRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<AddColumnRequest> getParserForType() {
return PARSER;
}
    // repeated bytes tableMetadataMutations = 1;
    public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
    // After construction this list is either Collections.emptyList() or an
    // unmodifiableList wrapper (sealed by the parsing ctor / buildPartial).
    private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getTableMetadataMutationsList() {
      return tableMetadataMutations_;
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public int getTableMetadataMutationsCount() {
      return tableMetadataMutations_.size();
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
      return tableMetadataMutations_.get(index);
    }
    // Resets every field to its default value.
    private void initFields() {
      tableMetadataMutations_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    // No required fields in this message, so it is always initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes all elements of field 1 followed by any unknown fields.
    // getSerializedSize() is called first for its memoization side effect.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < tableMetadataMutations_.size(); i++) {
        output.writeBytes(1, tableMetadataMutations_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    // Size = per-element payload sizes + 1 tag byte per element (field
    // number 1 encodes in a single byte) + unknown-field bytes. Memoized.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < tableMetadataMutations_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(tableMetadataMutations_.get(i));
        }
        size += dataSize;
        size += 1 * getTableMetadataMutationsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    // Java-serialization hook inherited from GeneratedMessage.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Value equality over the repeated field and the unknown-field set.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest) obj;

      boolean result = true;
      result = result && getTableMetadataMutationsList()
          .equals(other.getTableMetadataMutationsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // 0 doubles as the "not yet computed" sentinel for the hash cache.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableMetadataMutationsCount() > 0) {
        hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Standard parseFrom entry points; every overload delegates to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Builder pre-populated from an existing message.
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code AddColumnRequest}
     *
     * Mutable builder. Bit 0 of {@code bitField0_} records whether
     * {@code tableMetadataMutations_} has been copied into a builder-owned
     * mutable list (copy-on-write over the shared empty/immutable list).
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_AddColumnRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_AddColumnRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields here, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_AddColumnRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.getDefaultInstance();
      }
      // build() enforces initialization; always succeeds for this type
      // (no required fields).
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Transfers the list into the new message, sealing it as unmodifiable
      // and clearing the mutable bit so further builder edits re-copy.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tableMetadataMutations_ = tableMetadataMutations_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Appends the other message's elements. When our list is still empty
      // we adopt the other message's (immutable) list directly and clear the
      // mutable bit; otherwise we copy-on-write and addAll.
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.getDefaultInstance()) return this;
        if (!other.tableMetadataMutations_.isEmpty()) {
          if (tableMetadataMutations_.isEmpty()) {
            tableMetadataMutations_ = other.tableMetadataMutations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTableMetadataMutationsIsMutable();
            tableMetadataMutations_.addAll(other.tableMetadataMutations_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Parses from a stream; on failure, merges whatever was parsed before
      // rethrowing so the builder reflects the partial message.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated bytes tableMetadataMutations = 1;
      private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
      // Copy-on-write: replace the possibly shared/immutable list with a
      // builder-owned ArrayList before the first mutation.
      private void ensureTableMetadataMutationsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       *
       * Returns a read-only view over the builder's current list.
       */
      public java.util.List<com.google.protobuf.ByteString>
          getTableMetadataMutationsList() {
        return java.util.Collections.unmodifiableList(tableMetadataMutations_);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public int getTableMetadataMutationsCount() {
        return tableMetadataMutations_.size();
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
        return tableMetadataMutations_.get(index);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder setTableMetadataMutations(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       *
       * Bulk append via the base-class helper.
       */
      public Builder addAllTableMetadataMutations(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureTableMetadataMutationsIsMutable();
        super.addAll(values, tableMetadataMutations_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder clearTableMetadataMutations() {
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:AddColumnRequest)
    }
    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new AddColumnRequest(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:AddColumnRequest)
  }
  /**
   * Read accessor contract shared by {@code DropColumnRequest} and its
   * Builder.
   */
  public interface DropColumnRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes tableMetadataMutations = 1;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    int getTableMetadataMutationsCount();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    com.google.protobuf.ByteString getTableMetadataMutations(int index);
  }
/**
* Protobuf type {@code DropColumnRequest}
*/
  // Immutable message with a single repeated-bytes field; structurally
  // identical to AddColumnRequest (same generator template).
  public static final class DropColumnRequest extends
      com.google.protobuf.GeneratedMessage
      implements DropColumnRequestOrBuilder {
    // Use DropColumnRequest.newBuilder() to construct.
    private DropColumnRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Singleton constructor for the static default instance; initFields()
    // is called separately by the static initializer.
    private DropColumnRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final DropColumnRequest defaultInstance;
    public static DropColumnRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DropColumnRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unrecognized wire fields, preserved for lossless reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor. Tag 10 (field 1, LENGTH_DELIMITED)
    // appends to tableMetadataMutations_; other tags go to unknownFields.
    // finally seals the list/unknown-field set even on partial parses.
    private DropColumnRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Lazily allocate the mutable list on the first element.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000001;
              }
              tableMetadataMutations_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection support.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropColumnRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropColumnRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.Builder.class);
    }

    // NOTE(review): mutable public static parser field; later protoc output
    // marks this final. Fix belongs in the generator, not a hand edit.
    public static com.google.protobuf.Parser<DropColumnRequest> PARSER =
        new com.google.protobuf.AbstractParser<DropColumnRequest>() {
      public DropColumnRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new DropColumnRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<DropColumnRequest> getParserForType() {
      return PARSER;
    }

    // repeated bytes tableMetadataMutations = 1;
    public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
    // Either Collections.emptyList() or an unmodifiableList wrapper.
    private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getTableMetadataMutationsList() {
      return tableMetadataMutations_;
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public int getTableMetadataMutationsCount() {
      return tableMetadataMutations_.size();
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
      return tableMetadataMutations_.get(index);
    }

    private void initFields() {
      tableMetadataMutations_ = java.util.Collections.emptyList();
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < tableMetadataMutations_.size(); i++) {
        output.writeBytes(1, tableMetadataMutations_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    // Payload sizes + 1 tag byte per element + unknown fields; memoized.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < tableMetadataMutations_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(tableMetadataMutations_.get(i));
        }
        size += dataSize;
        size += 1 * getTableMetadataMutationsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest) obj;

      boolean result = true;
      result = result && getTableMetadataMutationsList()
          .equals(other.getTableMetadataMutationsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // 0 doubles as the "not computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableMetadataMutationsCount() > 0) {
        hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard parseFrom entry points; all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code DropColumnRequest}
     *
     * Mutable builder; bit 0 of bitField0_ tracks copy-on-write ownership
     * of tableMetadataMutations_.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropColumnRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropColumnRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so nothing to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropColumnRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.getDefaultInstance();
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Transfers the list into the message, sealing it as unmodifiable.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tableMetadataMutations_ = tableMetadataMutations_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Adopts the other list directly when ours is empty, else appends
      // after copy-on-write.
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.getDefaultInstance()) return this;
        if (!other.tableMetadataMutations_.isEmpty()) {
          if (tableMetadataMutations_.isEmpty()) {
            tableMetadataMutations_ = other.tableMetadataMutations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTableMetadataMutationsIsMutable();
            tableMetadataMutations_.addAll(other.tableMetadataMutations_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Parses from a stream; merges any partial message before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated bytes tableMetadataMutations = 1;
      private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
      // Copy-on-write before the first mutation of the list.
      private void ensureTableMetadataMutationsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       *
       * Read-only view over the builder's current list.
       */
      public java.util.List<com.google.protobuf.ByteString>
          getTableMetadataMutationsList() {
        return java.util.Collections.unmodifiableList(tableMetadataMutations_);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public int getTableMetadataMutationsCount() {
        return tableMetadataMutations_.size();
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
        return tableMetadataMutations_.get(index);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder setTableMetadataMutations(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder addAllTableMetadataMutations(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureTableMetadataMutationsIsMutable();
        super.addAll(values, tableMetadataMutations_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public Builder clearTableMetadataMutations() {
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:DropColumnRequest)
    }

    // Eagerly create and initialize the shared default instance.
    static {
      defaultInstance = new DropColumnRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:DropColumnRequest)
  }
  /**
   * Read accessor contract shared by {@code DropFunctionRequest} and its
   * Builder. Fields: repeated bytes tableMetadataMutations = 1;
   * optional bool ifExists = 2.
   */
  public interface DropFunctionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes tableMetadataMutations = 1;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    int getTableMetadataMutationsCount();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    com.google.protobuf.ByteString getTableMetadataMutations(int index);

    // optional bool ifExists = 2;
    /**
     * <code>optional bool ifExists = 2;</code>
     */
    boolean hasIfExists();
    /**
     * <code>optional bool ifExists = 2;</code>
     */
    boolean getIfExists();
  }
/**
* Protobuf type {@code DropFunctionRequest}
*/
public static final class DropFunctionRequest extends
com.google.protobuf.GeneratedMessage
implements DropFunctionRequestOrBuilder {
// Use DropFunctionRequest.newBuilder() to construct.
    // Use DropFunctionRequest.newBuilder() to construct.
    private DropFunctionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Singleton constructor for the static default instance.
    private DropFunctionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final DropFunctionRequest defaultInstance;
    public static DropFunctionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public DropFunctionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Unrecognized wire fields, preserved for lossless reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
private DropFunctionRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
mutable_bitField0_ |= 0x00000001;
}
tableMetadataMutations_.add(input.readBytes());
break;
}
case 16: {
bitField0_ |= 0x00000001;
ifExists_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropFunctionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.Builder.class);
}
public static com.google.protobuf.Parser<DropFunctionRequest> PARSER =
new com.google.protobuf.AbstractParser<DropFunctionRequest>() {
public DropFunctionRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new DropFunctionRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<DropFunctionRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return tableMetadataMutations_;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
// optional bool ifExists = 2;
public static final int IFEXISTS_FIELD_NUMBER = 2;
private boolean ifExists_;
/**
* <code>optional bool ifExists = 2;</code>
*/
public boolean hasIfExists() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool ifExists = 2;</code>
*/
public boolean getIfExists() {
return ifExists_;
}
private void initFields() {
tableMetadataMutations_ = java.util.Collections.emptyList();
ifExists_ = false;
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
output.writeBytes(1, tableMetadataMutations_.get(i));
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeBool(2, ifExists_);
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < tableMetadataMutations_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream
.computeBytesSizeNoTag(tableMetadataMutations_.get(i));
}
size += dataSize;
size += 1 * getTableMetadataMutationsList().size();
}
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeBoolSize(2, ifExists_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest)) {
return super.equals(obj);
}
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest) obj;
boolean result = true;
result = result && getTableMetadataMutationsList()
.equals(other.getTableMetadataMutationsList());
result = result && (hasIfExists() == other.hasIfExists());
if (hasIfExists()) {
result = result && (getIfExists()
== other.getIfExists());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getTableMetadataMutationsCount() > 0) {
hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
}
if (hasIfExists()) {
hash = (37 * hash) + IFEXISTS_FIELD_NUMBER;
hash = (53 * hash) + hashBoolean(getIfExists());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code DropFunctionRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropFunctionRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.Builder.class);
}
// Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
ifExists_ = false;
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_DropFunctionRequest_descriptor;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest getDefaultInstanceForType() {
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.getDefaultInstance();
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest build() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest buildPartial() {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableMetadataMutations_ = tableMetadataMutations_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000001;
}
result.ifExists_ = ifExists_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest) {
return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest other) {
if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.getDefaultInstance()) return this;
if (!other.tableMetadataMutations_.isEmpty()) {
if (tableMetadataMutations_.isEmpty()) {
tableMetadataMutations_ = other.tableMetadataMutations_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.addAll(other.tableMetadataMutations_);
}
onChanged();
}
if (other.hasIfExists()) {
setIfExists(other.getIfExists());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated bytes tableMetadataMutations = 1;
private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
private void ensureTableMetadataMutationsIsMutable() {
if (!((bitField0_ & 0x00000001) == 0x00000001)) {
tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public java.util.List<com.google.protobuf.ByteString>
getTableMetadataMutationsList() {
return java.util.Collections.unmodifiableList(tableMetadataMutations_);
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public int getTableMetadataMutationsCount() {
return tableMetadataMutations_.size();
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
return tableMetadataMutations_.get(index);
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder setTableMetadataMutations(
int index, com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.set(index, value);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureTableMetadataMutationsIsMutable();
tableMetadataMutations_.add(value);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder addAllTableMetadataMutations(
java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
ensureTableMetadataMutationsIsMutable();
super.addAll(values, tableMetadataMutations_);
onChanged();
return this;
}
/**
* <code>repeated bytes tableMetadataMutations = 1;</code>
*/
public Builder clearTableMetadataMutations() {
tableMetadataMutations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
// optional bool ifExists = 2;
private boolean ifExists_ ;
/**
* <code>optional bool ifExists = 2;</code>
*/
public boolean hasIfExists() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bool ifExists = 2;</code>
*/
public boolean getIfExists() {
return ifExists_;
}
/**
* <code>optional bool ifExists = 2;</code>
*/
public Builder setIfExists(boolean value) {
bitField0_ |= 0x00000002;
ifExists_ = value;
onChanged();
return this;
}
/**
* <code>optional bool ifExists = 2;</code>
*/
public Builder clearIfExists() {
bitField0_ = (bitField0_ & ~0x00000002);
ifExists_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:DropFunctionRequest)
}
static {
defaultInstance = new DropFunctionRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:DropFunctionRequest)
}
  /**
   * Read-side accessor contract for {@code UpdateIndexStateRequest},
   * implemented by both the immutable message and its Builder.
   *
   * NOTE(review): protoc-generated code; regenerate from the .proto file
   * rather than editing by hand.
   */
  public interface UpdateIndexStateRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated bytes tableMetadataMutations = 1;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    java.util.List<com.google.protobuf.ByteString> getTableMetadataMutationsList();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    int getTableMetadataMutationsCount();
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    com.google.protobuf.ByteString getTableMetadataMutations(int index);
  }
/**
* Protobuf type {@code UpdateIndexStateRequest}
*/
public static final class UpdateIndexStateRequest extends
com.google.protobuf.GeneratedMessage
implements UpdateIndexStateRequestOrBuilder {
    // Use UpdateIndexStateRequest.newBuilder() to construct.
    private UpdateIndexStateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // 'noInit' constructor used only for the shared default instance; fields
    // are populated afterwards by initFields() in the static initializer.
    private UpdateIndexStateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final UpdateIndexStateRequest defaultInstance;
    public static UpdateIndexStateRequest getDefaultInstance() {
      return defaultInstance;
    }
    public UpdateIndexStateRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
    // Fields that arrived on the wire with unrecognized tags; preserved so
    // they round-trip on re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor invoked by PARSER.parsePartialFrom().
    private UpdateIndexStateRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of input.
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 10 = field 1, wire type 2 (length-delimited bytes).
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                // Lazily switch from the immutable empty default to a
                // mutable list on first element.
                tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000001;
              }
              tableMetadataMutations_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field even on error so the partially parsed
        // message attached to the exception is safe to expose.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_UpdateIndexStateRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_UpdateIndexStateRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.Builder.class);
    }
    // Parser used by all static parseFrom() entry points.
    public static com.google.protobuf.Parser<UpdateIndexStateRequest> PARSER =
        new com.google.protobuf.AbstractParser<UpdateIndexStateRequest>() {
      public UpdateIndexStateRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new UpdateIndexStateRequest(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<UpdateIndexStateRequest> getParserForType() {
      return PARSER;
    }
    // repeated bytes tableMetadataMutations = 1;
    public static final int TABLEMETADATAMUTATIONS_FIELD_NUMBER = 1;
    private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_;
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getTableMetadataMutationsList() {
      return tableMetadataMutations_;
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public int getTableMetadataMutationsCount() {
      return tableMetadataMutations_.size();
    }
    /**
     * <code>repeated bytes tableMetadataMutations = 1;</code>
     */
    public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
      return tableMetadataMutations_.get(index);
    }
    // Sets proto2 defaults; called from both the parsing and noInit paths.
    private void initFields() {
      tableMetadataMutations_ = java.util.Collections.emptyList();
    }
    // -1 = not computed, 0 = false, 1 = true (no required fields here, so
    // this always resolves to true).
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < tableMetadataMutations_.size(); i++) {
        output.writeBytes(1, tableMetadataMutations_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < tableMetadataMutations_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(tableMetadataMutations_.get(i));
        }
        size += dataSize;
        // One 1-byte tag per repeated element (field number 1 fits a single
        // tag byte).
        size += 1 * getTableMetadataMutationsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest) obj;
      boolean result = true;
      result = result && getTableMetadataMutationsList()
          .equals(other.getTableMetadataMutationsList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getTableMetadataMutationsCount() > 0) {
        hash = (37 * hash) + TABLEMETADATAMUTATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getTableMetadataMutationsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory / conversion methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
/**
* Protobuf type {@code UpdateIndexStateRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_UpdateIndexStateRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_UpdateIndexStateRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // No message-typed fields, so nothing to eagerly initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_UpdateIndexStateRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.getDefaultInstance();
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest(this);
        int from_bitField0_ = bitField0_;
        // Hand the (frozen) list to the message; the builder drops its
        // mutability bit so later mutation re-copies.
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = java.util.Collections.unmodifiableList(tableMetadataMutations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tableMetadataMutations_ = tableMetadataMutations_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.getDefaultInstance()) return this;
        if (!other.tableMetadataMutations_.isEmpty()) {
          if (tableMetadataMutations_.isEmpty()) {
            // Share the other message's immutable list; clear the
            // mutability bit accordingly.
            tableMetadataMutations_ = other.tableMetadataMutations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTableMetadataMutationsIsMutable();
            tableMetadataMutations_.addAll(other.tableMetadataMutations_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          // Merge even the partially parsed message so no data is lost.
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Bit 0 = tableMetadataMutations list is privately owned (mutable).
      private int bitField0_;
      // repeated bytes tableMetadataMutations = 1;
      private java.util.List<com.google.protobuf.ByteString> tableMetadataMutations_ = java.util.Collections.emptyList();
      // Copy-on-write: replace any shared/immutable list with a private
      // ArrayList before mutating.
      private void ensureTableMetadataMutationsIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          tableMetadataMutations_ = new java.util.ArrayList<com.google.protobuf.ByteString>(tableMetadataMutations_);
          bitField0_ |= 0x00000001;
         }
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      // Returns a read-only view; callers must not attempt to modify it.
      public java.util.List<com.google.protobuf.ByteString>
          getTableMetadataMutationsList() {
        return java.util.Collections.unmodifiableList(tableMetadataMutations_);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public int getTableMetadataMutationsCount() {
        return tableMetadataMutations_.size();
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      public com.google.protobuf.ByteString getTableMetadataMutations(int index) {
        return tableMetadataMutations_.get(index);
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      // Replaces the element at `index`; rejects null (proto2 bytes fields
      // cannot hold null values).
      public Builder setTableMetadataMutations(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      // Appends a single value to the repeated field; rejects null.
      public Builder addTableMetadataMutations(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTableMetadataMutationsIsMutable();
        tableMetadataMutations_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      // Bulk append; GeneratedMessage.Builder.addAll performs the null checks.
      public Builder addAllTableMetadataMutations(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureTableMetadataMutationsIsMutable();
        super.addAll(values, tableMetadataMutations_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes tableMetadataMutations = 1;</code>
       */
      // Resets the field to the shared empty list and clears the mutable bit.
      public Builder clearTableMetadataMutations() {
        tableMetadataMutations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
// @@protoc_insertion_point(builder_scope:UpdateIndexStateRequest)
}
static {
defaultInstance = new UpdateIndexStateRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:UpdateIndexStateRequest)
}
  // Read-accessor interface for ClearCacheRequest. The message declares no
  // fields, so nothing is added beyond MessageOrBuilder.
  public interface ClearCacheRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code ClearCacheRequest}
   *
   * <p>protoc-generated message with no declared fields: parsing preserves any
   * tags it encounters in the unknown-field set, and serialization writes only
   * those unknown fields back out. Generated code — do not edit by hand.
   */
  public static final class ClearCacheRequest extends
      com.google.protobuf.GeneratedMessage
      implements ClearCacheRequestOrBuilder {
    // Use ClearCacheRequest.newBuilder() to construct.
    private ClearCacheRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer below to create the
    // shared default instance.
    private ClearCacheRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    // Singleton default instance, assigned in the static initializer at the
    // bottom of this class.
    private static final ClearCacheRequest defaultInstance;
    public static ClearCacheRequest getDefaultInstance() {
      return defaultInstance;
    }
    public ClearCacheRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing ctor: no known fields exist, so every non-zero tag
    // is routed to parseUnknownField and retained for round-tripping.
    private ClearCacheRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.Builder.class);
    }
    // Shared parser; all static parseFrom overloads delegate to it.
    public static com.google.protobuf.Parser<ClearCacheRequest> PARSER =
        new com.google.protobuf.AbstractParser<ClearCacheRequest>() {
      public ClearCacheRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ClearCacheRequest(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ClearCacheRequest> getParserForType() {
      return PARSER;
    }
    // No fields to initialize.
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: no required fields are declared.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Only the preserved unknown fields are written to the wire.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Equality and hashing depend only on the unknown-field set, since the
    // message declares no fields of its own.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest) obj;
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers: all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ClearCacheRequest}
     *
     * <p>Builder for the field-less message; merging only combines unknown
     * fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested builders to pre-create for this message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.getDefaultInstance();
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest(this);
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // @@protoc_insertion_point(builder_scope:ClearCacheRequest)
    }
    // Creates the singleton default instance referenced by getDefaultInstance().
    static {
      defaultInstance = new ClearCacheRequest(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:ClearCacheRequest)
  }
  // Read-accessor interface for ClearCacheResponse. The message declares no
  // fields, so nothing is added beyond MessageOrBuilder.
  public interface ClearCacheResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code ClearCacheResponse}
   *
   * <p>protoc-generated message with no declared fields: parsing preserves any
   * tags it encounters in the unknown-field set, and serialization writes only
   * those unknown fields back out. Generated code — do not edit by hand.
   */
  public static final class ClearCacheResponse extends
      com.google.protobuf.GeneratedMessage
      implements ClearCacheResponseOrBuilder {
    // Use ClearCacheResponse.newBuilder() to construct.
    private ClearCacheResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer below to create the
    // shared default instance.
    private ClearCacheResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    // Singleton default instance, assigned in the static initializer at the
    // bottom of this class.
    private static final ClearCacheResponse defaultInstance;
    public static ClearCacheResponse getDefaultInstance() {
      return defaultInstance;
    }
    public ClearCacheResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing ctor: no known fields exist, so every non-zero tag
    // is routed to parseUnknownField and retained for round-tripping.
    private ClearCacheResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheResponse_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.Builder.class);
    }
    // Shared parser; all static parseFrom overloads delegate to it.
    public static com.google.protobuf.Parser<ClearCacheResponse> PARSER =
        new com.google.protobuf.AbstractParser<ClearCacheResponse>() {
      public ClearCacheResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ClearCacheResponse(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ClearCacheResponse> getParserForType() {
      return PARSER;
    }
    // No fields to initialize.
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: no required fields are declared.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Only the preserved unknown fields are written to the wire.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Equality and hashing depend only on the unknown-field set, since the
    // message declares no fields of its own.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse) obj;
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers: all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ClearCacheResponse}
     *
     * <p>Builder for the field-less message; merging only combines unknown
     * fields.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheResponse_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested builders to pre-create for this message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearCacheResponse_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance();
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse(this);
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // @@protoc_insertion_point(builder_scope:ClearCacheResponse)
    }
    // Creates the singleton default instance referenced by getDefaultInstance().
    static {
      defaultInstance = new ClearCacheResponse(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:ClearCacheResponse)
  }
  // Read-accessor interface for GetVersionRequest. The message declares no
  // fields, so nothing is added beyond MessageOrBuilder.
  public interface GetVersionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
/**
* Protobuf type {@code GetVersionRequest}
*/
public static final class GetVersionRequest extends
com.google.protobuf.GeneratedMessage
implements GetVersionRequestOrBuilder {
// Use GetVersionRequest.newBuilder() to construct.
    private GetVersionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer to create the shared
    // default instance.
    private GetVersionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    // Singleton default instance, assigned in the class's static initializer.
    private static final GetVersionRequest defaultInstance;
    public static GetVersionRequest getDefaultInstance() {
      return defaultInstance;
    }
    public GetVersionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing ctor: no known fields exist, so every non-zero tag
    // is routed to parseUnknownField and retained for round-tripping.
    private GetVersionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.Builder.class);
    }
    // Shared parser; the static parseFrom overloads delegate to it.
    public static com.google.protobuf.Parser<GetVersionRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetVersionRequest>() {
      public GetVersionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetVersionRequest(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<GetVersionRequest> getParserForType() {
      return PARSER;
    }
    // No fields to initialize.
    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    // Always initialized: no required fields are declared.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Only the preserved unknown fields are written to the wire.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Equality and hashing depend only on the unknown-field set, since the
    // message declares no fields of its own.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest) obj;
      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse helpers: all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Builder factory methods.  newBuilder(prototype) seeds a fresh builder
    // from the prototype (for this field-less message, only unknown fields
    // carry over); toBuilder() round-trips this instance through a builder.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetVersionRequest}
     *
     * <p>Generated mutable builder for the field-less GetVersionRequest
     * message; since the message has no declared fields, the builder only
     * manages unknown fields inherited from the wire.  Do not edit by hand;
     * regenerate from the .proto definition instead.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Intentionally empty: there are no nested-message field builders to
      // force-initialize for this message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.getDefaultInstance();
      }
      // build() throws if required fields are missing; buildPartial() never
      // throws (this message has no required fields, so both always succeed).
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest(this);
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        return true;
      }
      // Stream merge: on a parse failure the partially-decoded message (if
      // any) is still merged in the finally block before rethrowing, matching
      // standard protobuf partial-merge semantics.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // @@protoc_insertion_point(builder_scope:GetVersionRequest)
    }
    // Eagerly create the shared immutable default (empty) instance returned
    // by getDefaultInstance().
    static {
      defaultInstance = new GetVersionRequest(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:GetVersionRequest)
}
  /**
   * Read-side accessor interface for {@code GetVersionResponse}, implemented
   * by both the immutable message and its Builder.
   */
  public interface GetVersionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 version = 1;
    /**
     * <code>required int64 version = 1;</code>
     */
    boolean hasVersion();
    /**
     * <code>required int64 version = 1;</code>
     */
    long getVersion();
  }
  /**
   * Protobuf type {@code GetVersionResponse}
   *
   * <p>protoc-generated immutable message carrying a single required
   * {@code int64 version} field.  Do not edit by hand; regenerate from the
   * .proto definition instead.
   */
  public static final class GetVersionResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetVersionResponseOrBuilder {
    // Use GetVersionResponse.newBuilder() to construct.
    private GetVersionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor exists only to create the shared default instance.
    private GetVersionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final GetVersionResponse defaultInstance;
    public static GetVersionResponse getDefaultInstance() {
      return defaultInstance;
    }
    public GetVersionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0); unrecognized tags are preserved in unknownFields.
    private GetVersionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Note: 'default' is emitted before 'case 8'; Java switch
            // dispatch is label-based, so the ordering is irrelevant.
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // tag 8 = field 1 (version), wire type 0 (varint).
              bitField0_ |= 0x00000001;
              version_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionResponse_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.Builder.class);
    }
    // Shared stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<GetVersionResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetVersionResponse>() {
      public GetVersionResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetVersionResponse(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<GetVersionResponse> getParserForType() {
      return PARSER;
    }
    // Presence bitmask: bit 0 tracks whether 'version' was explicitly set.
    private int bitField0_;
    // required int64 version = 1;
    public static final int VERSION_FIELD_NUMBER = 1;
    private long version_;
    /**
     * <code>required int64 version = 1;</code>
     */
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int64 version = 1;</code>
     */
    public long getVersion() {
      return version_;
    }
    private void initFields() {
      version_ = 0L;
    }
    // Memoized initialization check: -1 unknown, 0 missing required field,
    // 1 fully initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt64(1, version_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, version_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Value equality over the presence bit, the field value, and unknown
    // fields; consistent with hashCode() below.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse) obj;

      boolean result = true;
      result = result && (hasVersion() == other.hasVersion());
      if (hasVersion()) {
        result = result && (getVersion()
            == other.getVersion());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getVersion());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points: all delegate to PARSER (see above).
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetVersionResponse}
     *
     * <p>Generated mutable builder; tracks presence of the single required
     * {@code version} field via bit 0 of its own bitField0_.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionResponse_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Intentionally empty: no nested-message field builders to initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        version_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_GetVersionResponse_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance();
      }
      // build() throws if the required 'version' field is unset;
      // buildPartial() never throws.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.version_ = version_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance()) return this;
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      public final boolean isInitialized() {
        if (!hasVersion()) {

          return false;
        }
        return true;
      }
      // Stream merge: partially decoded data is still merged (finally block)
      // before the parse exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required int64 version = 1;
      private long version_ ;
      /**
       * <code>required int64 version = 1;</code>
       */
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int64 version = 1;</code>
       */
      public long getVersion() {
        return version_;
      }
      /**
       * <code>required int64 version = 1;</code>
       */
      public Builder setVersion(long value) {
        bitField0_ |= 0x00000001;
        version_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 version = 1;</code>
       */
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000001);
        version_ = 0L;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:GetVersionResponse)
    }
    // Eagerly create the shared immutable default instance.
    static {
      defaultInstance = new GetVersionResponse(true);
      defaultInstance.initFields();
    }
    // @@protoc_insertion_point(class_scope:GetVersionResponse)
  }
  /**
   * Read-side accessor interface for {@code ClearTableFromCacheRequest},
   * implemented by both the immutable message and its Builder.  All four
   * fields are required on the wire.
   */
  public interface ClearTableFromCacheRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes tenantId = 1;
    /**
     * <code>required bytes tenantId = 1;</code>
     */
    boolean hasTenantId();
    /**
     * <code>required bytes tenantId = 1;</code>
     */
    com.google.protobuf.ByteString getTenantId();

    // required bytes schemaName = 2;
    /**
     * <code>required bytes schemaName = 2;</code>
     */
    boolean hasSchemaName();
    /**
     * <code>required bytes schemaName = 2;</code>
     */
    com.google.protobuf.ByteString getSchemaName();

    // required bytes tableName = 3;
    /**
     * <code>required bytes tableName = 3;</code>
     */
    boolean hasTableName();
    /**
     * <code>required bytes tableName = 3;</code>
     */
    com.google.protobuf.ByteString getTableName();

    // required int64 clientTimestamp = 4;
    /**
     * <code>required int64 clientTimestamp = 4;</code>
     */
    boolean hasClientTimestamp();
    /**
     * <code>required int64 clientTimestamp = 4;</code>
     */
    long getClientTimestamp();
  }
/**
* Protobuf type {@code ClearTableFromCacheRequest}
*/
public static final class ClearTableFromCacheRequest extends
com.google.protobuf.GeneratedMessage
implements ClearTableFromCacheRequestOrBuilder {
    // Use ClearTableFromCacheRequest.newBuilder() to construct.
    private ClearTableFromCacheRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor exists only to create the shared default instance.
    private ClearTableFromCacheRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final ClearTableFromCacheRequest defaultInstance;
    public static ClearTableFromCacheRequest getDefaultInstance() {
      return defaultInstance;
    }
    public ClearTableFromCacheRequest getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0); unrecognized tags are preserved in unknownFields.
    // Tags: 10/18/26 = length-delimited bytes fields 1-3; 32 = varint field 4.
    private ClearTableFromCacheRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              tenantId_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              schemaName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              tableName_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              clientTimestamp_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheRequest_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.Builder.class);
    }
    // Shared stateless parser delegating to the parsing constructor above.
    public static com.google.protobuf.Parser<ClearTableFromCacheRequest> PARSER =
        new com.google.protobuf.AbstractParser<ClearTableFromCacheRequest>() {
      public ClearTableFromCacheRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ClearTableFromCacheRequest(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ClearTableFromCacheRequest> getParserForType() {
      return PARSER;
    }
    // Presence bitmask: bits 0-3 track fields 1-4 respectively.
    private int bitField0_;
    // required bytes tenantId = 1;
    public static final int TENANTID_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString tenantId_;
    /**
     * <code>required bytes tenantId = 1;</code>
     */
    public boolean hasTenantId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes tenantId = 1;</code>
     */
    public com.google.protobuf.ByteString getTenantId() {
      return tenantId_;
    }
    // required bytes schemaName = 2;
    public static final int SCHEMANAME_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString schemaName_;
    /**
     * <code>required bytes schemaName = 2;</code>
     */
    public boolean hasSchemaName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required bytes schemaName = 2;</code>
     */
    public com.google.protobuf.ByteString getSchemaName() {
      return schemaName_;
    }
    // required bytes tableName = 3;
    public static final int TABLENAME_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString tableName_;
    /**
     * <code>required bytes tableName = 3;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required bytes tableName = 3;</code>
     */
    public com.google.protobuf.ByteString getTableName() {
      return tableName_;
    }
    // required int64 clientTimestamp = 4;
    public static final int CLIENTTIMESTAMP_FIELD_NUMBER = 4;
    private long clientTimestamp_;
    /**
     * <code>required int64 clientTimestamp = 4;</code>
     */
    public boolean hasClientTimestamp() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>required int64 clientTimestamp = 4;</code>
     */
    public long getClientTimestamp() {
      return clientTimestamp_;
    }
    // Reset all fields to their proto defaults (empty bytes / 0L).
    private void initFields() {
      tenantId_ = com.google.protobuf.ByteString.EMPTY;
      schemaName_ = com.google.protobuf.ByteString.EMPTY;
      tableName_ = com.google.protobuf.ByteString.EMPTY;
      clientTimestamp_ = 0L;
    }
    // Memoized initialization check: -1 unknown, 0 missing required field,
    // 1 fully initialized.  All four fields are required.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      if (!hasTenantId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasSchemaName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTableName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasClientTimestamp()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only fields whose presence bits are set, then any unknown
    // fields carried over from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, tenantId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, schemaName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, tableName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt64(4, clientTimestamp_);
      }
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, tenantId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, schemaName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, tableName_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, clientTimestamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Value equality over all four presence bits, field values, and unknown
    // fields; consistent with hashCode() below.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest) obj;

      boolean result = true;
      result = result && (hasTenantId() == other.hasTenantId());
      if (hasTenantId()) {
        result = result && getTenantId()
            .equals(other.getTenantId());
      }
      result = result && (hasSchemaName() == other.hasSchemaName());
      if (hasSchemaName()) {
        result = result && getSchemaName()
            .equals(other.getSchemaName());
      }
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasClientTimestamp() == other.hasClientTimestamp());
      if (hasClientTimestamp()) {
        result = result && (getClientTimestamp()
            == other.getClientTimestamp());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTenantId()) {
        hash = (37 * hash) + TENANTID_FIELD_NUMBER;
        hash = (53 * hash) + getTenantId().hashCode();
      }
      if (hasSchemaName()) {
        hash = (37 * hash) + SCHEMANAME_FIELD_NUMBER;
        hash = (53 * hash) + getSchemaName().hashCode();
      }
      if (hasTableName()) {
        hash = (37 * hash) + TABLENAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasClientTimestamp()) {
        hash = (37 * hash) + CLIENTTIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getClientTimestamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
    // Static parse entry points and builder factories; all parsing delegates
    // to the PARSER singleton (see above).
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ClearTableFromCacheRequest}
     *
     * <p>Generated builder for {@code ClearTableFromCacheRequest}. Accumulates the four
     * required fields (tenantId, schemaName, tableName, clientTimestamp), tracking field
     * presence in {@code bitField0_}, and emits immutable messages from {@code build()}.
     *
     * <p>NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
     * {@code .proto} definition instead.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheRequest_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Empty body: this message has no sub-message fields, so there are no nested
      // field builders to force-initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      // Resets every field to its default value and clears its presence bit.
      public Builder clear() {
        super.clear();
        tenantId_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        schemaName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        tableName_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        clientTimestamp_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheRequest_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.getDefaultInstance();
      }
      // Builds the message and verifies all required fields are set, throwing an
      // UninitializedMessageException (wrapped) otherwise.
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      // Copies builder state into a new message, translating builder presence bits into
      // the message's bitField0_. Does NOT check required fields (unlike build()).
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.tenantId_ = tenantId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.schemaName_ = schemaName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.tableName_ = tableName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.clientTimestamp_ = clientTimestamp_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
      // Type-dispatching merge: uses the field-wise overload for same-type messages,
      // otherwise falls back to the generic descriptor-based merge.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Field-wise merge: copies only the fields that are set on 'other', plus its
      // unknown fields. Merging the default instance is a no-op.
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.getDefaultInstance()) return this;
        if (other.hasTenantId()) {
          setTenantId(other.getTenantId());
        }
        if (other.hasSchemaName()) {
          setSchemaName(other.getSchemaName());
        }
        if (other.hasTableName()) {
          setTableName(other.getTableName());
        }
        if (other.hasClientTimestamp()) {
          setClientTimestamp(other.getClientTimestamp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // All four fields are declared 'required' in the .proto, so each must be set.
      public final boolean isInitialized() {
        if (!hasTenantId()) {
          return false;
        }
        if (!hasSchemaName()) {
          return false;
        }
        if (!hasTableName()) {
          return false;
        }
        if (!hasClientTimestamp()) {
          return false;
        }
        return true;
      }
      // Parses one message from the wire and merges it into this builder. On parse
      // failure, any partially parsed message is still merged (finally block) before
      // the exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmask: bit 0 = tenantId, bit 1 = schemaName, bit 2 = tableName,
      // bit 3 = clientTimestamp.
      private int bitField0_;
      // required bytes tenantId = 1;
      private com.google.protobuf.ByteString tenantId_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes tenantId = 1;</code>
       */
      public boolean hasTenantId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes tenantId = 1;</code>
       */
      public com.google.protobuf.ByteString getTenantId() {
        return tenantId_;
      }
      /**
       * <code>required bytes tenantId = 1;</code>
       */
      public Builder setTenantId(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        tenantId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes tenantId = 1;</code>
       */
      public Builder clearTenantId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        tenantId_ = getDefaultInstance().getTenantId();
        onChanged();
        return this;
      }
      // required bytes schemaName = 2;
      private com.google.protobuf.ByteString schemaName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes schemaName = 2;</code>
       */
      public boolean hasSchemaName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required bytes schemaName = 2;</code>
       */
      public com.google.protobuf.ByteString getSchemaName() {
        return schemaName_;
      }
      /**
       * <code>required bytes schemaName = 2;</code>
       */
      public Builder setSchemaName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        schemaName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes schemaName = 2;</code>
       */
      public Builder clearSchemaName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        schemaName_ = getDefaultInstance().getSchemaName();
        onChanged();
        return this;
      }
      // required bytes tableName = 3;
      private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes tableName = 3;</code>
       */
      public boolean hasTableName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required bytes tableName = 3;</code>
       */
      public com.google.protobuf.ByteString getTableName() {
        return tableName_;
      }
      /**
       * <code>required bytes tableName = 3;</code>
       */
      public Builder setTableName(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        tableName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes tableName = 3;</code>
       */
      public Builder clearTableName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        tableName_ = getDefaultInstance().getTableName();
        onChanged();
        return this;
      }
      // required int64 clientTimestamp = 4;
      private long clientTimestamp_ ;
      /**
       * <code>required int64 clientTimestamp = 4;</code>
       */
      public boolean hasClientTimestamp() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>required int64 clientTimestamp = 4;</code>
       */
      public long getClientTimestamp() {
        return clientTimestamp_;
      }
      /**
       * <code>required int64 clientTimestamp = 4;</code>
       */
      public Builder setClientTimestamp(long value) {
        bitField0_ |= 0x00000008;
        clientTimestamp_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 clientTimestamp = 4;</code>
       */
      public Builder clearClientTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000008);
        clientTimestamp_ = 0L;
        onChanged();
        return this;
      }
      // @@protoc_insertion_point(builder_scope:ClearTableFromCacheRequest)
    }
    // Eagerly create the shared default (empty) instance returned by getDefaultInstance().
    static {
      defaultInstance = new ClearTableFromCacheRequest(true);
      defaultInstance.initFields();
    }
// @@protoc_insertion_point(class_scope:ClearTableFromCacheRequest)
}
  /**
   * Accessor interface for {@code ClearTableFromCacheResponse}. The message declares no
   * fields, so only the base {@link com.google.protobuf.MessageOrBuilder} contract applies.
   */
  public interface ClearTableFromCacheResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code ClearTableFromCacheResponse}
   *
   * <p>Empty (field-less) response message for the {@code clearTableFromCache} RPC.
   * Any bytes received on the wire are retained as unknown fields.
   *
   * <p>NOTE(review): protoc-generated code — do not hand-edit; regenerate from the
   * {@code .proto} definition instead.
   */
  public static final class ClearTableFromCacheResponse extends
      com.google.protobuf.GeneratedMessage
      implements ClearTableFromCacheResponseOrBuilder {
    // Use ClearTableFromCacheResponse.newBuilder() to construct.
    private ClearTableFromCacheResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the shared default instance below.
    private ClearTableFromCacheResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private static final ClearTableFromCacheResponse defaultInstance;
    /** Returns the shared immutable default (empty) instance. */
    public static ClearTableFromCacheResponse getDefaultInstance() {
      return defaultInstance;
    }
    public ClearTableFromCacheResponse getDefaultInstanceForType() {
      return defaultInstance;
    }
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format constructor: the message declares no fields, so every tag read is
    // preserved as an unknown field; tag 0 marks end-of-message. Unknown fields are
    // committed in the finally block even if parsing fails part-way.
    private ClearTableFromCacheResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheResponse_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.Builder.class);
    }
    // NOTE(review): non-final public static field as emitted by this protoc version;
    // later generators mark PARSER final. Do not reassign.
    public static com.google.protobuf.Parser<ClearTableFromCacheResponse> PARSER =
        new com.google.protobuf.AbstractParser<ClearTableFromCacheResponse>() {
      public ClearTableFromCacheResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ClearTableFromCacheResponse(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<ClearTableFromCacheResponse> getParserForType() {
      return PARSER;
    }
    // No fields to initialize.
    private void initFields() {
    }
    // Memoized tri-state: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;
      memoizedIsInitialized = 1;
      return true;
    }
    // Serializes only the retained unknown fields (the message has none of its own).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    // Equality reduces to unknown-field equality since there are no declared fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse)) {
        return super.equals(obj);
      }
      org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse other = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers — all delegate to PARSER.
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    /** Returns a new builder with all fields cleared. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a builder pre-populated from {@code prototype}. */
    public static Builder newBuilder(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ClearTableFromCacheResponse}
     *
     * <p>Builder for the empty response message: there are no fields to set, so the
     * builder only carries unknown fields through merges.
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheResponse_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.class, org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.Builder.class);
      }
      // Construct using org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Empty body: no nested-message field builders exist for this message.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
      public Builder clear() {
        super.clear();
        return this;
      }
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.internal_static_ClearTableFromCacheResponse_descriptor;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse getDefaultInstanceForType() {
        return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance();
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse build() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse buildPartial() {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse result = new org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse(this);
        onBuilt();
        return result;
      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse) {
          return mergeFrom((org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
      // Only unknown fields can be merged — the message declares none of its own.
      public Builder mergeFrom(org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse other) {
        if (other == org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
      // Always true: there are no required fields.
      public final boolean isInitialized() {
        return true;
      }
      // Parses from the wire via PARSER and merges; partially parsed data is merged
      // in the finally block even when the parse throws.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ClearTableFromCacheResponse)
    }

    // Eagerly create the shared default (empty) instance returned by getDefaultInstance().
    static {
      defaultInstance = new ClearTableFromCacheResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ClearTableFromCacheResponse)
  }
/**
* Protobuf service {@code MetaDataService}
*/
public static abstract class MetaDataService
implements com.google.protobuf.Service {
    /** Subclass-only constructor; concrete services are created via the reflective wrappers. */
    protected MetaDataService() {}
    /**
     * Non-blocking, callback-style view of the {@code MetaDataService} RPC service.
     * Implement this interface and wrap it with {@code newReflectiveService(Interface)}
     * to obtain a {@link com.google.protobuf.Service}. Each method corresponds to one
     * RPC declared in the proto service; results are delivered via the {@code done}
     * callback rather than returned.
     */
    public interface Interface {
      /**
       * <code>rpc getTable(.GetTableRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void getTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc getFunctions(.GetFunctionsRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void getFunctions(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc createTable(.CreateTableRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void createTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc createFunction(.CreateFunctionRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void createFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc dropTable(.DropTableRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void dropTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc dropFunction(.DropFunctionRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void dropFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc addColumn(.AddColumnRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void addColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc dropColumn(.DropColumnRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void dropColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc updateIndexState(.UpdateIndexStateRequest) returns (.MetaDataResponse);</code>
       */
      public abstract void updateIndexState(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);

      /**
       * <code>rpc clearCache(.ClearCacheRequest) returns (.ClearCacheResponse);</code>
       */
      public abstract void clearCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse> done);

      /**
       * <code>rpc getVersion(.GetVersionRequest) returns (.GetVersionResponse);</code>
       */
      public abstract void getVersion(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse> done);

      /**
       * <code>rpc clearTableFromCache(.ClearTableFromCacheRequest) returns (.ClearTableFromCacheResponse);</code>
       */
      public abstract void clearTableFromCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse> done);

    }
    /**
     * Adapts {@code impl} to a {@link com.google.protobuf.Service} by returning an
     * anonymous {@code MetaDataService} subclass that forwards each of the twelve RPC
     * methods straight to the corresponding {@code Interface} method on {@code impl}.
     */
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new MetaDataService() {
        @java.lang.Override
        public  void getTable(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.getTable(controller, request, done);
        }

        @java.lang.Override
        public  void getFunctions(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.getFunctions(controller, request, done);
        }

        @java.lang.Override
        public  void createTable(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.createTable(controller, request, done);
        }

        @java.lang.Override
        public  void createFunction(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.createFunction(controller, request, done);
        }

        @java.lang.Override
        public  void dropTable(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.dropTable(controller, request, done);
        }

        @java.lang.Override
        public  void dropFunction(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.dropFunction(controller, request, done);
        }

        @java.lang.Override
        public  void addColumn(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.addColumn(controller, request, done);
        }

        @java.lang.Override
        public  void dropColumn(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.dropColumn(controller, request, done);
        }

        @java.lang.Override
        public  void updateIndexState(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
          impl.updateIndexState(controller, request, done);
        }

        @java.lang.Override
        public  void clearCache(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse> done) {
          impl.clearCache(controller, request, done);
        }

        @java.lang.Override
        public  void getVersion(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse> done) {
          impl.getVersion(controller, request, done);
        }

        @java.lang.Override
        public  void clearTableFromCache(
            com.google.protobuf.RpcController controller,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request,
            com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse> done) {
          impl.clearTableFromCache(controller, request, done);
        }

      };
    }
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
public final com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
com.google.protobuf.Message request)
throws com.google.protobuf.ServiceException {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.callBlockingMethod() given method descriptor for " +
"wrong service type.");
}
switch(method.getIndex()) {
case 0:
return impl.getTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest)request);
case 1:
return impl.getFunctions(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest)request);
case 2:
return impl.createTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest)request);
case 3:
return impl.createFunction(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest)request);
case 4:
return impl.dropTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest)request);
case 5:
return impl.dropFunction(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest)request);
case 6:
return impl.addColumn(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest)request);
case 7:
return impl.dropColumn(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest)request);
case 8:
return impl.updateIndexState(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest)request);
case 9:
return impl.clearCache(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest)request);
case 10:
return impl.getVersion(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest)request);
case 11:
return impl.clearTableFromCache(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
      /**
       * Returns the default ("prototype") request message for {@code method},
       * used by reflective RPC machinery to parse incoming request bytes.
       *
       * @throws java.lang.IllegalArgumentException if {@code method} belongs
       *         to a different service descriptor.
       */
      public final com.google.protobuf.Message
          getRequestPrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getRequestPrototype() given method " +
            "descriptor for wrong service type.");
        }
        // Case order mirrors the rpc declarations in the .proto service.
        switch(method.getIndex()) {
          case 0:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.getDefaultInstance();
          case 1:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.getDefaultInstance();
          case 2:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.getDefaultInstance();
          case 3:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.getDefaultInstance();
          case 4:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.getDefaultInstance();
          case 5:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.getDefaultInstance();
          case 6:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.getDefaultInstance();
          case 7:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.getDefaultInstance();
          case 8:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.getDefaultInstance();
          case 9:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.getDefaultInstance();
          case 10:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.getDefaultInstance();
          case 11:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
      /**
       * Returns the default ("prototype") response message for {@code method}.
       * Indices 0-8 all reply with MetaDataResponse; clearCache, getVersion
       * and clearTableFromCache each have a dedicated response type.
       *
       * @throws java.lang.IllegalArgumentException if {@code method} belongs
       *         to a different service descriptor.
       */
      public final com.google.protobuf.Message
          getResponsePrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getResponsePrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 1:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 2:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 3:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 4:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 5:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 6:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 7:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 8:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
          case 9:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance();
          case 10:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance();
          case 11:
            return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
};
}
    // --- Service contract -------------------------------------------------
    // One abstract handler per RPC declared in the MetaData service proto.
    // Concrete coprocessor implementations override these; the generated
    // Stub/BlockingStub below route the same twelve methods, in the same
    // index order, over an RpcChannel.
    /**
     * <code>rpc getTable(.GetTableRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void getTable(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc getFunctions(.GetFunctionsRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void getFunctions(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc createTable(.CreateTableRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void createTable(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc createFunction(.CreateFunctionRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void createFunction(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc dropTable(.DropTableRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void dropTable(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc dropFunction(.DropFunctionRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void dropFunction(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc addColumn(.AddColumnRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void addColumn(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc dropColumn(.DropColumnRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void dropColumn(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc updateIndexState(.UpdateIndexStateRequest) returns (.MetaDataResponse);</code>
     */
    public abstract void updateIndexState(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done);
    /**
     * <code>rpc clearCache(.ClearCacheRequest) returns (.ClearCacheResponse);</code>
     */
    public abstract void clearCache(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse> done);
    /**
     * <code>rpc getVersion(.GetVersionRequest) returns (.GetVersionResponse);</code>
     */
    public abstract void getVersion(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse> done);
    /**
     * <code>rpc clearTableFromCache(.ClearTableFromCacheRequest) returns (.ClearTableFromCacheResponse);</code>
     */
    public abstract void clearTableFromCache(
        com.google.protobuf.RpcController controller,
        org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request,
        com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse> done);
    /**
     * Returns the protobuf descriptor for this service — the first (index 0)
     * service declared in the enclosing MetaDataProtos file descriptor.
     */
    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.phoenix.coprocessor.generated.MetaDataProtos.getDescriptor().getServices().get(0);
    }
    /** Instance-level descriptor accessor; delegates to the static {@code getDescriptor()}. */
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    /**
     * Reflective asynchronous dispatch: routes {@code request} to the
     * abstract handler whose index matches {@code method}, narrowing the
     * generic {@code done} callback to that handler's concrete response type
     * via {@code RpcUtil.specializeCallback}.
     *
     * @throws java.lang.IllegalArgumentException if {@code method} belongs to
     *         a different service descriptor.
     */
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      // Case order mirrors the rpc declarations in the .proto service.
      switch(method.getIndex()) {
        case 0:
          this.getTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 1:
          this.getFunctions(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 2:
          this.createTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 3:
          this.createFunction(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 4:
          this.dropTable(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 5:
          this.dropFunction(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 6:
          this.addColumn(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 7:
          this.dropColumn(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 8:
          this.updateIndexState(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse>specializeCallback(
              done));
          return;
        case 9:
          this.clearCache(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse>specializeCallback(
              done));
          return;
        case 10:
          this.getVersion(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse>specializeCallback(
              done));
          return;
        case 11:
          this.clearTableFromCache(controller, (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest)request,
            com.google.protobuf.RpcUtil.<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getRequestPrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest.getDefaultInstance();
case 1:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest.getDefaultInstance();
case 2:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest.getDefaultInstance();
case 3:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest.getDefaultInstance();
case 4:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest.getDefaultInstance();
case 5:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest.getDefaultInstance();
case 6:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest.getDefaultInstance();
case 7:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest.getDefaultInstance();
case 8:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest.getDefaultInstance();
case 9:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest.getDefaultInstance();
case 10:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest.getDefaultInstance();
case 11:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
if (method.getService() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"Service.getResponsePrototype() given method " +
"descriptor for wrong service type.");
}
switch(method.getIndex()) {
case 0:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 1:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 2:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 3:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 4:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 5:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 6:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 7:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 8:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance();
case 9:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance();
case 10:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance();
case 11:
return org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
}
}
    /** Creates an asynchronous client stub that issues this service's RPCs over {@code channel}. */
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
    /**
     * Asynchronous client stub. Each method forwards to
     * {@code channel.callMethod} with the method descriptor at the matching
     * index, the response prototype, and the caller's callback generalized to
     * accept a raw {@code Message}. All twelve methods follow the identical
     * pattern, differing only in descriptor index and request/response types.
     */
    public static final class Stub extends org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataService implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }
      // Transport over which every call below is dispatched.
      private final com.google.protobuf.RpcChannel channel;
      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }
      public void getTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void getFunctions(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void createTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void createFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void dropTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void dropFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void addColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void dropColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void updateIndexState(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance()));
      }
      public void clearCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance()));
      }
      public void getVersion(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance()));
      }
      public void clearTableFromCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request,
          com.google.protobuf.RpcCallback<org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.class,
            org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance()));
      }
    }
    /** Creates a synchronous client stub that issues this service's RPCs over {@code channel}. */
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }
    /**
     * Synchronous mirror of the service contract: one blocking method per
     * RPC, returning the response directly and surfacing failures as
     * {@link com.google.protobuf.ServiceException}.
     */
    public interface BlockingInterface {
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse getTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse getFunctions(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse createTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse createFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropTable(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropFunction(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse addColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropColumn(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse updateIndexState(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse clearCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse getVersion(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request)
          throws com.google.protobuf.ServiceException;
      public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse clearTableFromCache(
          com.google.protobuf.RpcController controller,
          org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request)
          throws com.google.protobuf.ServiceException;
    }
private static final class BlockingStub implements BlockingInterface {
      /** Wraps the blocking channel; instances are created via {@code newBlockingStub}. */
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }
      // Transport over which each synchronous call is dispatched.
      private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse getTable(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetTableRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(0),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse getFunctions(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetFunctionsRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(1),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse createTable(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateTableRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(2),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse createFunction(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.CreateFunctionRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(3),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropTable(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropTableRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(4),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropFunction(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropFunctionRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(5),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse addColumn(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.AddColumnRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(6),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse dropColumn(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.DropColumnRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(7),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
// Blocking client stub for the "updateIndexState" RPC: invokes service method
// index 8 via the underlying blocking channel and casts the generic response
// message to MetaDataResponse. (protoc-generated code; do not hand-edit logic.)
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse updateIndexState(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.UpdateIndexStateRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(8),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse.getDefaultInstance());
}
// Blocking client stub for the "clearCache" RPC: invokes service method
// index 9 via the underlying blocking channel and casts the generic response
// message to ClearCacheResponse. (protoc-generated code; do not hand-edit logic.)
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse clearCache(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(9),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearCacheResponse.getDefaultInstance());
}
// Blocking client stub for the "getVersion" RPC: invokes service method
// index 10 via the underlying blocking channel and casts the generic response
// message to GetVersionResponse. (protoc-generated code; do not hand-edit logic.)
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse getVersion(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(10),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.GetVersionResponse.getDefaultInstance());
}
// Blocking client stub for the "clearTableFromCache" RPC: invokes service
// method index 11 via the underlying blocking channel and casts the generic
// response message to ClearTableFromCacheResponse. (protoc-generated code;
// do not hand-edit logic.)
public org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse clearTableFromCache(
com.google.protobuf.RpcController controller,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse) channel.callBlockingMethod(
getDescriptor().getMethods().get(11),
controller,
request,
org.apache.phoenix.coprocessor.generated.MetaDataProtos.ClearTableFromCacheResponse.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:MetaDataService)
}
// Per-message Descriptor / FieldAccessorTable pairs used for reflection by
// the generated message classes. All of these are assigned exactly once, in
// the static initializer at the bottom of this file. (protoc-generated code.)
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MetaDataResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MetaDataResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetTableRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetTableRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetFunctionsRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetFunctionsRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CreateTableRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_CreateTableRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CreateFunctionRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_CreateFunctionRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_DropTableRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DropTableRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_AddColumnRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_AddColumnRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_DropColumnRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DropColumnRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_DropFunctionRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DropFunctionRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_UpdateIndexStateRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_UpdateIndexStateRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClearCacheRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClearCacheRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClearCacheResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClearCacheResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetVersionRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetVersionRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetVersionResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetVersionResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClearTableFromCacheRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClearTableFromCacheRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClearTableFromCacheResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClearTableFromCacheResponse_fieldAccessorTable;
// Returns the FileDescriptor for this proto file; the field is assigned by
// the static initializer below. (protoc-generated code.)
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// Holds the parsed FileDescriptor; written exactly once by the static
// initializer's assigner callback. (protoc-generated code; the escaped
// string data below is the serialized FileDescriptorProto and must not be
// edited by hand.)
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Octal-escaped serialized FileDescriptorProto for MetaDataService.proto.
java.lang.String[] descriptorData = {
"\n\025MetaDataService.proto\032\014PTable.proto\032\017P" +
"Function.proto\"\327\001\n\020MetaDataResponse\022!\n\nr" +
"eturnCode\030\001 \001(\0162\r.MutationCode\022\024\n\014mutati" +
"onTime\030\002 \001(\003\022\026\n\005table\030\003 \001(\0132\007.PTable\022\026\n\016" +
"tablesToDelete\030\004 \003(\014\022\022\n\ncolumnName\030\005 \001(\014" +
"\022\022\n\nfamilyName\030\006 \001(\014\022\024\n\014functionName\030\007 \001" +
"(\014\022\034\n\010function\030\010 \003(\0132\n.PFunction\"{\n\017GetT" +
"ableRequest\022\020\n\010tenantId\030\001 \002(\014\022\022\n\nschemaN" +
"ame\030\002 \002(\014\022\021\n\ttableName\030\003 \002(\014\022\026\n\016tableTim" +
"estamp\030\004 \002(\003\022\027\n\017clientTimestamp\030\005 \002(\003\"s\n",
"\023GetFunctionsRequest\022\020\n\010tenantId\030\001 \002(\014\022\025" +
"\n\rfunctionNames\030\002 \003(\014\022\032\n\022functionTimesta" +
"mps\030\003 \003(\003\022\027\n\017clientTimestamp\030\004 \002(\003\"4\n\022Cr" +
"eateTableRequest\022\036\n\026tableMetadataMutatio" +
"ns\030\001 \003(\014\"[\n\025CreateFunctionRequest\022\036\n\026tab" +
"leMetadataMutations\030\001 \003(\014\022\021\n\ttemporary\030\002" +
" \002(\010\022\017\n\007replace\030\003 \001(\010\"V\n\020DropTableReques" +
"t\022\036\n\026tableMetadataMutations\030\001 \003(\014\022\021\n\ttab" +
"leType\030\002 \002(\t\022\017\n\007cascade\030\003 \001(\010\"2\n\020AddColu" +
"mnRequest\022\036\n\026tableMetadataMutations\030\001 \003(",
"\014\"3\n\021DropColumnRequest\022\036\n\026tableMetadataM" +
"utations\030\001 \003(\014\"G\n\023DropFunctionRequest\022\036\n" +
"\026tableMetadataMutations\030\001 \003(\014\022\020\n\010ifExist" +
"s\030\002 \001(\010\"9\n\027UpdateIndexStateRequest\022\036\n\026ta" +
"bleMetadataMutations\030\001 \003(\014\"\023\n\021ClearCache" +
"Request\"\024\n\022ClearCacheResponse\"\023\n\021GetVers" +
"ionRequest\"%\n\022GetVersionResponse\022\017\n\007vers" +
"ion\030\001 \002(\003\"n\n\032ClearTableFromCacheRequest\022" +
"\020\n\010tenantId\030\001 \002(\014\022\022\n\nschemaName\030\002 \002(\014\022\021\n" +
"\ttableName\030\003 \002(\014\022\027\n\017clientTimestamp\030\004 \002(",
"\003\"\035\n\033ClearTableFromCacheResponse*\365\002\n\014Mut" +
"ationCode\022\030\n\024TABLE_ALREADY_EXISTS\020\000\022\023\n\017T" +
"ABLE_NOT_FOUND\020\001\022\024\n\020COLUMN_NOT_FOUND\020\002\022\031" +
"\n\025COLUMN_ALREADY_EXISTS\020\003\022\035\n\031CONCURRENT_" +
"TABLE_MUTATION\020\004\022\027\n\023TABLE_NOT_IN_REGION\020" +
"\005\022\025\n\021NEWER_TABLE_FOUND\020\006\022\034\n\030UNALLOWED_TA" +
"BLE_MUTATION\020\007\022\021\n\rNO_PK_COLUMNS\020\010\022\032\n\026PAR" +
"ENT_TABLE_NOT_FOUND\020\t\022\033\n\027FUNCTION_ALREAD" +
"Y_EXISTS\020\n\022\026\n\022FUNCTION_NOT_FOUND\020\013\022\030\n\024NE" +
"WER_FUNCTION_FOUND\020\014\022\032\n\026FUNCTION_NOT_IN_",
"REGION\020\r2\304\005\n\017MetaDataService\022/\n\010getTable" +
"\022\020.GetTableRequest\032\021.MetaDataResponse\0227\n" +
"\014getFunctions\022\024.GetFunctionsRequest\032\021.Me" +
"taDataResponse\0225\n\013createTable\022\023.CreateTa" +
"bleRequest\032\021.MetaDataResponse\022;\n\016createF" +
"unction\022\026.CreateFunctionRequest\032\021.MetaDa" +
"taResponse\0221\n\tdropTable\022\021.DropTableReque" +
"st\032\021.MetaDataResponse\0227\n\014dropFunction\022\024." +
"DropFunctionRequest\032\021.MetaDataResponse\0221" +
"\n\taddColumn\022\021.AddColumnRequest\032\021.MetaDat",
"aResponse\0223\n\ndropColumn\022\022.DropColumnRequ" +
"est\032\021.MetaDataResponse\022?\n\020updateIndexSta" +
"te\022\030.UpdateIndexStateRequest\032\021.MetaDataR" +
"esponse\0225\n\nclearCache\022\022.ClearCacheReques" +
"t\032\023.ClearCacheResponse\0225\n\ngetVersion\022\022.G" +
"etVersionRequest\032\023.GetVersionResponse\022P\n" +
"\023clearTableFromCache\022\033.ClearTableFromCac" +
"heRequest\032\034.ClearTableFromCacheResponseB" +
"B\n(org.apache.phoenix.coprocessor.genera" +
"tedB\016MetaDataProtosH\001\210\001\001\240\001\001"
};
// Callback invoked once the file descriptor is parsed: caches the root
// descriptor plus each message's Descriptor / FieldAccessorTable pair.
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_MetaDataResponse_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_MetaDataResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MetaDataResponse_descriptor,
new java.lang.String[] { "ReturnCode", "MutationTime", "Table", "TablesToDelete", "ColumnName", "FamilyName", "FunctionName", "Function", });
internal_static_GetTableRequest_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_GetTableRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetTableRequest_descriptor,
new java.lang.String[] { "TenantId", "SchemaName", "TableName", "TableTimestamp", "ClientTimestamp", });
internal_static_GetFunctionsRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_GetFunctionsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetFunctionsRequest_descriptor,
new java.lang.String[] { "TenantId", "FunctionNames", "FunctionTimestamps", "ClientTimestamp", });
internal_static_CreateTableRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_CreateTableRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CreateTableRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", });
internal_static_CreateFunctionRequest_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_CreateFunctionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CreateFunctionRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", "Temporary", "Replace", });
internal_static_DropTableRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_DropTableRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DropTableRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", "TableType", "Cascade", });
internal_static_AddColumnRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_AddColumnRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_AddColumnRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", });
internal_static_DropColumnRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_DropColumnRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DropColumnRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", });
internal_static_DropFunctionRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_DropFunctionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DropFunctionRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", "IfExists", });
internal_static_UpdateIndexStateRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_UpdateIndexStateRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_UpdateIndexStateRequest_descriptor,
new java.lang.String[] { "TableMetadataMutations", });
internal_static_ClearCacheRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_ClearCacheRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClearCacheRequest_descriptor,
new java.lang.String[] { });
internal_static_ClearCacheResponse_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_ClearCacheResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClearCacheResponse_descriptor,
new java.lang.String[] { });
internal_static_GetVersionRequest_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_GetVersionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetVersionRequest_descriptor,
new java.lang.String[] { });
internal_static_GetVersionResponse_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_GetVersionResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetVersionResponse_descriptor,
new java.lang.String[] { "Version", });
internal_static_ClearTableFromCacheRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_ClearTableFromCacheRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClearTableFromCacheRequest_descriptor,
new java.lang.String[] { "TenantId", "SchemaName", "TableName", "ClientTimestamp", });
internal_static_ClearTableFromCacheResponse_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_ClearTableFromCacheResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClearTableFromCacheResponse_descriptor,
new java.lang.String[] { });
return null;
}
};
// Parse the serialized data, resolving the imported PTable.proto and
// PFunction.proto descriptors, then run the assigner above.
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
org.apache.phoenix.coprocessor.generated.PTableProtos.getDescriptor(),
org.apache.phoenix.coprocessor.generated.PFunctionProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
| apache-2.0 |
julianhyde/calcite | core/src/test/java/org/apache/calcite/test/StatesTableFunction.java | 4941 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.DataContext;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.ScannableTable;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.Statistics;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.ImmutableBitSet;
import com.google.common.collect.ImmutableList;
/** A table function that returns states and their boundaries; also national
* parks.
*
* <p>Has same content as
* <code>file/src/test/resources/geo/states.json</code>. */
public class StatesTableFunction {
private StatesTableFunction() {}
private static final Object[][] STATE_ROWS = {
{"NV", "Polygon((-120 42, -114 42, -114 37, -114.75 35.1, -120 39,"
+ " -120 42))"},
{"UT", "Polygon((-114 42, -111.05 42, -111.05 41, -109.05 41, -109.05 37,"
+ " -114 37, -114 42))"},
{"CA", "Polygon((-124.25 42, -120 42, -120 39, -114.75 35.1,"
+ " -114.75 32.5, -117.15 32.5, -118.30 33.75, -120.5 34.5,"
+ " -122.4 37.2, -124.25 42))"},
{"AZ", "Polygon((-114 37, -109.05 37, -109.05 31.33, -111.07 31.33,"
+ " -114.75 32.5, -114.75 35.1, -114 37))"},
{"CO", "Polygon((-109.05 41, -102 41, -102 37, -109.05 37, -109.05 41))"},
{"OR", "Polygon((-123.9 46.2, -122.7 45.7, -119 46, -117 46, -116.5 45.5,"
+ " -117.03 44.2, -117.03 42, -124.25 42, -124.6 42.8,"
+ " -123.9 46.2))"},
{"WA", "Polygon((-124.80 48.4, -123.2 48.2, -123.2 49, -117 49, -117 46,"
+ " -119 46, -122.7 45.7, -123.9 46.2, -124.80 48.4))"},
{"ID", "Polygon((-117 49, -116.05 49, -116.05 48, -114.4 46.6,"
+ " -112.9 44.45, -111.05 44.45, -111.05 42, -117.03 42,"
+ " -117.03 44.2, -116.5 45.5, -117 46, -117 49))"},
{"MT", "Polygon((-116.05 49, -104.05 49, -104.05 45, -111.05 45,"
+ " -111.05 44.45, -112.9 44.45, -114.4 46.6, -116.05 48,"
+ " -116.05 49))"},
{"WY", "Polygon((-111.05 45, -104.05 45, -104.05 41, -111.05 41,"
+ " -111.05 45))"},
{"NM", "Polygon((-109.05 37, -103 37, -103 32, -106.65 32, -106.5 31.8,"
+ " -108.2 31.8, -108.2 31.33, -109.05 31.33, -109.05 37))"}
};
private static final Object[][] PARK_ROWS = {
{"Yellowstone NP", "Polygon((-111.2 45.1, -109.30 45.1, -109.30 44.1,"
+ " -109 43.8, -110 43, -111.2 43.4, -111.2 45.1))"},
{"Yosemite NP", "Polygon((-120.2 38, -119.30 38.2, -119 37.7,"
+ " -119.9 37.6, -120.2 38))"},
{"Death Valley NP", "Polygon((-118.2 37.3, -117 37, -116.3 35.7,"
+ " -117 35.7, -117.2 36.2, -117.8 36.4, -118.2 37.3))"},
};
public static ScannableTable states(boolean b) {
return eval(STATE_ROWS);
};
public static ScannableTable parks(boolean b) {
return eval(PARK_ROWS);
};
private static ScannableTable eval(final Object[][] rows) {
return new ScannableTable() {
public Enumerable<Object[]> scan(DataContext root) {
return Linq4j.asEnumerable(rows);
}
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
return typeFactory.builder()
.add("name", SqlTypeName.VARCHAR)
.add("geom", SqlTypeName.VARCHAR)
.build();
}
public Statistic getStatistic() {
return Statistics.of(rows.length,
ImmutableList.of(ImmutableBitSet.of(0)));
}
public Schema.TableType getJdbcTableType() {
return Schema.TableType.TABLE;
}
public boolean isRolledUp(String column) {
return false;
}
public boolean rolledUpColumnValidInsideAgg(String column, SqlCall call,
SqlNode parent, CalciteConnectionConfig config) {
return false;
}
};
}
}
| apache-2.0 |
netboynb/search-core | src/main/java/org/apache/solr/search/grouping/distributed/shardresultserializer/TopGroupsResultTransformer.java | 12627 | package org.apache.solr.search.grouping.distributed.shardresultserializer;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DocumentStoredFieldVisitor;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.grouping.GroupDocs;
import org.apache.lucene.search.grouping.TopGroups;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.UnicodeUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.handler.component.ShardDoc;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.grouping.Command;
import org.apache.solr.search.grouping.distributed.command.QueryCommand;
import org.apache.solr.search.grouping.distributed.command.QueryCommandResult;
import org.apache.solr.search.grouping.distributed.command.TopGroupsFieldCommand;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Implementation for transforming {@link TopGroups} and {@link TopDocs} into a {@link NamedList} structure and
* visa versa.
*/
public class TopGroupsResultTransformer implements ShardResultTransformer<List<Command>, Map<String, ?>> {
private final ResponseBuilder rb;
private static final Logger log = LoggerFactory.getLogger(TopGroupsResultTransformer.class);
public TopGroupsResultTransformer(ResponseBuilder rb) {
this.rb = rb;
}
/**
* {@inheritDoc}
*/
@Override
public NamedList transform(List<Command> data) throws IOException {
NamedList<NamedList> result = new NamedList<NamedList>();
for (Command command : data) {
NamedList commandResult;
if (TopGroupsFieldCommand.class.isInstance(command)) {
TopGroupsFieldCommand fieldCommand = (TopGroupsFieldCommand) command;
SchemaField groupField = rb.req.getSearcher().getSchema().getField(fieldCommand.getKey());
commandResult = serializeTopGroups(fieldCommand.result(), groupField);
} else if (QueryCommand.class.isInstance(command)) {
QueryCommand queryCommand = (QueryCommand) command;
commandResult = serializeTopDocs(queryCommand.result());
} else {
commandResult = null;
}
result.add(command.getKey(), commandResult);
}
return result;
}
/**
* {@inheritDoc}
*/
@Override
public Map<String, ?> transformToNative(NamedList<NamedList> shardResponse, Sort groupSort, Sort sortWithinGroup, String shard) {
Map<String, Object> result = new HashMap<String, Object>();
for (Map.Entry<String, NamedList> entry : shardResponse) {
String key = entry.getKey();
NamedList commandResult = entry.getValue();
Integer totalGroupedHitCount = (Integer) commandResult.get("totalGroupedHitCount");
Integer totalHits = (Integer) commandResult.get("totalHits");
if (totalHits != null) {
Integer matches = (Integer) commandResult.get("matches");
Float maxScore = (Float) commandResult.get("maxScore");
if (maxScore == null) {
maxScore = Float.NaN;
}
@SuppressWarnings("unchecked")
List<NamedList<Object>> documents = (List<NamedList<Object>>) commandResult.get("documents");
ScoreDoc[] scoreDocs = new ScoreDoc[documents.size()];
int j = 0;
for (NamedList<Object> document : documents) {
Object docId = document.get("id");
Object uniqueId = null;
if (docId != null)
uniqueId = docId.toString();
else
log.warn("doc {} has null 'id'", document);
Float score = (Float) document.get("score");
if (score == null) {
score = Float.NaN;
}
Object[] sortValues = null;
Object sortValuesVal = document.get("sortValues");
if (sortValuesVal != null) {
sortValues = ((List) sortValuesVal).toArray();
}
else {
log.warn("doc {} has null 'sortValues'", document);
}
scoreDocs[j++] = new ShardDoc(score, sortValues, uniqueId, shard);
}
result.put(key, new QueryCommandResult(new TopDocs(totalHits, scoreDocs, maxScore), matches));
continue;
}
Integer totalHitCount = (Integer) commandResult.get("totalHitCount");
List<GroupDocs<BytesRef>> groupDocs = new ArrayList<GroupDocs<BytesRef>>();
for (int i = 2; i < commandResult.size(); i++) {
String groupValue = commandResult.getName(i);
@SuppressWarnings("unchecked")
NamedList<Object> groupResult = (NamedList<Object>) commandResult.getVal(i);
Integer totalGroupHits = (Integer) groupResult.get("totalHits");
Float maxScore = (Float) groupResult.get("maxScore");
if (maxScore == null) {
maxScore = Float.NaN;
}
@SuppressWarnings("unchecked")
List<NamedList<Object>> documents = (List<NamedList<Object>>) groupResult.get("documents");
ScoreDoc[] scoreDocs = new ScoreDoc[documents.size()];
int j = 0;
for (NamedList<Object> document : documents) {
Object uniqueId = document.get("id").toString();
Float score = (Float) document.get("score");
if (score == null) {
score = Float.NaN;
}
Object[] sortValues = ((List) document.get("sortValues")).toArray();
scoreDocs[j++] = new ShardDoc(score, sortValues, uniqueId, shard);
}
BytesRef groupValueRef = groupValue != null ? new BytesRef(groupValue) : null;
groupDocs.add(new GroupDocs<BytesRef>(Float.NaN, maxScore, totalGroupHits, scoreDocs, groupValueRef, null));
}
@SuppressWarnings("unchecked")
GroupDocs<BytesRef>[] groupDocsArr = groupDocs.toArray(new GroupDocs[groupDocs.size()]);
TopGroups<BytesRef> topGroups = new TopGroups<BytesRef>(
groupSort.getSort(), sortWithinGroup.getSort(), totalHitCount, totalGroupedHitCount, groupDocsArr, Float.NaN
);
result.put(key, topGroups);
}
return result;
}
protected NamedList serializeTopGroups(TopGroups<BytesRef> data, SchemaField groupField) throws IOException {
NamedList<Object> result = new NamedList<Object>();
result.add("totalGroupedHitCount", data.totalGroupedHitCount);
result.add("totalHitCount", data.totalHitCount);
if (data.totalGroupCount != null) {
result.add("totalGroupCount", data.totalGroupCount);
}
CharsRef spare = new CharsRef();
SchemaField uniqueField = rb.req.getSearcher().getSchema().getUniqueKeyField();
for (GroupDocs<BytesRef> searchGroup : data.groups) {
NamedList<Object> groupResult = new NamedList<Object>();
groupResult.add("totalHits", searchGroup.totalHits);
if (!Float.isNaN(searchGroup.maxScore)) {
groupResult.add("maxScore", searchGroup.maxScore);
}
List<NamedList<Object>> documents = new ArrayList<NamedList<Object>>();
for (int i = 0; i < searchGroup.scoreDocs.length; i++) {
NamedList<Object> document = new NamedList<Object>();
documents.add(document);
Document doc = retrieveDocument(uniqueField, searchGroup.scoreDocs[i].doc);
document.add("id", uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
if (!Float.isNaN(searchGroup.scoreDocs[i].score)) {
document.add("score", searchGroup.scoreDocs[i].score);
}
if (!(searchGroup.scoreDocs[i] instanceof FieldDoc)) {
continue;
}
FieldDoc fieldDoc = (FieldDoc) searchGroup.scoreDocs[i];
Object[] convertedSortValues = new Object[fieldDoc.fields.length];
for (int j = 0; j < fieldDoc.fields.length; j++) {
Object sortValue = fieldDoc.fields[j];
Sort sortWithinGroup = rb.getGroupingSpec().getSortWithinGroup();
SchemaField field = sortWithinGroup.getSort()[j].getField() != null ? rb.req.getSearcher().getSchema().getFieldOrNull(sortWithinGroup.getSort()[j].getField()) : null;
if (field != null) {
FieldType fieldType = field.getType();
if (sortValue instanceof BytesRef) {
UnicodeUtil.UTF8toUTF16((BytesRef)sortValue, spare);
String indexedValue = spare.toString();
sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 1.0f));
} else if (sortValue instanceof String) {
sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 1.0f));
}
}
convertedSortValues[j] = sortValue;
}
document.add("sortValues", convertedSortValues);
}
groupResult.add("documents", documents);
String groupValue = searchGroup.groupValue != null ? groupField.getType().indexedToReadable(searchGroup.groupValue.utf8ToString()): null;
result.add(groupValue, groupResult);
}
return result;
}
protected NamedList serializeTopDocs(QueryCommandResult result) throws IOException {
NamedList<Object> queryResult = new NamedList<Object>();
queryResult.add("matches", result.getMatches());
queryResult.add("totalHits", result.getTopDocs().totalHits);
if (rb.getGroupingSpec().isNeedScore()) {
queryResult.add("maxScore", result.getTopDocs().getMaxScore());
}
List<NamedList> documents = new ArrayList<NamedList>();
queryResult.add("documents", documents);
SchemaField uniqueField = rb.req.getSearcher().getSchema().getUniqueKeyField();
CharsRef spare = new CharsRef();
for (ScoreDoc scoreDoc : result.getTopDocs().scoreDocs) {
NamedList<Object> document = new NamedList<Object>();
documents.add(document);
Document doc = retrieveDocument(uniqueField, scoreDoc.doc);
document.add("id", uniqueField.getType().toExternal(doc.getField(uniqueField.getName())));
if (rb.getGroupingSpec().isNeedScore()) {
document.add("score", scoreDoc.score);
}
if (!FieldDoc.class.isInstance(scoreDoc)) {
continue;
}
FieldDoc fieldDoc = (FieldDoc) scoreDoc;
Object[] convertedSortValues = new Object[fieldDoc.fields.length];
for (int j = 0; j < fieldDoc.fields.length; j++) {
Object sortValue = fieldDoc.fields[j];
Sort groupSort = rb.getGroupingSpec().getGroupSort();
SchemaField field = groupSort.getSort()[j].getField() != null ? rb.req.getSearcher().getSchema().getFieldOrNull(groupSort.getSort()[j].getField()) : null;
if (field != null) {
FieldType fieldType = field.getType();
if (sortValue instanceof BytesRef) {
UnicodeUtil.UTF8toUTF16((BytesRef)sortValue, spare);
String indexedValue = spare.toString();
sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable(indexedValue), 1.0f));
} else if (sortValue instanceof String) {
sortValue = fieldType.toObject(field.createField(fieldType.indexedToReadable((String) sortValue), 1.0f));
}
}
convertedSortValues[j] = sortValue;
}
document.add("sortValues", convertedSortValues);
}
return queryResult;
}
/**
 * Fetches the stored form of a single document, restricted to the unique-key
 * field only, using the request's searcher.
 *
 * @param uniqueField the schema's unique-key field (the only field loaded)
 * @param doc the internal Lucene document id
 * @return the partially loaded stored document
 * @throws IOException on index read failure
 */
private Document retrieveDocument(final SchemaField uniqueField, int doc) throws IOException {
    final DocumentStoredFieldVisitor idOnlyVisitor =
        new DocumentStoredFieldVisitor(uniqueField.getName());
    rb.req.getSearcher().doc(doc, idOnlyVisitor);
    return idOnlyVisitor.getDocument();
}
}
| apache-2.0 |
yuyijq/dubbo | dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/extension/SpringExtensionFactoryTest.java | 3339 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config.spring.extension;
import org.apache.dubbo.config.spring.api.DemoService;
import org.apache.dubbo.config.spring.api.HelloService;
import org.apache.dubbo.config.spring.impl.DemoServiceImpl;
import org.apache.dubbo.config.spring.impl.HelloServiceImpl;
import org.apache.dubbo.rpc.Protocol;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.annotation.DirtiesContext;
@Configuration
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
public class SpringExtensionFactoryTest {

    /** Factory under test; assigned once, so declared final. */
    private final SpringExtensionFactory springExtensionFactory = new SpringExtensionFactory();

    /** Context registering this class's own @Bean definitions (bean1, bean2, hello). */
    private AnnotationConfigApplicationContext context1;

    /** Second context, registering beans from {@link BeanForContext2}. */
    private AnnotationConfigApplicationContext context2;

    /**
     * Resets the shared context registry, builds two fresh application
     * contexts, and registers both with {@link SpringExtensionFactory}
     * before every test.
     */
    @BeforeEach
    public void init() {
        SpringExtensionFactory.clearContexts();
        context1 = new AnnotationConfigApplicationContext();
        context1.register(getClass());
        context1.refresh();
        context2 = new AnnotationConfigApplicationContext();
        context2.register(BeanForContext2.class);
        context2.refresh();
        SpringExtensionFactory.addApplicationContext(context1);
        SpringExtensionFactory.addApplicationContext(context2);
    }

    /**
     * An SPI extension name that is not a Spring bean must resolve to null
     * through the Spring extension factory.
     */
    @Test
    public void testGetExtensionBySPI() {
        Protocol protocol = springExtensionFactory.getExtension(Protocol.class, "protocol");
        Assertions.assertNull(protocol);
    }

    /**
     * Beans registered in either of the two contexts are resolvable by
     * name and type through the factory.
     */
    @Test
    public void testGetExtensionByName() {
        DemoService bean = springExtensionFactory.getExtension(DemoService.class, "bean1");
        Assertions.assertNotNull(bean);
        HelloService hello = springExtensionFactory.getExtension(HelloService.class, "hello");
        Assertions.assertNotNull(hello);
    }

    /**
     * Clears the shared registry and closes both contexts so no state leaks
     * into other test classes.
     */
    @AfterEach
    public void destroy() {
        SpringExtensionFactory.clearContexts();
        context1.close();
        context2.close();
    }

    @Bean("bean1")
    public DemoService bean1() {
        return new DemoServiceImpl();
    }

    @Bean("bean2")
    public DemoService bean2() {
        return new DemoServiceImpl();
    }

    @Bean("hello")
    public HelloService helloService() {
        return new HelloServiceImpl();
    }
}
| apache-2.0 |
vzhukovskyi/kaa | common/core/src/main/java/org/kaaproject/kaa/server/common/core/algorithms/generation/DefaultRecordGenerationAlgorithmImpl.java | 11974 | /*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.core.algorithms.generation;
import static org.kaaproject.kaa.server.common.core.algorithms.CommonConstants.BY_DEFAULT_FIELD;
import static org.kaaproject.kaa.server.common.core.algorithms.CommonConstants.UUID_FIELD;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericEnumSymbol;
import org.apache.avro.generic.GenericFixed;
import org.apache.avro.generic.GenericRecord;
import org.codehaus.jackson.JsonNode;
import org.kaaproject.kaa.common.avro.GenericAvroConverter;
import org.kaaproject.kaa.server.common.core.algorithms.AvroUtils;
import org.kaaproject.kaa.server.common.core.configuration.KaaData;
import org.kaaproject.kaa.server.common.core.configuration.KaaDataFactory;
import org.kaaproject.kaa.server.common.core.schema.KaaSchema;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Default implementation of
 * {@link org.kaaproject.kaa.server.common.dao.configuration.DefaultRecordGenerationAlgorithm}.
 *
 * <p>Walks an Avro schema and produces a configuration record populated with
 * default values: explicit {@code by_default} json properties where declared,
 * otherwise type-specific zero/empty values.</p>
 */
public class DefaultRecordGenerationAlgorithmImpl<U extends KaaSchema, T extends KaaData<U>> implements DefaultRecordGenerationAlgorithm<T> {

    /** The Constant LOG. */
    private static final Logger LOG = LoggerFactory.getLogger(DefaultRecordGenerationAlgorithmImpl.class);

    /**
     * Records already generated, keyed by schema full name. A record is
     * registered here <em>before</em> its fields are processed, so recursive
     * type references resolve to the in-progress record instead of recursing
     * forever.
     */
    private final Map<String, GenericRecord> processedTypes = new HashMap<>();

    /** The avro schema parser; retains every named type encountered while parsing. */
    private final Schema.Parser avroSchemaParser;

    /** The avro base schema. */
    private final Schema avroBaseSchema;

    /** Factory used to wrap the generated configuration into typed data. */
    private final KaaDataFactory<U, T> dataFactory;

    /** The root schema. */
    private final U rootSchema;

    /**
     * Instantiates a new default configuration processor.
     *
     * @param kaaSchema the base schema
     * @param factory   factory producing the typed configuration data
     * @throws ConfigurationGenerationException the configuration processing exception
     */
    public DefaultRecordGenerationAlgorithmImpl(U kaaSchema, KaaDataFactory<U, T> factory)
            throws ConfigurationGenerationException {
        // Parameterized logging: the raw schema string is only rendered when debug is enabled.
        LOG.debug("Generating default configuration for configuration schema: {}", kaaSchema.getRawSchema());
        this.rootSchema = kaaSchema;
        this.dataFactory = factory;
        this.avroSchemaParser = new Schema.Parser();
        this.avroBaseSchema = this.avroSchemaParser.parse(kaaSchema.getRawSchema());
    }

    /**
     * Applies the default value declared in the schema's {@code by_default} property.
     *
     * @param schemaNode the schema node.
     * @param byDefault  the default value.
     * @return generated value.
     * @throws ConfigurationGenerationException if the declared default does not
     *         match any type permitted by the schema node.
     */
    private Object applyDefaultValue(Schema schemaNode, JsonNode byDefault) throws ConfigurationGenerationException {
        if (byDefault.isArray() && AvroUtils.getSchemaByType(schemaNode, Type.BYTES) != null) {
            // A json array default for a 'bytes' field is converted to the avro
            // 'bytes' representation: a flipped ByteBuffer of the array elements.
            ByteBuffer byteBuffer = ByteBuffer.allocate(byDefault.size());
            for (JsonNode oneByte : byDefault) {
                byteBuffer.put((byte) oneByte.asInt());
            }
            byteBuffer.flip();
            return byteBuffer;
        }
        if (byDefault.isBoolean() && AvroUtils.getSchemaByType(schemaNode, Type.BOOLEAN) != null) {
            return byDefault.asBoolean();
        }
        if (byDefault.isDouble()) {
            // A json floating-point default may target either avro double or float.
            if (AvroUtils.getSchemaByType(schemaNode, Type.DOUBLE) != null) {
                return byDefault.asDouble();
            } else if (AvroUtils.getSchemaByType(schemaNode, Type.FLOAT) != null) {
                return (float) byDefault.asDouble();
            }
        }
        if (byDefault.isInt() && AvroUtils.getSchemaByType(schemaNode, Type.INT) != null) {
            return byDefault.asInt();
        }
        if (byDefault.isLong() && AvroUtils.getSchemaByType(schemaNode, Type.LONG) != null) {
            return byDefault.asLong();
        }
        if (byDefault.isTextual()) {
            // A textual default is matched against enum symbols first, then
            // falls back to a plain string type.
            Schema enumSchema = AvroUtils.getSchemaByType(schemaNode, Type.ENUM);
            if (enumSchema != null) {
                String textDefaultValue = byDefault.asText();
                if (enumSchema.hasEnumSymbol(textDefaultValue)) {
                    return new GenericData.EnumSymbol(enumSchema, textDefaultValue);
                }
            }
            if (AvroUtils.getSchemaByType(schemaNode, Type.STRING) != null) {
                return byDefault.asText();
            }
        }
        throw new ConfigurationGenerationException("Default value " + byDefault.toString()
                + " is not applicable for the field");
    }

    /**
     * Processes generic type.
     *
     * @param schemaNode schema for current type.
     * @param byDefault  the by default.
     * @return generated value for input type.
     * @throws ConfigurationGenerationException configuration processing exception
     */
    private Object processType(Schema schemaNode, JsonNode byDefault) throws ConfigurationGenerationException {
        if (byDefault != null && !byDefault.isNull()) {
            return applyDefaultValue(schemaNode, byDefault);
        }
        // No explicit default: a nullable field defaults to null.
        if (AvroUtils.getSchemaByType(schemaNode, Type.NULL) != null) {
            return null;
        }
        Schema schemaToProcess = schemaNode;
        if (schemaToProcess.getType().equals(Type.UNION)) {
            // For unions, the first branch determines the default value's type.
            schemaToProcess = schemaToProcess.getTypes().get(0);
        }
        switch (schemaToProcess.getType()) {
            case ARRAY:
                // if this an array type then return empty array instance
                return processArray(schemaToProcess);
            case RECORD:
                return processRecord(schemaToProcess);
            case FIXED:
                return processFixed(schemaToProcess);
            case ENUM:
                return processEnum(schemaToProcess);
            case BYTES:
                // Default 'bytes' value is a single zero byte. BUG FIX: the
                // previous code sized the buffer from 'byDefault', which is
                // always null (or a json null) on this path - the non-null case
                // returns at the top of the method - so it threw a
                // NullPointerException or BufferOverflowException here.
                ByteBuffer byteBuffer = ByteBuffer.allocate(1);
                byteBuffer.put((byte) 0);
                byteBuffer.flip();
                return byteBuffer;
            case MAP:
                throw new ConfigurationGenerationException("Map is not supported.");
            case INT:
                // Autoboxing replaces the deprecated new Integer(...)/new Double(...)
                // constructors used previously; the boxed values are equal.
                return 0;
            case BOOLEAN:
                return Boolean.FALSE;
            case DOUBLE:
                return 0.0d;
            case LONG:
                return 0L;
            case STRING:
                return "";
            case FLOAT:
                return 0.0f;
            default:
                return null;
        }
    }

    /**
     * Processes record type.
     *
     * @param schemaNode schema for current type.
     * @return generated value for input record type.
     * @throws ConfigurationGenerationException configuration processing exception
     */
    private Object processRecord(Schema schemaNode) throws ConfigurationGenerationException {
        GenericRecord result = new GenericData.Record(schemaNode);
        // Register the record before filling it so recursive references to
        // this type (via getConfigurationByName) resolve to this instance.
        processedTypes.put(schemaNode.getFullName(), result);
        // process each field
        List<Field> fields = schemaNode.getFields();
        for (Field field : fields) {
            Object processFieldResult = processField(field);
            if (processFieldResult != null) {
                result.put(field.name(), processFieldResult);
            }
        }
        return result;
    }

    /**
     * Processes array type.
     *
     * @param schemaNode schema for current type.
     * @return generated (empty) value for input array type.
     */
    private Object processArray(Schema schemaNode) {
        // NOTE(review): the element type of 'schemaNode' is intentionally not
        // inspected - the default is an empty array, so a placeholder NULL
        // element schema suffices. Confirm before relying on the returned
        // array's schema.
        Schema elementTypeSchema = Schema.create(Type.NULL);
        return new GenericData.Array<>(0, Schema.createArray(elementTypeSchema));
    }

    /**
     * Processes enum type.
     *
     * @param schemaNode schema for current type.
     * @return the first declared enum symbol, used as the default.
     */
    private Object processEnum(Schema schemaNode) {
        return new GenericData.EnumSymbol(schemaNode, schemaNode.getEnumSymbols().get(0));
    }

    /**
     * Processes fixed type.
     *
     * @param schemaNode schema for current type.
     * @return a zero-filled fixed value of the declared size.
     */
    private Object processFixed(Schema schemaNode) {
        // Java byte arrays are zero-initialized, so no explicit fill loop is needed.
        byte[] bytes = new byte[schemaNode.getFixedSize()];
        return new GenericData.Fixed(schemaNode, bytes);
    }

    /**
     * Process field of a record type.
     *
     * @param fieldDefinition schema for field.
     * @return generated value for field based on its definition.
     * @throws ConfigurationGenerationException configuration processing exception
     */
    private Object processField(Field fieldDefinition) throws ConfigurationGenerationException {
        // "uuid" fields always receive a freshly generated value.
        if (UUID_FIELD.equals(fieldDefinition.name())) {
            return AvroUtils.generateUuidObject();
        }
        return processType(fieldDefinition.schema(), fieldDefinition.getJsonProp(BY_DEFAULT_FIELD));
    }

    /* (non-Javadoc)
     * @see org.kaaproject.kaa.server.common.dao.configuration.ConfigurationProcessor#getRootConfiguration()
     */
    @Override
    public final GenericRecord getRootConfiguration() throws ConfigurationGenerationException {
        return getConfigurationByName(avroBaseSchema.getName(), avroBaseSchema.getNamespace());
    }

    /* (non-Javadoc)
     * @see org.kaaproject.kaa.server.common.dao.configuration.ConfigurationProcessor#getRootJsonConfiguration()
     */
    @Override
    public final T getRootData() throws IOException, ConfigurationGenerationException {
        GenericRecord root = getRootConfiguration();
        GenericAvroConverter<GenericRecord> converter = new GenericAvroConverter<>(root.getSchema());
        try {
            return dataFactory.createData(rootSchema, converter.encodeToJson(root));
        } catch (RuntimeException e) {
            // NPE is thrown if "null" was written into a field that is not nullable;
            // CCE is thrown if a value of the wrong type was written into a field.
            LOG.error("Unexpected exception occurred while generating configuration.", e);
            throw new ConfigurationGenerationException(e);
        }
    }

    @Override
    public final GenericRecord getConfigurationByName(String name, String namespace)
            throws ConfigurationGenerationException {
        if (name == null || namespace == null) {
            return null;
        }
        String fullName = namespace + "." + name;
        // Reuse an already generated record if this type was processed before
        // (processedTypes never maps to null, so a single get() suffices).
        GenericRecord processed = processedTypes.get(fullName);
        if (processed != null) {
            return processed;
        }
        Schema schema = avroSchemaParser.getTypes().get(fullName);
        if (schema != null) {
            return (GenericRecord) processType(schema, null);
        }
        return null;
    }
}
| apache-2.0 |
pdeva/druid | server/src/test/java/io/druid/segment/indexing/DataSchemaTest.java | 8065 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment.indexing;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.metamx.common.IAE;
import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.JSONParseSpec;
import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec;
import io.druid.granularity.QueryGranularity;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.segment.indexing.granularity.ArbitraryGranularitySpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;
import java.util.Map;
/**
 * Unit tests for {@code DataSchema}: automatic dimension-exclusion defaults,
 * explicit dimension inclusion, rejection of metric/dimension name overlap,
 * and JSON serialization round-trips (including an invalid parser map).
 */
public class DataSchemaTest
{
private final ObjectMapper jsonMapper;
public DataSchemaTest()
{
jsonMapper = new DefaultObjectMapper();
// The mapper injects itself so deserialized specs that need an ObjectMapper
// (e.g. the lazily built parser) can obtain one.
jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
}
/**
 * With no explicit inclusions, the timestamp column and all metric input/output
 * columns are added to the dimension exclusions automatically.
 */
@Test
public void testDefaultExclusions() throws Exception
{
Map<String, Object> parser = jsonMapper.convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("time", "auto", null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dimB", "dimA")), null, null)
)
), new TypeReference<Map<String, Object>>() {}
);
DataSchema schema = new DataSchema(
"test",
parser,
new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"),
new DoubleSumAggregatorFactory("metric2", "col2"),
},
new ArbitraryGranularitySpec(QueryGranularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
jsonMapper
);
// "time", the metric field names and metric output names are all excluded.
Assert.assertEquals(
ImmutableSet.of("time", "col1", "col2", "metric1", "metric2"),
schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
);
}
/**
 * Columns explicitly listed as dimensions (here "time" and "col2") are NOT
 * auto-excluded even when they also serve as timestamp or metric input.
 */
@Test
public void testExplicitInclude() throws Exception
{
Map<String, Object> parser = jsonMapper.convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("time", "auto", null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "col2")), ImmutableList.of("dimC"), null)
)
), new TypeReference<Map<String, Object>>() {}
);
DataSchema schema = new DataSchema(
"test",
parser,
new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"),
new DoubleSumAggregatorFactory("metric2", "col2"),
},
new ArbitraryGranularitySpec(QueryGranularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
jsonMapper
);
// Only the declared exclusion plus non-included metric columns remain excluded.
Assert.assertEquals(
ImmutableSet.of("dimC", "col1", "metric1", "metric2"),
schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
);
}
/**
 * Declaring a dimension ("metric1") that collides with a metric output name
 * must be rejected when the parser is materialized.
 */
@Test(expected = IAE.class)
public void testOverlapMetricNameAndDim() throws Exception
{
Map<String, Object> parser = jsonMapper.convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("time", "auto", null),
new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("time", "dimA", "dimB", "metric1")), ImmutableList.of("dimC"), null)
)
), new TypeReference<Map<String, Object>>() {}
);
DataSchema schema = new DataSchema(
"test",
parser,
new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1"),
new DoubleSumAggregatorFactory("metric2", "col2"),
},
new ArbitraryGranularitySpec(QueryGranularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
jsonMapper
);
// getParser() triggers the validation that throws IAE.
schema.getParser();
}
/**
 * An unknown parser type must survive serde (the parser map is converted
 * lazily) but fail when the parser is actually requested.
 */
@Test
public void testSerdeWithInvalidParserMap() throws Exception
{
String jsonStr = "{"
+ "\"dataSource\":\"test\","
+ "\"parser\":{\"type\":\"invalid\"},"
+ "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
+ "\"granularitySpec\":{"
+ "\"type\":\"arbitrary\","
+ "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1970-01-01T00:00:00.000Z\"},"
+ "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}}";
//no error on serde as parser is converted to InputRowParser lazily when really needed
DataSchema schema = jsonMapper.readValue(
jsonMapper.writeValueAsString(
jsonMapper.readValue(jsonStr, DataSchema.class)
),
DataSchema.class
);
try {
schema.getParser();
Assert.fail("should've failed to get parser.");
}
catch (IllegalArgumentException ex) {
// expected: the "invalid" parser type cannot be resolved
}
}
/**
 * Full serialize/deserialize round-trip of a valid schema compares equal to
 * an equivalent schema built programmatically.
 */
@Test
public void testSerde() throws Exception
{
String jsonStr = "{"
+ "\"dataSource\":\"test\","
+ "\"parser\":{"
+ "\"type\":\"string\","
+ "\"parseSpec\":{"
+ "\"format\":\"json\","
+ "\"timestampSpec\":{\"column\":\"xXx\", \"format\": \"auto\", \"missingValue\": null},"
+ "\"dimensionsSpec\":{\"dimensions\":[], \"dimensionExclusions\":[]},"
+ "\"flattenSpec\":{\"useFieldDiscovery\":true, \"fields\":[]},"
+ "\"featureSpec\":{}},"
+ "\"encoding\":\"UTF-8\""
+ "},"
+ "\"metricsSpec\":[{\"type\":\"doubleSum\",\"name\":\"metric1\",\"fieldName\":\"col1\"}],"
+ "\"granularitySpec\":{"
+ "\"type\":\"arbitrary\","
+ "\"queryGranularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1970-01-01T00:00:00.000Z\"},"
+ "\"intervals\":[\"2014-01-01T00:00:00.000Z/2015-01-01T00:00:00.000Z\"]}}";
DataSchema actual = jsonMapper.readValue(
jsonMapper.writeValueAsString(
jsonMapper.readValue(jsonStr, DataSchema.class)
),
DataSchema.class
);
Assert.assertEquals(
new DataSchema(
"test",
jsonMapper.<Map<String, Object>>convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("xXx", null, null),
new DimensionsSpec(null, null, null)
)
), new TypeReference<Map<String, Object>>() {}
),
new AggregatorFactory[]{
new DoubleSumAggregatorFactory("metric1", "col1")
},
new ArbitraryGranularitySpec(QueryGranularity.DAY, ImmutableList.of(Interval.parse("2014/2015"))),
jsonMapper
),
actual
);
}
}
| apache-2.0 |
madhawa-gunasekara/product-ei | integration/mediation-tests/tests-mediator-1/src/test/java/org/wso2/carbon/esb/message/store/test/MessageStoreMessageConcurrencyTestCase.java | 6317 | /*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.esb.message.store.test;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.util.AXIOMUtil;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.esb.integration.common.clients.mediation.MessageStoreAdminClient;
import org.wso2.esb.integration.common.utils.Utils;
import org.wso2.esb.integration.common.utils.clients.stockquoteclient.StockQuoteClient;
import org.wso2.esb.integration.common.utils.ESBIntegrationTest;
import org.wso2.carbon.message.store.stub.MessageInfo;
import java.util.ArrayList;
/**
 * Checks the in-memory message store against concurrently sent messages:
 * 10 sender threads each post 4 messages, and the store must end up holding
 * exactly 40 copies, each byte-identical to the request envelope.
 */
public class MessageStoreMessageConcurrencyTestCase extends ESBIntegrationTest {
private MessageStoreAdminClient messageStoreAdminClient;
private final String MESSAGE_STORE_NAME = "automationMessageStore";
// Tracks whether the store was actually created so tear-down only deletes what it made.
private boolean isMessageStoreCreated = false;
private String[] messageStores = null;
/**
 * Creates the admin client and the message store before any test runs.
 */
@BeforeClass(alwaysRun = true)
public void setEnvironment() throws Exception {
init();
messageStoreAdminClient =
new MessageStoreAdminClient(contextUrls.getBackEndUrl(),
getSessionCookie());
initialize();
}
@Test(groups = {"wso2.esb"}, description = "Test whether all messages are stored from different sources")
public void messageStoreQuantityTest() throws Exception {
// The count should be 0 as soon as the message store is created
Assert.assertTrue(messageStoreAdminClient.getMessageCount(MESSAGE_STORE_NAME) == 0,
"Message store should be initially empty");
// refer within a sequence through a store mediator, mediate messages
// and verify the messages are stored correctly in the store.
loadESBConfigurationFromClasspath("/artifacts/ESB/synapseconfig/messageStore/sample_700.xml");
ArrayList<Thread> threads = new ArrayList<Thread>();
for (int i = 0; i < 10; i++) {
threads.add(new Sender());
}
// each of the 10 threads sends 4 messages concurrently (40 total)
for (int i = 0; i < 10; i++) {
threads.get(i).start();
}
// Wait (up to 30s) for exactly 40 messages; more or fewer is a failure.
Assert.assertTrue(Utils.waitForMessageCount(messageStoreAdminClient, MESSAGE_STORE_NAME, 40, 30000),
"Messsages are missing or repeated");
MessageInfo info[] = messageStoreAdminClient.getPaginatedMessages(MESSAGE_STORE_NAME, 0);
String sendEnvelope =
"<?xml version='1.0' encoding='utf-8'?><soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\"><soapenv:Body><ns:getSimpleQuote xmlns:ns=\"http://services.samples\"><ns:symbol>WSO2</ns:symbol></ns:getSimpleQuote></soapenv:Body></soapenv:Envelope>";
OMElement sendElement = AXIOMUtil.stringToOM(sendEnvelope);
// Walk pages 0..4 of the store and compare every stored SOAP message.
for (int i = 1; i <= 4; i++) {
for (int j = 0; j < info.length; j++) {
OMElement stored = AXIOMUtil.stringToOM(info[j].getSoapXml());
// verify whether the SOAP message is equivalent to what was
// mediated
Assert.assertEquals(sendElement.toString(), stored.toString());
}
info = messageStoreAdminClient.getPaginatedMessages(MESSAGE_STORE_NAME, i);
}
}
@AfterClass(alwaysRun = true)
public void close() throws Exception {
clear();
messageStoreAdminClient = null;
super.cleanup();
}
// creates a message store and polls (up to ~50s) until it is visible
public void initialize() throws Exception {
OMElement messageStore =
AXIOMUtil.stringToOM("<messageStore xmlns=\"http://ws.apache.org/ns/synapse\" name=\"" +
MESSAGE_STORE_NAME +
"\">" +
"<parameter name=\"abc\">10</parameter>" +
"</messageStore>");
addMessageStore(messageStore);
messageStores = messageStoreAdminClient.getMessageStores();
// addEndpoint is a a asynchronous call, it will take some time to write
// to a registry
int i = 0;
boolean found = false;
for (i = 0; i < 50; i++) {
Thread.sleep(1000);
if (messageStores != null) {
for (int j = 0; j < messageStores.length; j++) {
String string = messageStores[j];
if (string.equalsIgnoreCase(MESSAGE_STORE_NAME)) {
found = true;
isMessageStoreCreated = true;
break;
}
}
}
if (found) {
break;
}
messageStores = messageStoreAdminClient.getMessageStores();
}
// i reaches 50 only when the polling loop exhausted every attempt
if (i == 50) {
Assert.fail("message store creation failed");
}
}
// delete the message store (only if this test created it)
public void clear() throws Exception {
if (isMessageStoreCreated) {
esbUtils.deleteMessageStore(contextUrls.getBackEndUrl(), getSessionCookie(), MESSAGE_STORE_NAME);
}
}
// Sends 4 simple-quote requests to the main sequence; used concurrently.
class Sender extends Thread {
private StockQuoteClient client = new StockQuoteClient();
@Override
public void run() {
for (int i = 0; i < 4; i++) {
try {
client.sendSimpleQuoteRequest(getMainSequenceURL(), null, "WSO2");
} catch (Exception e) {
// best-effort send: a failed request simply yields fewer stored
// messages, which the 40-message assertion will catch
}
}
}
}
}
arvindsv/gocd | config/config-server/src/main/java/com/thoughtworks/go/config/GoConfigMigrationResult.java | 1608 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
/**
 * Immutable outcome of a Go config migration attempt: a validity category
 * plus an optional human-readable message. Instances are normally obtained
 * through the static factory methods.
 */
public class GoConfigMigrationResult {
    /** Outcome categories for a config migration attempt. */
    enum Validity {FAILED_UPGRADE, SUCCESS, UNEXPECTED_FAILURE }

    /** The migration outcome; assigned once in the constructor, so final. */
    private final Validity validity;

    /** Detail message; null for a successful migration. */
    private final String message;

    public GoConfigMigrationResult(Validity validity, String message) {
        this.validity = validity;
        this.message = message;
    }

    /** Creates a result for a migration that failed during the upgrade step. */
    public static GoConfigMigrationResult failedToUpgrade(String message) {
        return new GoConfigMigrationResult(Validity.FAILED_UPGRADE, message);
    }

    /** Creates a successful result; carries no message. */
    public static GoConfigMigrationResult success() {
        return new GoConfigMigrationResult(Validity.SUCCESS, null);
    }

    /** Creates a result for an unanticipated failure outside the upgrade step. */
    public static GoConfigMigrationResult unexpectedFailure(String message) {
        return new GoConfigMigrationResult(Validity.UNEXPECTED_FAILURE, message);
    }

    /** @return true only when the migration failed during upgrade. */
    public boolean isUpgradeFailure() {
        // Enum constants are singletons: identity comparison is the idiomatic
        // check and is also null-safe for the left operand.
        return validity == Validity.FAILED_UPGRADE;
    }

    public String message() {
        return message;
    }
}
| apache-2.0 |
wwjiang007/alluxio | tests/src/test/java/alluxio/client/fs/concurrent/ConcurrentDeleteIntegrationTest.java | 9181 | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.fs.concurrent;
import alluxio.AlluxioURI;
import alluxio.AuthenticatedUserRule;
import alluxio.Constants;
import alluxio.UnderFileSystemFactoryRegistryRule;
import alluxio.client.file.FileSystem;
import alluxio.client.file.URIStatus;
import alluxio.conf.PropertyKey;
import alluxio.conf.ServerConfiguration;
import alluxio.grpc.CreateDirectoryPOptions;
import alluxio.grpc.CreateFilePOptions;
import alluxio.grpc.WritePType;
import alluxio.master.file.FileSystemMaster;
import alluxio.testutils.BaseIntegrationTest;
import alluxio.testutils.LocalAlluxioClusterResource;
import alluxio.testutils.underfs.sleeping.SleepingUnderFileSystemFactory;
import alluxio.testutils.underfs.sleeping.SleepingUnderFileSystemOptions;
import com.google.common.io.Files;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import java.util.List;
/**
* Tests to validate the concurrency in {@link FileSystemMaster}. These tests all use a local
* path as the under storage system.
*
* The tests validate the correctness of concurrent operations, ie. no corrupted/partial state is
* exposed, through a series of concurrent operations followed by verification of the final
* state, or inspection of the in-progress state as the operations are carried out.
*
* The tests also validate that operations are concurrent by injecting a short sleep in the
* critical code path. Tests will timeout if the critical section is performed serially.
*/
public class ConcurrentDeleteIntegrationTest extends BaseIntegrationTest {
/** User the authenticated-user rule runs tests as. */
private static final String TEST_USER = "test";
/** Number of concurrent operations (and client pool size) exercised per test. */
private static final int CONCURRENCY_FACTOR = 50;
/**
 * Duration to sleep in the injected under-filesystem calls (mkdirs/isDirectory,
 * see the class rule below) to make serialized execution visibly slow.
 */
private static final long SLEEP_MS = Constants.SECOND_MS;
/** Timeout for the concurrent test after which we will mark the test as failed. */
private static final long LIMIT_MS = SLEEP_MS * CONCURRENCY_FACTOR / 2;
/**
 * Options to mark a created file as persisted. Note that this does not actually persist the
 * file but flag the file to be treated as persisted, which will invoke ufs operations.
 */
private static CreateFilePOptions sCreatePersistedFileOptions =
CreateFilePOptions.newBuilder().setWriteType(WritePType.THROUGH).build();
// Directory counterpart of the options above (write-through to the UFS).
private static CreateDirectoryPOptions sCreatePersistedDirOptions =
CreateDirectoryPOptions.newBuilder().setWriteType(WritePType.THROUGH).build();
// Client handle, created fresh in before() for each test.
private FileSystem mFileSystem;
// Local temp directory backing the sleeping UFS mount.
private String mLocalUfsPath = Files.createTempDir().getAbsolutePath();
@Rule
public AuthenticatedUserRule mAuthenticatedUser = new AuthenticatedUserRule(TEST_USER,
ServerConfiguration.global());
// Cluster mounts the sleeping UFS at the root and sizes the client pool to
// match the concurrency factor so all operations can run simultaneously.
@Rule
public LocalAlluxioClusterResource mLocalAlluxioClusterResource =
new LocalAlluxioClusterResource.Builder().setProperty(PropertyKey.MASTER_MOUNT_TABLE_ROOT_UFS,
"sleep://" + mLocalUfsPath).setProperty(PropertyKey
.USER_FILE_MASTER_CLIENT_POOL_SIZE_MAX, CONCURRENCY_FACTOR).build();
// Registers the sleeping UFS factory; each mkdirs/isDirectory call sleeps SLEEP_MS.
@ClassRule
public static UnderFileSystemFactoryRegistryRule sUnderfilesystemfactoryregistry =
new UnderFileSystemFactoryRegistryRule(new SleepingUnderFileSystemFactory(
new SleepingUnderFileSystemOptions().setMkdirsMs(SLEEP_MS).setIsDirectoryMs(SLEEP_MS)));
@Before
public void before() {
// Fresh client per test, bound to the current global server configuration.
mFileSystem = FileSystem.Factory.create(ServerConfiguration.global());
}
/**
 * Tests concurrent deletes within the root do not block on each other:
 * 50 files are deleted in parallel and must all finish within LIMIT_MS,
 * which is far less than 50 serialized sleeping-UFS calls would take.
 */
@Test
public void rootConcurrentDelete() throws Exception {
final int numThreads = CONCURRENCY_FACTOR;
AlluxioURI[] paths = new AlluxioURI[numThreads];
for (int i = 0; i < numThreads; i++) {
paths[i] = new AlluxioURI("/file" + i);
mFileSystem.createFile(paths[i], sCreatePersistedFileOptions).close();
}
// Fire all deletes concurrently; errors from any thread are collected.
List<Throwable> errors = ConcurrentFileSystemMasterUtils
.unaryOperation(mFileSystem, ConcurrentFileSystemMasterUtils.UnaryOperation.DELETE, paths,
LIMIT_MS);
if (!errors.isEmpty()) {
Assert.fail("Encountered " + errors.size() + " errors, the first one is " + errors.get(0));
}
// Every file must be gone from the root afterwards.
List<URIStatus> files = mFileSystem.listStatus(new AlluxioURI("/"));
Assert.assertEquals(0, files.size());
}
/**
 * Tests concurrent deletes within a folder do not block on each other:
 * 50 files under a shared parent directory are deleted in parallel.
 */
@Test
public void folderConcurrentDelete() throws Exception {
final int numThreads = CONCURRENCY_FACTOR;
AlluxioURI[] paths = new AlluxioURI[numThreads];
AlluxioURI dir = new AlluxioURI("/dir");
mFileSystem.createDirectory(dir);
for (int i = 0; i < numThreads; i++) {
paths[i] = dir.join("/file" + i);
mFileSystem.createFile(paths[i], sCreatePersistedFileOptions).close();
}
// Fire all deletes concurrently; errors from any thread are collected.
List<Throwable> errors = ConcurrentFileSystemMasterUtils
.unaryOperation(mFileSystem, ConcurrentFileSystemMasterUtils.UnaryOperation.DELETE, paths,
LIMIT_MS);
if (!errors.isEmpty()) {
Assert.fail("Encountered " + errors.size() + " errors, the first one is " + errors.get(0));
}
// The parent directory must be empty afterwards.
List<URIStatus> files = mFileSystem.listStatus(dir);
Assert.assertEquals(0, files.size());
}
/**
 * Deletes files spread across three nested directories (/dir1, /dir1/dir2,
 * /dir1/dir2/dir3) in parallel and verifies that deletes with a shared path prefix do
 * not block each other. Afterwards each directory must contain only its child
 * directory (or nothing, for the innermost one).
 */
@Test
public void prefixConcurrentDelete() throws Exception {
  final int fileCount = CONCURRENCY_FACTOR;
  AlluxioURI dir1 = new AlluxioURI("/dir1");
  AlluxioURI dir2 = new AlluxioURI("/dir1/dir2");
  AlluxioURI dir3 = new AlluxioURI("/dir1/dir2/dir3");
  // Create the hierarchy top-down, then round-robin the files across the three levels.
  AlluxioURI[] nestedDirs = {dir1, dir2, dir3};
  for (AlluxioURI dir : nestedDirs) {
    mFileSystem.createDirectory(dir);
  }
  AlluxioURI[] targets = new AlluxioURI[fileCount];
  for (int idx = 0; idx < fileCount; idx++) {
    targets[idx] = nestedDirs[idx % 3].join("/file" + idx);
    mFileSystem.createFile(targets[idx], sCreatePersistedFileOptions).close();
  }
  List<Throwable> failures = ConcurrentFileSystemMasterUtils.unaryOperation(
      mFileSystem, ConcurrentFileSystemMasterUtils.UnaryOperation.DELETE, targets, LIMIT_MS);
  if (!failures.isEmpty()) {
    Assert.fail("Encountered " + failures.size() + " errors, the first one is " + failures.get(0));
  }
  // Only the child directory should survive at each level.
  List<URIStatus> children = mFileSystem.listStatus(dir1);
  Assert.assertEquals(1, children.size());
  Assert.assertEquals("dir2", children.get(0).getName());
  children = mFileSystem.listStatus(dir2);
  Assert.assertEquals(1, children.size());
  Assert.assertEquals("dir3", children.get(0).getName());
  Assert.assertEquals(0, mFileSystem.listStatus(dir3).size());
}
/**
 * Races {@code CONCURRENCY_FACTOR} threads deleting the very same file and verifies
 * that exactly one delete succeeds while every other attempt reports an error.
 */
@Test
public void sameFileConcurrentDelete() throws Exception {
  final int attempts = CONCURRENCY_FACTOR;
  final AlluxioURI[] targets = new AlluxioURI[attempts];
  for (int idx = 0; idx < attempts; idx++) {
    targets[idx] = new AlluxioURI("/file");
  }
  // Create the single file that all threads will race to delete.
  mFileSystem.createFile(targets[0], sCreatePersistedFileOptions).close();
  List<Throwable> failures = ConcurrentFileSystemMasterUtils.unaryOperation(
      mFileSystem, ConcurrentFileSystemMasterUtils.UnaryOperation.DELETE, targets, LIMIT_MS);
  // All but one delete must fail.
  Assert.assertEquals(attempts - 1, failures.size());
  Assert.assertEquals(0, mFileSystem.listStatus(new AlluxioURI("/")).size());
}
/**
 * Races {@code CONCURRENCY_FACTOR} threads deleting the very same directory and
 * verifies that exactly one delete succeeds while every other attempt reports an error.
 */
@Test
public void sameDirConcurrentDelete() throws Exception {
  final int attempts = CONCURRENCY_FACTOR;
  final AlluxioURI[] targets = new AlluxioURI[attempts];
  for (int idx = 0; idx < attempts; idx++) {
    targets[idx] = new AlluxioURI("/dir");
  }
  // Create the single (persisted) directory that all threads will race to delete.
  mFileSystem.createDirectory(targets[0], sCreatePersistedDirOptions);
  List<Throwable> failures = ConcurrentFileSystemMasterUtils.unaryOperation(
      mFileSystem, ConcurrentFileSystemMasterUtils.UnaryOperation.DELETE, targets, LIMIT_MS);
  // All but one delete must fail.
  Assert.assertEquals(attempts - 1, failures.size());
  Assert.assertEquals(0, mFileSystem.listStatus(new AlluxioURI("/")).size());
}
}
| apache-2.0 |
Luca-spopo/cloudsim_project | source/cloudsim-cloudsim-4.0/modules/cloudsim/src/test/java/org/cloudbus/cloudsim/TimeSharedProblemDetector.java | 8448 | package org.cloudbus.cloudsim;
/*
* Title: CloudSim Toolkit
* Description: CloudSim (Cloud Simulation) Toolkit for Modeling and Simulation
* of Clouds
* Licence: GPL - http://www.gnu.org/copyleft/gpl.html
*
* Copyright (c) 2009, The University of Melbourne, Australia
*/
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.LinkedList;
import java.util.List;
import org.cloudbus.cloudsim.Cloudlet;
import org.cloudbus.cloudsim.CloudletSchedulerTimeShared;
import org.cloudbus.cloudsim.Datacenter;
import org.cloudbus.cloudsim.DatacenterBroker;
import org.cloudbus.cloudsim.DatacenterCharacteristics;
import org.cloudbus.cloudsim.Storage;
import org.cloudbus.cloudsim.UtilizationModel;
import org.cloudbus.cloudsim.UtilizationModelFull;
import org.cloudbus.cloudsim.core.CloudSim;
import org.cloudbus.cloudsim.provisioners.BwProvisionerSimple;
import org.cloudbus.cloudsim.provisioners.PeProvisionerSimple;
import org.cloudbus.cloudsim.provisioners.RamProvisionerSimple;
/**
 * Reproduces a time-shared scheduling problem: a datacenter with a single single-core
 * host (1000 MIPS, {@code VmSchedulerTimeSharedOverSubscription}) runs three 1000-MIPS
 * VMs, each executing one cloudlet under {@code CloudletSchedulerTimeShared}. The host
 * PE is therefore oversubscribed by a factor of three, which is the scenario this
 * class probes.
 *
 * <p>Note: the original code was copied from CloudSimExample1 and still logged that
 * name and claimed to run "one cloudlet"; both have been corrected.
 */
public class TimeSharedProblemDetector {

  /** The cloudlets submitted to the broker (one per VM). */
  private static List<Cloudlet> cloudletList;

  /** The VMs submitted to the broker. */
  private static List<Vm> vmlist;

  /** Number of VMs, and of cloudlets (one cloudlet is pinned to each VM). */
  private static final int VM_COUNT = 3;

  /**
   * Runs the complete scenario: initializes CloudSim, creates the datacenter, broker,
   * VMs and cloudlets, runs the simulation and prints per-cloudlet results.
   *
   * @param args unused
   */
  public static void main(String[] args) {
    Log.printLine("Starting TimeSharedProblemDetector...");

    try {
      // CloudSim.init must be called before any simulation entity is created.
      int num_user = 1; // number of cloud users
      Calendar calendar = Calendar.getInstance();
      boolean trace_flag = false; // do not trace events
      CloudSim.init(num_user, calendar, trace_flag);

      // Datacenters are the resource providers; at least one is required.
      @SuppressWarnings("unused")
      Datacenter datacenter0 = createDatacenter("Datacenter_0");

      // The broker mediates between the user's workload and the datacenters.
      DatacenterBroker broker = createBroker();
      int brokerId = broker.getId();

      // Shared VM description; every VM requests the full MIPS capacity of the
      // single host PE, producing 3x oversubscription.
      int mips = 1000;
      long size = 10000; // image size (MB)
      int ram = 512; // vm memory (MB)
      long bw = 1000;
      int pesNumber = 1; // number of cpus
      String vmm = "Xen"; // VMM name

      vmlist = new ArrayList<Vm>();
      for (int vmId = 0; vmId < VM_COUNT; vmId++) {
        vmlist.add(new Vm(vmId, brokerId, mips, pesNumber, ram, bw, size, vmm,
            new CloudletSchedulerTimeShared()));
      }
      broker.submitVmList(vmlist);

      // One cloudlet per VM, all with identical characteristics.
      long length = 400000;
      long fileSize = 300;
      long outputSize = 300;
      UtilizationModel utilizationModel = new UtilizationModelFull();

      cloudletList = new ArrayList<Cloudlet>();
      for (int id = 0; id < VM_COUNT; id++) {
        Cloudlet cloudlet = new Cloudlet(id, length, pesNumber, fileSize, outputSize,
            utilizationModel, utilizationModel, utilizationModel);
        cloudlet.setUserId(brokerId);
        cloudlet.setVmId(id); // pin cloudlet i to VM i
        cloudletList.add(cloudlet);
      }
      broker.submitCloudletList(cloudletList);

      CloudSim.startSimulation();
      CloudSim.stopSimulation();

      // Print results once the simulation is over.
      List<Cloudlet> newList = broker.getCloudletReceivedList();
      printCloudletList(newList);

      Log.printLine("TimeSharedProblemDetector finished!");
    } catch (Exception e) {
      e.printStackTrace();
      Log.printLine("The simulation has been terminated due to an unexpected error");
    }
  }

  /**
   * Creates a datacenter with a single one-core host.
   *
   * <p>The host has one 1000-MIPS PE, 2 GB RAM, and uses
   * {@code VmSchedulerTimeSharedOverSubscription}, so more VM MIPS can be admitted
   * than the PE physically provides.
   *
   * @param name the datacenter name
   * @return the datacenter, or {@code null} if construction failed
   */
  private static Datacenter createDatacenter(String name) {
    // 1. The machines that will populate the datacenter.
    List<Host> hostList = new ArrayList<Host>();

    // 2./3. A single PE (core) rated at 1000 MIPS.
    List<Pe> peList = new ArrayList<Pe>();
    int mips = 1000;
    peList.add(new Pe(0, new PeProvisionerSimple(mips))); // Pe id and MIPS rating

    // 4. One host holding that PE.
    int hostId = 0;
    int ram = 2048; // host memory (MB)
    long storage = Consts.MILLION; // host storage
    int bw = 10000;
    hostList.add(
        new Host(
            hostId,
            new RamProvisionerSimple(ram),
            new BwProvisionerSimple(bw),
            storage,
            peList,
            new VmSchedulerTimeSharedOverSubscription(peList)));

    // 5. Datacenter characteristics: architecture, OS, machines, time zone and
    // prices (G$/Pe time unit).
    String arch = "x86"; // system architecture
    String os = "Linux"; // operating system
    String vmm = "Xen";
    double time_zone = 10.0; // time zone this resource is located in
    double cost = 3.0; // cost of using processing in this resource
    double costPerMem = 0.05; // cost of using memory
    double costPerStorage = 0.001; // cost of using storage
    double costPerBw = 0.0; // cost of using bandwidth
    LinkedList<Storage> storageList = new LinkedList<Storage>(); // no SAN devices
    DatacenterCharacteristics characteristics = new DatacenterCharacteristics(
        arch, os, vmm, hostList, time_zone, cost, costPerMem,
        costPerStorage, costPerBw);

    // 6. Finally, create the datacenter itself.
    Datacenter datacenter = null;
    try {
      datacenter = new Datacenter(name, characteristics,
          new VmAllocationPolicySimple(hostList), storageList, 0);
    } catch (Exception e) {
      e.printStackTrace();
    }
    return datacenter;
  }

  /**
   * Creates the broker that submits VMs and cloudlets on behalf of the user. Users
   * are encouraged to implement their own broker policies for richer scenarios.
   *
   * @return the datacenter broker, or {@code null} if construction failed
   */
  private static DatacenterBroker createBroker() {
    DatacenterBroker broker = null;
    try {
      broker = new DatacenterBroker("Broker");
    } catch (Exception e) {
      e.printStackTrace();
      return null;
    }
    return broker;
  }

  /**
   * Prints one result row per received cloudlet. Only cloudlets whose status is
   * {@code Cloudlet.SUCCESS} get their datacenter, VM and timing columns printed.
   *
   * @param list the cloudlets received back from the broker
   */
  private static void printCloudletList(List<Cloudlet> list) {
    String indent = "    ";
    Log.printLine();
    Log.printLine("========== OUTPUT ==========");
    Log.printLine("Cloudlet ID" + indent + "STATUS" + indent
        + "Data center ID" + indent + "VM ID" + indent + "Time" + indent
        + "Start Time" + indent + "Finish Time");

    DecimalFormat dft = new DecimalFormat("###.##");
    for (Cloudlet cloudlet : list) {
      Log.print(indent + cloudlet.getCloudletId() + indent + indent);

      if (cloudlet.getCloudletStatus() == Cloudlet.SUCCESS) {
        Log.print("SUCCESS");
        Log.printLine(indent + indent + cloudlet.getResourceId()
            + indent + indent + indent + cloudlet.getVmId()
            + indent + indent
            + dft.format(cloudlet.getActualCPUTime()) + indent
            + indent + dft.format(cloudlet.getExecStartTime())
            + indent + indent
            + dft.format(cloudlet.getFinishTime()));
      }
    }
  }
}
| apache-2.0 |
apache/santuario-java | src/test/java/org/apache/xml/security/test/stax/c14n/Canonicalizer11Test.java | 24214 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.xml.security.test.stax.c14n;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLResolver;
import javax.xml.stream.XMLStreamException;

import org.apache.xml.security.stax.ext.stax.XMLSecEvent;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer11_OmitCommentsTransformer;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer11_WithCommentsTransformer;
import org.apache.xml.security.stax.impl.transformer.canonicalizer.CanonicalizerBase;
import org.apache.xml.security.test.stax.utils.XMLSecEventAllocator;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
*/
public class Canonicalizer11Test {
private XMLInputFactory xmlInputFactory;
public Canonicalizer11Test() throws Exception {
this.xmlInputFactory = XMLInputFactory.newInstance();
this.xmlInputFactory.setEventAllocator(new XMLSecEventAllocator());
XMLResolver xmlResolver = new XMLResolver() {
@Override
public Object resolveEntity(String publicID, String systemID, String baseURI, String namespace) throws XMLStreamException {
return this.getClass().getClassLoader().getResourceAsStream(
"org/apache/xml/security/c14n/in/" + systemID);
}
};
this.xmlInputFactory.setXMLResolver(xmlResolver);
}
/**
* 3.1 PIs, Comments, and Outside of Document Element
*/
@Test
public void test31withCommentsSubtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/31_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/31_c14n-comments.xml");
c14nAndCompare(fileIn, fileRef, false);
}
/**
* 3.1 PIs, Comments, and Outside of Document Element
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-OutsideDoc">the example from the spec</A>
*/
@Test
public void test31subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/31_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/31_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.2 Whitespace in Document Content
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-WhitespaceInContent">the example from the spec</A>
*/
@Test
public void test32subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/32_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/32_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.3 Start and End Tags
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-SETags">the example from the spec</A>
*/
@Test
public void test33subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/33_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/33_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.4 Character Modifications and Character References
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-Chars">the example from the spec</A>
*/
@Test
public void test34() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/34_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/34_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.4 Character Modifications and Character References (patched to run on validating Parsers)
* <p></p>
* <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119"> The spec</A> states that:
* <p></p>
* Note: The last element, normId, is well-formed but violates a validity
* constraint for attributes of type ID. For testing canonical XML
* implementations based on validating processors, remove the line
* containing this element from the input and canonical form. In general,
* XML consumers should be discouraged from using this feature of XML.
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-Chars">the example from the spec</A>
*/
@Test
public void test34subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/34_input_validatingParser.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/34_c14n_validatingParser.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.5 Entity References
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-Entities">the example from the spec</A>
*/
@Test
public void test35subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/35_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/35_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
/**
* 3.6 UTF-8 Encoding
*
* @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-UTF8">the example from the spec</A>
*/
@Test
public void test36subtree() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/36_input.xml");
URL fileRef =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/36_c14n.xml");
c14nAndCompare(fileIn, fileRef, true);
}
    /**
     * 3.8 Document Subsets and XML Attributes (modified)
     *
     * <p>Canonicalizes only the subtree rooted at the {http://www.ietf.org}e1 element
     * and compares the output with the spec's reference. Currently disabled.
     *
     * @see <A HREF="http://www.w3.org/TR/2007/CR-xml-c14n11-20070621/#Example-DocSubsetsXMLAttrs">the example from the spec</A>
     */
    @Test
    @org.junit.jupiter.api.Disabled
    public void test38() throws Exception {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        Canonicalizer11_OmitCommentsTransformer c = new Canonicalizer11_OmitCommentsTransformer();
        c.setOutputStream(baos);
        XMLEventReader xmlSecEventReader = xmlInputFactory.createXMLEventReader(
                this.getClass().getClassLoader().getResourceAsStream(
                        "org/apache/xml/security/c14n/in/38_input.xml")
        );

        XMLSecEvent xmlSecEvent = null;
        // Skip events until the start tag of the e1 subtree; the start-element event
        // itself is left in xmlSecEvent so the next loop feeds it to the transformer.
        while (xmlSecEventReader.hasNext()) {
            xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent();
            if (xmlSecEvent.isStartElement() && xmlSecEvent.asStartElement().getName().equals(new QName("http://www.ietf.org", "e1"))) {
                break;
            }
        }
        // Canonicalize every event up to AND including the matching end tag: transform
        // runs before the end-element check, so </e1> is still emitted before the break.
        while (xmlSecEventReader.hasNext()) {
            c.transform(xmlSecEvent);
            if (xmlSecEvent.isEndElement() && xmlSecEvent.asEndElement().getName().equals(new QName("http://www.ietf.org", "e1"))) {
                break;
            }
            xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent();
        }

        byte[] reference =
                getBytesFromResource(this.getClass().getClassLoader().getResource(
                        "org/apache/xml/security/c14n/in/38_c14n.xml"));
        // Binary compare; on mismatch both documents are dumped for diagnosis.
        boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray());
        if (!equals) {
            System.out.println("Expected:\n" + new String(reference, StandardCharsets.UTF_8));
            System.out.println("");
            System.out.println("Got:\n" + new String(baos.toByteArray(), StandardCharsets.UTF_8));
        }
        assertTrue(equals);
    }
// /**
// * 3.7 Document Subsets
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @see <A HREF="http://www.w3.org/TR/2001/PR-xml-c14n-20010119#Example-DocSubsets">the example from the spec</A>
// * @throws javax.xml.transform.TransformerException
// */
// public static void test37() throws Exception {
//
// String descri = "3.7 Document Subsets. (uncommented)";
// String fileIn = prefix + "in/37_input.xml";
// String fileRef = prefix + "in/37_c14n.xml";
// String fileOut = prefix + "out/xpath_37_output.xml";
// String c14nURI = Canonicalizer.ALGO_ID_C14N_OMIT_COMMENTS;
// boolean validating = true;
// Element xpath = null;
// DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
//
// dfactory.setNamespaceAware(true);
//
// DocumentBuilder db = dfactory.newDocumentBuilder();
// Document doc = db.newDocument();
//
// xpath = XMLUtils.createElementInSignatureSpace(doc, Constants._TAG_XPATH);
//
// xpath.setAttributeNS(Constants.NamespaceSpecNS, "xmlns:ietf", "http://www.ietf.org");
//
// //J-
// String xpathFromSpec =
// "(//. | //@* | //namespace::*)"
// + "[ "
// + "self::ietf:e1 or "
// + "(parent::ietf:e1 and not(self::text() or self::e2)) or "
// + "count(id(\"E3\")|ancestor-or-self::node()) = count(ancestor-or-self::node()) "
// + "]";
//
// //J+
// xpath.appendChild(doc.createTextNode(xpathFromSpec));
// assertTrue(descri,
// c14nAndCompare(fileIn, fileRef, fileOut, c14nURI, validating,
// xpath));
// }
//
/**
* Note: This specification supports the recent XML plenary decision to
* deprecate relative namespace URIs as follows: implementations of XML
* canonicalization MUST report an operation failure on documents containing
* relative namespace URIs. XML canonicalization MUST NOT be implemented
* with an XML parser that converts relative URIs to absolute URIs.
* <p></p>
* Implementations MUST report an operation failure on documents containing
* relative namespace URIs.
*/
@Test
public void testRelativeNSbehaviour() throws Exception {
URL fileIn =
this.getClass().getClassLoader().getResource(
"org/apache/xml/security/c14n/in/relative-ns-behaviour.xml");
try {
c14nAndCompare(fileIn, fileIn, true);
fail();
} catch (XMLStreamException cex) {
assertNotNull(cex);
}
}
//
// /**
// * Method testXMLAttributes1
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void testXMLAttributes1() throws Exception {
// //J-
// String input = ""
// + "<included xml:lang='de'>"
// + "<notIncluded xml:lang='de'>"
// + "<notIncluded xml:lang='uk'>"
// + "<included >"
// + "</included>"
// + "</notIncluded>"
// + "</notIncluded>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"de\">"
// + "<included xml:lang=\"uk\">"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method testXMLAttributes2
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void testXMLAttributes2() throws Exception {
// //J-
// String input = ""
// + "<included xml:lang='uk'>"
// + "<notIncluded xml:lang='de'>"
// + "<notIncluded xml:lang='uk'>"
// + "<included >"
// + "</included>"
// + "</notIncluded>"
// + "</notIncluded>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"uk\">"
// + "<included xml:lang=\"uk\">"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method testXMLAttributes3
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void testXMLAttributes3() throws Exception {
// //J-
// String input = ""
// + "<included xml:lang='de'>"
// + "<notIncluded xml:lang='de'>"
// + "<notIncluded xml:lang='uk'>"
// + "<included xml:lang='de'>"
// + "</included>"
// + "</notIncluded>"
// + "</notIncluded>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"de\">"
// + "<included xml:lang=\"de\">"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method testXMLAttributes4
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void _testXMLAttributes4() throws Exception {
// //J-
// String input = ""
// + "<included xml:lang='de'>"
// + "<included xml:lang='de'>"
// + "<notIncluded xml:lang='uk'>"
// + "<included >"
// + "</included>"
// + "</notIncluded>"
// + "</included>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"de\">"
// + "<included>"
// + "<included xml:lang=\"uk\">"
// + "</included>"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method testXMLAttributes5
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void _testXMLAttributes5() throws Exception {
// //J-
// String input = ""
// + "<included xml:lang='de'>"
// + "<included xml:lang='de'>"
// + "<notIncluded xml:space='preserve' xml:lang='uk'>"
// + "<included >"
// + "</included>"
// + "</notIncluded>"
// + "</included>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"de\">"
// + "<included>"
// + "<included xml:lang=\"uk\" xml:space=\"preserve\">"
// + "</included>"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method testXMLAttributes6
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// public static void _testXMLAttributes6() throws Exception {
// //J-
// String input = ""
// + "<included xml:space='preserve' xml:lang='de'>"
// + "<included xml:lang='de'>"
// + "<notIncluded xml:lang='uk'>"
// + "<included>"
// + "</included>"
// + "</notIncluded>"
// + "</included>"
// + "</included>";
//
// String definedOutput = ""
// + "<included xml:lang=\"de\" xml:space=\"preserve\">"
// + "<included>"
// + "<included xml:lang=\"uk\" xml:space=\"preserve\">"
// + "</included>"
// + "</included>"
// + "</included>";
// //J+
// assertTrue(doTestXMLAttributes(input, definedOutput));
// }
//
// /**
// * Method doTestXMLAttributes
// *
// * @param input
// * @param definedOutput
// * @param writeResultsToFile
// *
// * @throws CanonicalizationException
// * @throws java.io.FileNotFoundException
// * @throws java.io.IOException
// * @throws InvalidCanonicalizerException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerException
// */
// private static boolean doTestXMLAttributes(
// String input, String definedOutput) throws Exception {
//
// DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
//
// dfactory.setNamespaceAware(true);
// dfactory.setValidating(true);
//
// DocumentBuilder db = dfactory.newDocumentBuilder();
//
// db.setErrorHandler(new org.apache.xml.security.utils
// .IgnoreAllErrorHandler());
//
// Document doc = XMLUtils.read(new ByteArrayInputStream(input.getBytes()));
// Canonicalizer c14nizer =
// Canonicalizer.getInstance(Canonicalizer.ALGO_ID_C14N_OMIT_COMMENTS);
// CachedXPathAPI xpathAPI = new CachedXPathAPI();
//
// //XMLUtils.circumventBug2650(doc);
//
// NodeList nodes =
// xpathAPI.selectNodeList(doc, "(//*[local-name()='included'] | //@*[parent::node()[local-name()='included']])");
// byte[] result = c14nizer.canonicalizeXPathNodeSet(nodes);
// byte[] defined = definedOutput.getBytes();
// assertEquals(definedOutput, new String(result));
// return java.security.MessageDigest.isEqual(defined, result);
// }
/**
* Method c14nAndCompare
*/
private void c14nAndCompare(
URL fileIn, URL fileRef, boolean omitComments) throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CanonicalizerBase canonicalizerBase;
if (omitComments) {
canonicalizerBase = new Canonicalizer11_OmitCommentsTransformer();
canonicalizerBase.setOutputStream(baos);
} else {
canonicalizerBase = new Canonicalizer11_WithCommentsTransformer();
canonicalizerBase.setOutputStream(baos);
}
XMLEventReader xmlSecEventReader = xmlInputFactory.createXMLEventReader(fileIn.openStream());
while (xmlSecEventReader.hasNext()) {
XMLSecEvent xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent();
canonicalizerBase.transform(xmlSecEvent);
}
// org.xml.sax.InputSource refIs = resolver.resolveEntity(null, fileRef);
// byte[] refBytes = JavaUtils.getBytesFromStream(refIs.getByteStream());
byte[] refBytes = getBytesFromResource(fileRef);
// if everything is OK, result is true; we do a binary compare, byte by byte
boolean result = java.security.MessageDigest.isEqual(refBytes, baos.toByteArray());
if (!result) {
assertEquals(new String(baos.toByteArray(), StandardCharsets.UTF_8), new String(refBytes, StandardCharsets.UTF_8));
}
assertTrue(result);
}
public static byte[] getBytesFromResource(URL resource) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
InputStream inputStream = resource.openStream();
try {
byte[] buf = new byte[1024];
int len;
while ((len = inputStream.read(buf)) > 0) {
baos.write(buf, 0, len);
}
return baos.toByteArray();
} finally {
inputStream.close();
}
}
// /**
// * This method takes the input bytes as XML Document and converts it to an
// * UTF-16 encoded XML document which is serialized to byte[] and returned.
// *
// * @param input
// *
// * @throws java.io.IOException
// * @throws javax.xml.parsers.ParserConfigurationException
// * @throws org.xml.sax.SAXException
// * @throws javax.xml.transform.TransformerConfigurationException
// * @throws javax.xml.transform.TransformerException
// */
// public static byte[] convertToUTF16(byte[] input) throws Exception {
//
// //String ENCODING_ISO8859_1 = "ISO-8859-1";
// //String ENCODING_UTF8 = java.nio.charset.StandardCharsets.UTF_8;
// String ENCODING_UTF16 = "UTF-16";
// DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
// DocumentBuilder db = dbf.newDocumentBuilder();
// Document doc = XMLUtils.read(new ByteArrayInputStream(input));
// TransformerFactory tFactory = TransformerFactory.newInstance();
// Transformer transformer = tFactory.newTransformer();
//
// transformer.setOutputProperty(OutputKeys.ENCODING, ENCODING_UTF16);
// transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
//
// DOMSource source = new DOMSource(doc);
// ByteArrayOutputStream os = new ByteArrayOutputStream();
// StreamResult result = new StreamResult(os);
//
// transformer.transform(source, result);
//
// return os.toByteArray();
// }
} | apache-2.0 |
fitermay/intellij-community | plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/codeInspection/unused/defaultParameter/GrUnusedDefaultParameterInspection.java | 4752 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.codeInspection.unused.defaultParameter;
import com.intellij.codeInspection.CleanupLocalInspectionTool;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.codeInspection.compiler.RemoveElementQuickFix;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.impl.FindSuperElementsHelper;
import com.intellij.psi.search.searches.MethodReferencesSearch;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.groovy.lang.psi.GroovyElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.GroovyPsiElementVisitor;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrExpression;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameter;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.params.GrParameterList;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrMethod;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.typedef.members.GrReflectedMethod;
import org.jetbrains.plugins.groovy.lang.psi.util.PsiUtil;
import static org.jetbrains.plugins.groovy.codeInspection.GroovyInspectionBundle.message;
public class GrUnusedDefaultParameterInspection extends LocalInspectionTool implements CleanupLocalInspectionTool {
@NotNull
@Override
public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
return new GroovyPsiElementVisitor(new GroovyElementVisitor() {
@Override
public void visitExpression(@NotNull GrExpression expression) {
PsiElement expressionParent = expression.getParent();
if (!(expressionParent instanceof GrParameter)) return;
GrParameter parameter = (GrParameter)expressionParent;
if (parameter.getInitializerGroovy() != expression) return;
PsiElement parameterParent = parameter.getParent();
if (!(parameterParent instanceof GrParameterList)) return;
PsiElement parameterListParent = parameterParent.getParent();
if (!(parameterListParent instanceof GrMethod)) return;
GrMethod method = (GrMethod)parameterListParent;
if (PsiUtil.OPERATOR_METHOD_NAMES.contains(method.getName())) return;
if (isInitializerUnused(parameter, method)) {
holder.registerProblem(
expression, message("unused.default.parameter.message"), ProblemHighlightType.LIKE_UNUSED_SYMBOL,
new RemoveElementQuickFix(message("unused.default.parameter.fix"))
);
}
}
});
}
/**
* Consider following method:
* <pre>
* def foo(a = 1, b = 2, c = 3) {}
* </pre>
* Its reflected methods:
* <pre>
* def foo(a, b, c) {}
* def foo(a, b) {}
* def foo(a) {}
* def foo() {}
* </pre>
* Initializer for '<code>a</code>' is used only when <code>foo</code> called without arguments,
* we do not care if <code>foo</code> is called with one, two ot three arguments.
* <p>
* In case of <code>b</code> we search <code>foo()</code> or <code>foo(1)</code> calls.
* <p>
* The general idea: search usages of last N reflected methods where N is number of current parameter among other default parameters.
*/
private static boolean isInitializerUnused(@NotNull GrParameter parameter, @NotNull GrMethod method) {
int optionalParameterNumber = 0;
for (GrParameter someParameter : method.getParameters()) {
if (someParameter.isOptional()) optionalParameterNumber++;
if (someParameter == parameter) break;
}
GrReflectedMethod[] reflectedMethods = method.getReflectedMethods();
for (int i = reflectedMethods.length - optionalParameterNumber; i < reflectedMethods.length; i++) {
GrReflectedMethod reflectedMethod = reflectedMethods[i];
if (FindSuperElementsHelper.findSuperElements(reflectedMethod).length > 0) return false;
if (MethodReferencesSearch.search(reflectedMethod).findFirst() != null) return false;
}
return true;
}
}
| apache-2.0 |
daversilva/nfe | src/main/java/com/fincatto/nfe310/classes/nota/NFInfoCupomFiscalReferenciado.java | 1488 | package com.fincatto.nfe310.classes.nota;
import org.simpleframework.xml.Element;
import com.fincatto.nfe310.classes.NFBase;
import com.fincatto.nfe310.validadores.StringValidador;
public class NFInfoCupomFiscalReferenciado extends NFBase {
@Element(name = "mod", required = true)
private String modeloDocumentoFiscal;
@Element(name = "nECF", required = true)
private String numeroOrdemSequencialECF;
@Element(name = "nCOO", required = true)
private String numeroContadorOrdemOperacao;
public void setModeloDocumentoFiscal(final String modeloDocumentoFiscal) {
StringValidador.exatamente2(modeloDocumentoFiscal);
this.modeloDocumentoFiscal = modeloDocumentoFiscal;
}
public void setNumeroOrdemSequencialECF(final String numeroOrdemSequencialECF) {
StringValidador.exatamente3(numeroOrdemSequencialECF);
this.numeroOrdemSequencialECF = numeroOrdemSequencialECF;
}
public void setNumeroContadorOrdemOperacao(final String numeroContadorOrdemOperacao) {
StringValidador.exatamente6(numeroContadorOrdemOperacao);
this.numeroContadorOrdemOperacao = numeroContadorOrdemOperacao;
}
public String getModeloDocumentoFiscal() {
return this.modeloDocumentoFiscal;
}
public String getNumeroContadorOrdemOperacao() {
return this.numeroContadorOrdemOperacao;
}
public String getNumeroOrdemSequencialECF() {
return this.numeroOrdemSequencialECF;
}
} | apache-2.0 |
glammedia/phoenix | phoenix-core/src/test/java/org/apache/phoenix/hbase/index/write/TestWALRecoveryCaching.java | 15081 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.hbase.index.write;
import static org.apache.phoenix.query.BaseTest.setUpConfigForMiniCluster;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.RegionServerThread;
import org.apache.phoenix.hbase.index.IndexTestingUtils;
import org.apache.phoenix.hbase.index.Indexer;
import org.apache.phoenix.hbase.index.TableName;
import org.apache.phoenix.hbase.index.covered.example.ColumnGroup;
import org.apache.phoenix.hbase.index.covered.example.CoveredColumn;
import org.apache.phoenix.hbase.index.covered.example.CoveredColumnIndexSpecifierBuilder;
import org.apache.phoenix.hbase.index.covered.example.CoveredColumnIndexer;
import org.apache.phoenix.hbase.index.table.HTableInterfaceReference;
import org.apache.phoenix.hbase.index.util.IndexManagementUtil;
import org.apache.phoenix.hbase.index.write.recovery.PerRegionIndexWriteCache;
import org.apache.phoenix.hbase.index.write.recovery.StoreFailuresInCachePolicy;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import com.google.common.collect.Multimap;
/**
* When a regionserver crashes, its WAL is split and then replayed to the server. If the index
* region was present on the same server, we have to make a best effort to not kill the server for
* not succeeding on index writes while the index region is coming up.
*/
public class TestWALRecoveryCaching {
private static final Log LOG = LogFactory.getLog(TestWALRecoveryCaching.class);
private static final long ONE_SEC = 1000;
private static final long ONE_MIN = 60 * ONE_SEC;
private static final long TIMEOUT = ONE_MIN;
@Rule
public TableName testTable = new TableName();
private String getIndexTableName() {
return this.testTable.getTableNameString() + "_index";
}
// -----------------------------------------------------------------------------------------------
// Warning! The classes here rely on this static. Adding multiple tests to this class and running
// them concurrently could have unexpected results (including, but not limited to, odd failures
// and flapping tests).
// -----------------------------------------------------------------------------------------------
private static CountDownLatch allowIndexTableToRecover;
public static class IndexTableBlockingReplayObserver extends BaseRegionObserver {
@Override
public void preWALRestore(ObserverContext<RegionCoprocessorEnvironment> env, HRegionInfo info,
HLogKey logKey, WALEdit logEdit) throws IOException {
try {
LOG.debug("Restoring logs for index table");
if (allowIndexTableToRecover != null) {
allowIndexTableToRecover.await();
LOG.debug("Completed index table recovery wait latch");
}
} catch (InterruptedException e) {
Assert.fail("Should not be interrupted while waiting to allow the index to restore WALs.");
}
}
}
public static class ReleaseLatchOnFailurePolicy extends StoreFailuresInCachePolicy {
/**
* @param failedIndexEdits
*/
public ReleaseLatchOnFailurePolicy(PerRegionIndexWriteCache failedIndexEdits) {
super(failedIndexEdits);
}
@Override
public void handleFailure(Multimap<HTableInterfaceReference, Mutation> attempted,
Exception cause) throws IOException {
LOG.debug("Found index update failure!");
if (allowIndexTableToRecover != null) {
LOG.info("failed index write on WAL recovery - allowing index table to be restored.");
allowIndexTableToRecover.countDown();
}
super.handleFailure(attempted, cause);
}
}
//TODO: Jesse to fix
@SuppressWarnings("deprecation")
@Ignore("Configuration issue - valid test, just needs fixing")
@Test
public void testWaitsOnIndexRegionToReload() throws Exception {
HBaseTestingUtility util = new HBaseTestingUtility();
Configuration conf = util.getConfiguration();
setUpConfigForMiniCluster(conf);
// setup other useful stats
IndexTestingUtils.setupConfig(conf);
conf.setBoolean(Indexer.CHECK_VERSION_CONF_KEY, false);
// make sure everything is setup correctly
IndexManagementUtil.ensureMutableIndexingCorrectlyConfigured(conf);
// start the cluster with 2 rs
util.startMiniCluster(2);
HBaseAdmin admin = util.getHBaseAdmin();
// setup the index
byte[] family = Bytes.toBytes("family");
byte[] qual = Bytes.toBytes("qualifier");
byte[] nonIndexedFamily = Bytes.toBytes("nonIndexedFamily");
String indexedTableName = getIndexTableName();
ColumnGroup columns = new ColumnGroup(indexedTableName);
columns.add(new CoveredColumn(family, qual));
CoveredColumnIndexSpecifierBuilder builder = new CoveredColumnIndexSpecifierBuilder();
builder.addIndexGroup(columns);
// create the primary table w/ indexing enabled
HTableDescriptor primaryTable = new HTableDescriptor(testTable.getTableName());
primaryTable.addFamily(new HColumnDescriptor(family));
primaryTable.addFamily(new HColumnDescriptor(nonIndexedFamily));
builder.addArbitraryConfigForTesting(Indexer.RecoveryFailurePolicyKeyForTesting,
ReleaseLatchOnFailurePolicy.class.getName());
builder.build(primaryTable);
admin.createTable(primaryTable);
// create the index table
HTableDescriptor indexTableDesc = new HTableDescriptor(Bytes.toBytes(getIndexTableName()));
indexTableDesc.addCoprocessor(IndexTableBlockingReplayObserver.class.getName());
CoveredColumnIndexer.createIndexTable(admin, indexTableDesc);
// figure out where our tables live
ServerName shared =
ensureTablesLiveOnSameServer(util.getMiniHBaseCluster(), Bytes.toBytes(indexedTableName),
testTable.getTableName());
// load some data into the table
Put p = new Put(Bytes.toBytes("row"));
p.add(family, qual, Bytes.toBytes("value"));
HTable primary = new HTable(conf, testTable.getTableName());
primary.put(p);
primary.flushCommits();
// turn on the recovery latch
allowIndexTableToRecover = new CountDownLatch(1);
// kill the server where the tables live - this should trigger distributed log splitting
// find the regionserver that matches the passed server
List<HRegion> online = new ArrayList<HRegion>();
online.addAll(getRegionsFromServerForTable(util.getMiniHBaseCluster(), shared,
testTable.getTableName()));
online.addAll(getRegionsFromServerForTable(util.getMiniHBaseCluster(), shared,
Bytes.toBytes(indexedTableName)));
// log all the current state of the server
LOG.info("Current Server/Region paring: ");
for (RegionServerThread t : util.getMiniHBaseCluster().getRegionServerThreads()) {
// check all the conditions for the server to be done
HRegionServer server = t.getRegionServer();
if (server.isStopping() || server.isStopped() || server.isAborted()) {
LOG.info("\t== Offline: " + server.getServerName());
continue;
}
List<HRegionInfo> regions = ProtobufUtil.getOnlineRegions(server.getRSRpcServices());
LOG.info("\t" + server.getServerName() + " regions: " + regions);
}
LOG.debug("Killing server " + shared);
util.getMiniHBaseCluster().killRegionServer(shared);
LOG.debug("Waiting on server " + shared + "to die");
util.getMiniHBaseCluster().waitForRegionServerToStop(shared, TIMEOUT);
// force reassign the regions from the table
// LOG.debug("Forcing region reassignment from the killed server: " + shared);
// for (HRegion region : online) {
// util.getMiniHBaseCluster().getMaster().assign(region.getRegionName());
// }
System.out.println(" ====== Killed shared server ==== ");
// make a second put that (1), isn't indexed, so we can be sure of the index state and (2)
// ensures that our table is back up
Put p2 = new Put(p.getRow());
p2.add(nonIndexedFamily, Bytes.toBytes("Not indexed"), Bytes.toBytes("non-indexed value"));
primary.put(p2);
primary.flushCommits();
// make sure that we actually failed the write once (within a 5 minute window)
assertTrue("Didn't find an error writing to index table within timeout!",
allowIndexTableToRecover.await(ONE_MIN * 5, TimeUnit.MILLISECONDS));
// scan the index to make sure it has the one entry, (that had to be replayed from the WAL,
// since we hard killed the server)
Scan s = new Scan();
HTable index = new HTable(conf, getIndexTableName());
ResultScanner scanner = index.getScanner(s);
int count = 0;
for (Result r : scanner) {
LOG.info("Got index table result:" + r);
count++;
}
assertEquals("Got an unexpected found of index rows", 1, count);
// cleanup
scanner.close();
index.close();
primary.close();
util.shutdownMiniCluster();
}
/**
* @param cluster
* @param server
* @param table
* @return
*/
private List<HRegion> getRegionsFromServerForTable(MiniHBaseCluster cluster, ServerName server,
byte[] table) {
List<HRegion> online = Collections.emptyList();
for (RegionServerThread rst : cluster.getRegionServerThreads()) {
// if its the server we are going to kill, get the regions we want to reassign
if (rst.getRegionServer().getServerName().equals(server)) {
online = rst.getRegionServer().getOnlineRegions(org.apache.hadoop.hbase.TableName.valueOf(table));
break;
}
}
return online;
}
/**
* @param cluster
* @param indexTable
* @param primaryTable
*/
private ServerName ensureTablesLiveOnSameServer(MiniHBaseCluster cluster, byte[] indexTable,
byte[] primaryTable) throws Exception {
ServerName shared = getSharedServer(cluster, indexTable, primaryTable);
boolean tryIndex = true;
while (shared == null) {
// start killing servers until we get an overlap
Set<ServerName> servers;
byte[] table = null;
// switch which server we kill each time to get region movement
if (tryIndex) {
table = indexTable;
} else {
table = primaryTable;
}
servers = getServersForTable(cluster, table);
tryIndex = !tryIndex;
for (ServerName server : servers) {
// find the regionserver that matches the passed server
List<HRegion> online = getRegionsFromServerForTable(cluster, server, table);
LOG.info("Shutting down and reassigning regions from " + server);
cluster.stopRegionServer(server);
cluster.waitForRegionServerToStop(server, TIMEOUT);
// force reassign the regions from the table
for (HRegion region : online) {
cluster.getMaster().assignRegion(region.getRegionInfo());
}
LOG.info("Starting region server:" + server.getHostname());
cluster.startRegionServer(server.getHostname());
cluster.waitForRegionServerToStart(server.getHostname(), TIMEOUT);
// start a server to get back to the base number of servers
LOG.info("STarting server to replace " + server);
cluster.startRegionServer();
break;
}
shared = getSharedServer(cluster, indexTable, primaryTable);
}
return shared;
}
/**
* @param cluster
* @param indexTable
* @param primaryTable
* @return
* @throws Exception
*/
private ServerName getSharedServer(MiniHBaseCluster cluster, byte[] indexTable,
byte[] primaryTable) throws Exception {
Set<ServerName> indexServers = getServersForTable(cluster, indexTable);
Set<ServerName> primaryServers = getServersForTable(cluster, primaryTable);
Set<ServerName> joinSet = new HashSet<ServerName>(indexServers);
joinSet.addAll(primaryServers);
// if there is already an overlap, then find it and return it
if (joinSet.size() < indexServers.size() + primaryServers.size()) {
// find the first overlapping server
for (ServerName server : joinSet) {
if (indexServers.contains(server) && primaryServers.contains(server)) {
return server;
}
}
throw new RuntimeException(
"Couldn't find a matching server on which both the primary and index table live, "
+ "even though they have overlapping server sets");
}
return null;
}
private Set<ServerName> getServersForTable(MiniHBaseCluster cluster, byte[] table)
throws Exception {
List<HRegion> indexRegions = cluster.getRegions(table);
Set<ServerName> indexServers = new HashSet<ServerName>();
for (HRegion region : indexRegions) {
indexServers.add(cluster.getServerHoldingRegion(null, region.getRegionName()));
}
return indexServers;
}
} | apache-2.0 |
mswiderski/drools | drools-core/src/main/java/org/drools/process/instance/WorkItem.java | 1009 | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.process.instance;
import java.util.Map;
public interface WorkItem extends org.drools.runtime.process.WorkItem {
void setName(String name);
void setParameter(String name, Object value);
void setParameters(Map<String, Object> parameters);
void setResults(Map<String, Object> results);
void setState(int state);
void setProcessInstanceId(long processInstanceId);
}
| apache-2.0 |
romankagan/DDBWorkbench | platform/vcs-api/src/com/intellij/openapi/vcs/checkin/VcsCheckinHandlerFactory.java | 1779 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.checkin;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.CheckinProjectPanel;
import com.intellij.openapi.vcs.VcsKey;
import com.intellij.openapi.vcs.changes.CommitContext;
import org.jetbrains.annotations.NotNull;
public abstract class VcsCheckinHandlerFactory implements BaseCheckinHandlerFactory {
public static final ExtensionPointName<VcsCheckinHandlerFactory> EP_NAME = ExtensionPointName.create("com.intellij.vcsCheckinHandlerFactory");
private VcsKey myKey;
protected VcsCheckinHandlerFactory(@NotNull final VcsKey key) {
myKey = key;
}
@NotNull
@Override
public CheckinHandler createHandler(CheckinProjectPanel panel, CommitContext commitContext) {
if (! panel.vcsIsAffected(myKey.getName())) return CheckinHandler.DUMMY;
return createVcsHandler(panel);
}
@NotNull
protected abstract CheckinHandler createVcsHandler(CheckinProjectPanel panel);
public VcsKey getKey() {
return myKey;
}
@Override
public BeforeCheckinDialogHandler createSystemReadyHandler(Project project) {
return null;
}
}
| apache-2.0 |
howepeng/isis | core/specsupport/src/main/java/org/apache/isis/core/specsupport/specs/CukeGlueAbstract.java | 11631 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.isis.core.specsupport.specs;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.List;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import org.jmock.Sequence;
import org.jmock.States;
import org.jmock.internal.ExpectationBuilder;
import org.junit.Assert;
import cucumber.api.DataTable;
import cucumber.api.java.Before;
import org.apache.isis.applib.DomainObjectContainer;
import org.apache.isis.applib.services.wrapper.WrapperFactory;
import org.apache.isis.core.specsupport.scenarios.ScenarioExecution;
import org.apache.isis.core.specsupport.scenarios.ScenarioExecutionForUnit;
import org.apache.isis.core.specsupport.scenarios.ScenarioExecutionScope;
/**
* Base class for Cucumber-JVM step definitions.
*
* <p>
* Simply declares that an instance of {@link ScenarioExecution} (or a subclass)
* must be instantiated by the Cucumber-JVM runtime and injected into the step definitions.
*/
public abstract class CukeGlueAbstract {
/**
* Access the {@link ScenarioExecution} as setup through a previous call to {@link #before(ScenarioExecutionScope)}.
*
* <p>
* This corresponds, broadly, to the (Ruby) Cucumber's "World" object.
*/
protected ScenarioExecution scenarioExecution() {
if(ScenarioExecution.current() == null) {
throw new IllegalStateException("The scenario execution has not been set up; call #before(ScenarioExecutionScope) first");
}
return ScenarioExecution.current();
}
// //////////////////////////////////////
/**
* Intended to be called at the beginning of any 'when' (after all the 'given's)
* or at the beginning of any 'then' (after all the 'when's)
*
* <p>
* Simply {@link ScenarioExecution#endTran(boolean) ends any existing transaction} and
* then {@link ScenarioExecution#beginTran() starts a new one}.
*/
protected void nextTransaction() {
scenarioExecution().endTran(true);
scenarioExecution().beginTran();
}
// //////////////////////////////////////
/**
* Convenience method
*/
public Object getVar(String type, String id) {
return scenarioExecution().getVar(type, id);
}
/**
* Convenience method
*/
public <X> X getVar(String type, String id, Class<X> cls) {
return scenarioExecution().getVar(type, id ,cls);
}
/**
* Convenience method
*/
public void putVar(String type, String id, Object value) {
scenarioExecution().putVar(type, id, value);
}
/**
* Convenience method
*/
public void removeVar(String type, String id) {
scenarioExecution().removeVar(type, id);
}
/**
* Convenience method
*/
protected <T> T service(Class<T> cls) {
return scenarioExecution().service(cls);
}
/**
* Convenience method
*/
protected DomainObjectContainer container() {
return scenarioExecution().container();
}
/**
* Convenience method
*/
protected WrapperFactory wrapperFactory() {
return scenarioExecution().wrapperFactory();
}
/**
* Convenience method
*/
protected <T> T wrap(T obj) {
return wrapperFactory().wrap(obj);
}
/**
* Convenience method
*/
protected <T> T unwrap(T obj) {
return wrapperFactory().unwrap(obj);
}
/**
* Convenience method
* @return
*/
public boolean supportsMocks() {
return scenarioExecution().supportsMocks();
}
/**
* Convenience method
*/
public void checking(ExpectationBuilder expectations) {
scenarioExecution().checking(expectations);
}
/**
* Convenience method
*/
public void assertMocksSatisfied() {
scenarioExecution().assertIsSatisfied();
}
/**
* Convenience method
*/
public Sequence sequence(String name) {
return scenarioExecution().sequence(name);
}
/**
* Convenience method
*/
public States states(String name) {
return scenarioExecution().states(name);
}
// //////////////////////////////////////
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void assertTableEquals(final List listOfExpecteds, final Iterable iterableOfActuals) {
final List<Object> listOfActuals = Lists.newArrayList(iterableOfActuals);
assertThat(listOfActuals.size(), is(listOfExpecteds.size()));
final StringBuilder buf = new StringBuilder();
for (int i=0; i<listOfActuals.size(); i++) {
final Object actual = listOfActuals.get(i);
final Object expected = listOfExpecteds.get(i);
final Field[] expectedFields = expected.getClass().getDeclaredFields();
for (Field field : expectedFields) {
final String propertyName = field.getName();
final Object actualProp = getProperty(actual, propertyName );
final Object expectedProp = getProperty(expected, propertyName);
if(!Objects.equal(actualProp, expectedProp)) {
buf.append("#" + i + ": " + propertyName + ": " + expectedProp + " vs " + actualProp).append("\n");
}
}
}
if(buf.length() != 0) {
Assert.fail("\n" + buf.toString());
}
}
private static Object getProperty(Object obj, String propertyName) {
if(obj == null) {
return null;
}
final Class<? extends Object> cls = obj.getClass();
try {
final String methodName = "get" + capitalize(propertyName);
final Method method = cls.getMethod(methodName, new Class[]{});
if(method != null) {
return method.invoke(obj);
}
} catch (Exception e) {
// continue
}
try {
final String methodName = "is" + capitalize(propertyName);
final Method method = cls.getMethod(methodName, new Class[]{});
if(method != null) {
return method.invoke(obj);
}
} catch (Exception e) {
// continue
}
try {
final Field field = cls.getDeclaredField(propertyName);
if(field != null) {
if(!field.isAccessible()) {
field.setAccessible(true);
}
return field.get(obj);
}
} catch (Exception e) {
// continue
}
return null;
}
private static String capitalize(final String str) {
if (str == null || str.length() == 0) {
return str;
}
if (str.length() == 1) {
return str.toUpperCase();
}
return Character.toUpperCase(str.charAt(0)) + str.substring(1);
}
// //////////////////////////////////////
/**
* Indicate that a scenario is starting, and specify the {@link ScenarioExecutionScope scope}
* at which to run the scenario.
*
* <p>
* This method should be called from a "before" hook (a method annotated with
* Cucumber's {@link Before} annotation, in a step definition subclass. The tag
* should be appropriate for the scope specified. Typically this method should be delegated to
* twice, in two mutually exclusive before hooks.
*
* <p>
* Calling this method makes the {@link ScenarioExecution} available (via {@link #scenarioExecution()}).
* It also delegates to the scenario to {@link ScenarioExecution#beginTran() begin the transaction}.
* (Whether this actually does anything depends in implementation of the {@link ScenarioExecution}).
*
* <p>
* The boilerplate (to copy-n-paste as required) is:
* <pre>
* @cucumber.api.java.Before("@unit")
* public void beforeScenarioUnitScope() {
* before(ScenarioExecutionScope.UNIT);
* }
* @cucumber.api.java.Before("@integration")
* public void beforeScenarioIntegrationScope() {
* before(ScenarioExecutionScope.INTEGRATION);
* }
* </pre>
* The built-in {@link ScenarioExecutionScope#UNIT unit}-level scope will instantiate a
* {@link ScenarioExecutionForUnit}, while the built-in
* {@link ScenarioExecutionScope#INTEGRATION integration}-level scope instantiates
* <tt>ScenarioExecutionForIntegration</tt> (from the <tt>isis-core-integtestsupport</tt> module).
* The former provides access to domain services as mocks, whereas the latter wraps a running
* <tt>IsisSystemForTest</tt>.
*
* <p>
* If need be, it is also possible to define custom scopes, with a different implementation of
* {@link ScenarioExecution}. This might be done when unit testing where a large number of specs
* have similar expectations needing to be set on the mock domain services.
*
* <p>
* Not every class holding step definitions should have these hooks, only those that correspond to the logical
* beginning and end of scenario. As such, this method may only be called once per scenario execution
* (and fails fast if called more than once).
*/
protected void before(ScenarioExecutionScope scope) {
final ScenarioExecution scenarioExecution = scope.instantiate();
scenarioExecution.beginTran();
}
/**
* Indicate that a scenario is ending; the {@link ScenarioExecution} is discarded and no
* longer {@link #scenarioExecution() available}.
*
* <p>
* Before being discarded, the {@link ScenarioExecution} is delegated to
* in order to {@link ScenarioExecution#endTran(boolean) end the transaction}.
* (Whether this actually does anything depends in implementation of the {@link ScenarioExecution}).
*
* <p>
* The boilerplate (to copy-n-paste as required) is:
* <pre>
* @cucumber.api.java.After
* public void afterScenario(cucumber.api.Scenario sc) {
* after(sc);
* }
* </pre>
*
* <p>
* Not every class holding step definitions should have this hook, only those that correspond to the logical
* beginning and end of scenario. As such, this method may only be called once per scenario execution
* (and fails fast if called more than once).
*/
public void after(cucumber.api.Scenario sc) {
ScenarioExecution.current().endTran(!sc.isFailed());
}
}
| apache-2.0 |
Edward608/RxBinding | rxbinding-support-v4/src/androidTest/java/com/jakewharton/rxbinding2/support/v4/widget/RxSlidingPaneLayoutTest.java | 5405 | package com.jakewharton.rxbinding2.support.v4.widget;
import android.app.Instrumentation;
import android.support.test.InstrumentationRegistry;
import android.support.test.espresso.Espresso;
import android.support.test.espresso.contrib.CountingIdlingResource;
import android.support.test.espresso.matcher.BoundedMatcher;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.support.v4.widget.SlidingPaneLayout;
import android.view.View;
import com.jakewharton.rxbinding2.RecordingObserver;
import com.jakewharton.rxbinding2.UnsafeRunnable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.functions.Consumer;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.assertion.ViewAssertions.matches;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
// Instrumented tests for the RxSlidingPaneLayout bindings: the panelOpens() and
// panelSlides() observables and the open() consumer.
@RunWith(AndroidJUnit4.class) public class RxSlidingPaneLayoutTest {
  @Rule public final ActivityTestRule<RxSlidingPaneLayoutTestActivity> activityRule =
      new ActivityTestRule<>(RxSlidingPaneLayoutTestActivity.class);
  private final Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation();
  // Widget under test, taken from the launched activity.
  SlidingPaneLayout view;
  // Lets Espresso wait for the asynchronous pane open/close animations in open().
  CountingIdlingResource idler;
  @Before public void setUp() {
    RxSlidingPaneLayoutTestActivity activity = activityRule.getActivity();
    view = activity.slidingPaneLayout;
    idler = new CountingIdlingResource("counting idler");
    Espresso.registerIdlingResources(idler);
  }
  @After public void teardown() {
    Espresso.unregisterIdlingResources(idler);
  }
  // panelOpens() must emit once on subscription (false here, before any interaction),
  // once per open/close transition, and nothing after the subscription is disposed.
  @Test public void paneOpen() {
    RecordingObserver<Boolean> o = new RecordingObserver<>();
    RxSlidingPaneLayout.panelOpens(view)
        .subscribeOn(AndroidSchedulers.mainThread())
        .subscribe(o);
    assertFalse(o.takeNext());
    instrumentation.runOnMainSync(new Runnable() {
      @Override public void run() {
        view.openPane();
      }
    });
    assertTrue(o.takeNext());
    instrumentation.runOnMainSync(new Runnable() {
      @Override public void run() {
        view.closePane();
      }
    });
    assertFalse(o.takeNext());
    o.dispose();
    // After dispose, further pane changes must not be delivered.
    instrumentation.runOnMainSync(new Runnable() {
      @Override public void run() {
        view.openPane();
      }
    });
    o.assertNoMoreEvents();
  }
  // panelSlides() must emit slide offsets only while the pane is moving (no initial
  // emission on subscription), and stop emitting once disposed.
  @Test public void slides() {
    RecordingObserver<Float> o1 = new RecordingObserver<>();
    RxSlidingPaneLayout.panelSlides(view)
        .subscribeOn(AndroidSchedulers.mainThread())
        .subscribe(o1);
    o1.assertNoMoreEvents();
    instrumentation.runOnMainSync(new Runnable() {
      @Override public void run() {
        view.openPane();
      }
    });
    instrumentation.waitForIdleSync();
    // Opening slides the offset away from 0 (0 = fully closed).
    assertTrue(o1.takeNext() > 0f);
    o1.dispose();
    o1.assertNoMoreEvents();
    RecordingObserver<Float> o2 = new RecordingObserver<>();
    RxSlidingPaneLayout.panelSlides(view)
        .subscribeOn(AndroidSchedulers.mainThread())
        .subscribe(o2);
    o2.assertNoMoreEvents();
    instrumentation.runOnMainSync(new Runnable() {
      @Override public void run() {
        view.closePane();
      }
    });
    instrumentation.waitForIdleSync();
    // Closing slides the offset back below 1 (1 = fully open).
    assertTrue(o2.takeNext() < 1f);
    o2.dispose();
    o2.assertNoMoreEvents();
  }
  // The open() consumer must open the pane when given true and close it when given
  // false; the idling resource blocks Espresso until each animation callback fires.
  @Test public void open() {
    final Consumer<? super Boolean> open = RxSlidingPaneLayout.open(view);
    view.setPanelSlideListener(new SlidingPaneLayout.SimplePanelSlideListener() {
      @Override public void onPanelOpened(View panel) {
        idler.decrement();
      }
      @Override public void onPanelClosed(View panel) {
        idler.decrement();
      }
    });
    idler.increment();
    instrumentation.runOnMainSync(new UnsafeRunnable() {
      @Override protected void unsafeRun() throws Exception {
        open.accept(true);
      }
    });
    instrumentation.waitForIdleSync();
    onView(withId(view.getId())).check(matches(isOpen()));
    idler.increment();
    instrumentation.runOnMainSync(new UnsafeRunnable() {
      @Override protected void unsafeRun() throws Exception {
        open.accept(false);
      }
    });
    instrumentation.waitForIdleSync();
    onView(withId(view.getId())).check(matches(isClosed()));
    view.setPanelSlideListener(null);
  }
  // Matcher accepting a SlidingPaneLayout whose pane is currently open.
  private static Matcher<View> isOpen() {
    return new BoundedMatcher<View, SlidingPaneLayout>(SlidingPaneLayout.class) {
      @Override public void describeTo(Description description) {
        description.appendText("is pane open");
      }
      @Override public boolean matchesSafely(SlidingPaneLayout slidingPaneLayout) {
        return slidingPaneLayout.isOpen();
      }
    };
  }
  // Matcher accepting a SlidingPaneLayout whose pane is currently closed.
  private static Matcher<View> isClosed() {
    return new BoundedMatcher<View, SlidingPaneLayout>(SlidingPaneLayout.class) {
      @Override public void describeTo(Description description) {
        description.appendText("is pane closed");
      }
      @Override public boolean matchesSafely(SlidingPaneLayout slidingPaneLayout) {
        return !slidingPaneLayout.isOpen();
      }
    };
  }
}
| apache-2.0 |
Geomatys/sis | core/sis-feature/src/main/java/org/apache/sis/feature/FoliationRepresentation.java | 2662 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.feature;
/**
 * Specifies how the trajectory of a moving object is represented as feature instances:
 * either assembled into a single <cite>moving feature</cite>, or fragmented into a
 * series of static features.
 *
 * <div class="note"><b>Example:</b>
 * given the trajectories below:
 *
 * <table class="sis">
 *   <caption>Moving features example</caption>
 *   <tr><th>Identifier</th> <th>Time</th> <th>Trajectory</th> <th>Status</th></tr>
 *   <tr><td>John Smith</td> <td>8:00</td> <td>(3 4), (3 5)</td> <td>Walking</td></tr>
 *   <tr><td>Joe Blo</td>    <td>8:00</td> <td>(5 5), (6 6)</td> <td>Walking</td></tr>
 *   <tr><td>John Smith</td> <td>8:05</td> <td>(3 5), (3 9)</td> <td>Running</td></tr>
 * </table>
 *
 * John Smith's trajectory admits two representations: a single moving feature holding the
 * full path (3 4), (3 5), (3 9) together with some representation of time (for example an
 * additional temporal dimension on each coordinate) and a time-dependent "Status" property;
 * or two separate {@code Feature} instances, each usable as an ordinary static feature.
 * </div>
 *
 * This enumeration may be given to a {@link org.apache.sis.storage.DataStore} as a hint
 * about which of those two representations of moving features is desired.
 *
 * @author Martin Desruisseaux (Geomatys)
 * @version 1.0
 * @since 1.0
 * @module
 */
public enum FoliationRepresentation {
    /**
     * All points of a trajectory are assembled in a single {@code Feature} instance.
     * Every point on the trajectory may be at a different time, and properties may
     * be dynamic, i.e. have time-dependent values.
     */
    ASSEMBLED,
    /**
     * A trajectory is split across distinct {@code Feature} instances,
     * each of them handled as if it was a static feature.
     */
    FRAGMENTED
}
| apache-2.0 |
nybbs2003/jsyn | src/com/jsyn/unitgen/FilterStateVariable.java | 4482 | /*
* Copyright 2009 Phil Burk, Mobileer Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jsyn.unitgen;
import com.jsyn.ports.UnitInputPort;
import com.jsyn.ports.UnitOutputPort;
/**
 * A versatile state-variable filter based on the design described in Hal Chamberlain's
 * "Musical Applications of MicroProcessors". It is convenient because its frequency and
 * resonance can each be controlled by a single value. The "output" port of this filter
 * is the "lowPass" output multiplied by the "amplitude".
 *
 * @author Phil Burk (C) 2009 Mobileer Inc
 * @see FilterLowPass
 * @see FilterHighPass
 */
public class FilterStateVariable extends TunableFilter {
    /**
     * Amplitude of Output in the range of 0.0 to 1.0. SIGNAL_TYPE_RAW_SIGNED Defaults to 1.0
     * <P>
     * Note that the amplitude only affects the "output" port and not the lowPass, bandPass or
     * highPass signals. Use a MultiplyUnit if you need to scale those signals.
     */
    public UnitInputPort amplitude;
    /**
     * Controls feedback that causes self oscillation. Actually 1/Q - SIGNAL_TYPE_RAW_SIGNED in the
     * range of 0.0 to 1.0. Defaults to 0.125.
     */
    public UnitInputPort resonance;
    /**
     * Low pass filtered signal. Not affected by the amplitude port.
     */
    public UnitOutputPort lowPass;
    /**
     * Band pass filtered signal. Not affected by the amplitude port.
     */
    public UnitOutputPort bandPass;
    /**
     * High pass filtered signal. Not affected by the amplitude port.
     */
    public UnitOutputPort highPass;
    // Frequency coefficient derived from the frequency port: 2*sin(PI*f/sampleRate).
    private double normalizedFrequency;
    // Initialized to MAX_VALUE so the first generate() call recomputes the coefficient.
    private double lastFrequency = Double.MAX_VALUE;
    // Integrator states carried across blocks.
    private double lowState;
    private double bandState;

    /**
     * Constructs the filter with a default frequency of 440 Hz and registers
     * the resonance, amplitude and the three output ports.
     */
    public FilterStateVariable() {
        frequency.set(440.0);
        addPort(resonance = new UnitInputPort("Resonance", 0.2));
        addPort(amplitude = new UnitInputPort("Amplitude", 1.0));
        addPort(lowPass = new UnitOutputPort("LowPass"));
        addPort(bandPass = new UnitOutputPort("BandPass"));
        addPort(highPass = new UnitOutputPort("HighPass"));
    }

    @Override
    public void generate(int start, int limit) {
        double[] inputValues = input.getValues();
        double[] outputValues = output.getValues();
        double[] frequencyValues = frequency.getValues();
        double[] amplitudeValues = amplitude.getValues();
        double[] resonanceValues = resonance.getValues();
        double[] lowValues = lowPass.getValues();
        double[] highValues = highPass.getValues();
        double[] bandValues = bandPass.getValues();

        // The frequency port is only sampled once per block; recompute the
        // coefficient only when the frequency actually changed.
        double currentFrequency = frequencyValues[0];
        if (currentFrequency != lastFrequency) {
            lastFrequency = currentFrequency;
            normalizedFrequency = 2.0 * Math.sin(Math.PI * currentFrequency * getFramePeriod());
        }

        for (int i = start; i < limit; i++) {
            lowState += normalizedFrequency * bandState;
            // Clamp to [-1, +1] to keep the feedback loop from blowing up.
            if (lowState > 1.0) {
                lowState = 1.0;
            } else if (lowState < -1.0) {
                lowState = -1.0;
            }
            lowValues[i] = lowState;
            outputValues[i] = lowState * amplitudeValues[i];
            double highValue = inputValues[i] - (resonanceValues[i] * bandState) - lowState;
            highValues[i] = highValue;
            bandState += normalizedFrequency * highValue;
            bandValues[i] = bandState;
        }
    }
}
| apache-2.0 |
bbossgroups/bbossgroups-3.5 | bboss-core/test/org/frameworkset/soa/list/BaseInfoModel.java | 6189 | /**
* 功能说明:套餐包--基本信息
*
* 修改说明:新增
* 修改时间:2015-8.24
* 修 改 人:YAOJIAN
*/
package org.frameworkset.soa.list;
import java.io.Serializable;
/**
 * Package (tariff bundle) basic-information bean: a plain serializable holder for the
 * basic attributes of a package, exposed through standard getters and setters.
 */
public class BaseInfoModel extends BaseModel implements Serializable {
    /**
     * Serialization version.
     */
    private static final long serialVersionUID = 2015082511301231L;
    /**
     * Package id string used as the flag for deciding whether this is a newly added record.
     */
    private String id;
    /**
     * Package ID.
     */
    private Integer packageId;
    /**
     * Package name.
     */
    private String name;
    /**
     * Package type: 0 - main package, 1 - contract package, 2 - data-card package.
     */
    private String type;
    /**
     * Package description.
     */
    private String desc;
    /**
     * Network type: 20 - 2G, 30 - 3G, 40 - 4G.
     */
    private String netType;
    /**
     * Brand.
     */
    private String brand;
    /**
     * Effective (start) date.
     */
    private String startDate;
    /**
     * Expiration (end) date.
     */
    private String endDate;
    /**
     * Effective mode: 0 - immediately, 1 - according to the offset value and offset unit.
     */
    private String startTag;
    /**
     * Effective offset value.
     */
    private Integer startOffset;
    /**
     * Effective offset unit: 0 - day, 1 - calendar day, 2 - month, 3 - calendar month.
     */
    private String startUnit;
    /**
     * Expiration mode: 0 - immediately, 1 - according to the offset value and offset unit.
     */
    private String endTag;
    /**
     * Expiration offset value.
     */
    private Integer endOffset;
    /**
     * Expiration offset unit: 0 - day, 1 - calendar day, 2 - month, 3 - calendar month,
     * 4 - year, 5 - calendar year.
     */
    private String endUnit;
    /**
     * Payment flag: 0 - postpaid, 1 - prepaid, 2 - quasi-prepaid.
     */
    private String prepay;
    /**
     * Creation time.
     */
    private String createDate;
    /**
     * Access-type product code.
     */
    private Integer mainProdId;
    /**
     * Whether this is a freely combined package: 0 - not freely combined, 1 - freely combined.
     */
    private String compTag;
    /**
     * Whether this is a converged package: 0 - not converged, 1 - converged.
     */
    private String groupTag;
    /**
     * Whether expanded by default: 0 - collapsed by default, 1 - expanded by default.
     */
    private String needExp;
    /**
     * Version number.
     */
    private String version;
    /**
     * Package state: 0 - active (in use), 1 - deprecated.
     */
    private String state;
    /**
     * Last modified by (staff id).
     */
    private String staffId;
    /**
     * Last modifying department.
     */
    private String departId;
    /**
     * Last modification time.
     */
    private String updateDate;
    /**
     * Fee value, in cents (fen).
     */
    private double fee;
    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getType() {
        return type;
    }
    public void setType(String type) {
        this.type = type;
    }
    public String getDesc() {
        return desc;
    }
    public void setDesc(String desc) {
        this.desc = desc;
    }
    public String getNetType() {
        return netType;
    }
    public void setNetType(String netType) {
        this.netType = netType;
    }
    public String getBrand() {
        return brand;
    }
    public void setBrand(String brand) {
        this.brand = brand;
    }
    public String getStartDate() {
        return startDate;
    }
    public void setStartDate(String startDate) {
        this.startDate = startDate;
    }
    public String getEndDate() {
        return endDate;
    }
    public void setEndDate(String endDate) {
        this.endDate = endDate;
    }
    public String getStartTag() {
        return startTag;
    }
    public void setStartTag(String startTag) {
        this.startTag = startTag;
    }
    public String getStartUnit() {
        return startUnit;
    }
    public void setStartUnit(String startUnit) {
        this.startUnit = startUnit;
    }
    public String getEndTag() {
        return endTag;
    }
    public void setEndTag(String endTag) {
        this.endTag = endTag;
    }
    public String getEndUnit() {
        return endUnit;
    }
    public void setEndUnit(String endUnit) {
        this.endUnit = endUnit;
    }
    public String getPrepay() {
        return prepay;
    }
    public void setPrepay(String prepay) {
        this.prepay = prepay;
    }
    public String getCreateDate() {
        return createDate;
    }
    public void setCreateDate(String createDate) {
        this.createDate = createDate;
    }
    public String getCompTag() {
        return compTag;
    }
    public void setCompTag(String compTag) {
        this.compTag = compTag;
    }
    public String getGroupTag() {
        return groupTag;
    }
    public void setGroupTag(String groupTag) {
        this.groupTag = groupTag;
    }
    public String getNeedExp() {
        return needExp;
    }
    public void setNeedExp(String needExp) {
        this.needExp = needExp;
    }
    public String getVersion() {
        return version;
    }
    public void setVersion(String version) {
        this.version = version;
    }
    public String getState() {
        return state;
    }
    public void setState(String state) {
        this.state = state;
    }
    public String getStaffId() {
        return staffId;
    }
    public void setStaffId(String staffId) {
        this.staffId = staffId;
    }
    public String getDepartId() {
        return departId;
    }
    public void setDepartId(String departId) {
        this.departId = departId;
    }
    public String getUpdateDate() {
        return updateDate;
    }
    public void setUpdateDate(String updateDate) {
        this.updateDate = updateDate;
    }
    public Integer getPackageId() {
        return packageId;
    }
    public void setPackageId(Integer packageId) {
        this.packageId = packageId;
    }
    public Integer getStartOffset() {
        return startOffset;
    }
    public void setStartOffset(Integer startOffset) {
        this.startOffset = startOffset;
    }
    public Integer getEndOffset() {
        return endOffset;
    }
    public void setEndOffset(Integer endOffset) {
        this.endOffset = endOffset;
    }
    public Integer getMainProdId() {
        return mainProdId;
    }
    public void setMainProdId(Integer mainProdId) {
        this.mainProdId = mainProdId;
    }
    public double getFee() {
        return fee;
    }
    public void setFee(double fee) {
        this.fee = fee;
    }
    // Non-standard static accessor for the serialVersionUID constant.
    public static long getSerialversionuid() {
        return serialVersionUID;
    }
}
| apache-2.0 |
apache/groovy | src/main/java/org/codehaus/groovy/ast/expr/BinaryExpression.java | 4427 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.codehaus.groovy.ast.expr;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.GroovyCodeVisitor;
import org.codehaus.groovy.ast.Variable;
import org.codehaus.groovy.syntax.Token;
import org.codehaus.groovy.syntax.Types;
/**
 * Represents a binary operation: a left and a right operand expression joined by an
 * operation token (for example {@code a + b} or {@code list[index]}).
 */
public class BinaryExpression extends Expression {

    private Expression leftExpression;
    private Expression rightExpression;
    private final Token operation;
    private boolean safe = false;

    public BinaryExpression(Expression leftExpression,
                            Token operation,
                            Expression rightExpression) {
        this.leftExpression = leftExpression;
        this.operation = operation;
        this.rightExpression = rightExpression;
    }

    public BinaryExpression(Expression leftExpression,
                            Token operation,
                            Expression rightExpression,
                            boolean safe) {
        this(leftExpression, operation, rightExpression);
        this.safe = safe;
    }

    @Override
    public String toString() {
        return super.toString() + "[" + leftExpression + operation + rightExpression + "]";
    }

    @Override
    public void visit(GroovyCodeVisitor visitor) {
        visitor.visitBinaryExpression(this);
    }

    @Override
    public Expression transformExpression(ExpressionTransformer transformer) {
        // Transform both operands, then copy source position and metadata onto the result.
        Expression newLeft = transformer.transform(leftExpression);
        Expression newRight = transformer.transform(rightExpression);
        Expression result = new BinaryExpression(newLeft, operation, newRight, safe);
        result.setSourcePosition(this);
        result.copyNodeMetaData(this);
        return result;
    }

    public Expression getLeftExpression() {
        return leftExpression;
    }

    public void setLeftExpression(Expression leftExpression) {
        this.leftExpression = leftExpression;
    }

    public void setRightExpression(Expression rightExpression) {
        this.rightExpression = rightExpression;
    }

    public Token getOperation() {
        return operation;
    }

    public Expression getRightExpression() {
        return rightExpression;
    }

    @Override
    public String getText() {
        // Subscript expressions render as "left[right]" (with "?" for safe navigation);
        // every other operation renders as "(left op right)".
        if (operation.getType() == Types.LEFT_SQUARE_BRACKET) {
            return leftExpression.getText() + (safe ? "?" : "") + "[" + rightExpression.getText() + "]";
        }
        return "(" + leftExpression.getText() + " " + operation.getText() + " " + rightExpression.getText() + ")";
    }

    public boolean isSafe() {
        return safe;
    }

    public void setSafe(boolean safe) {
        this.safe = safe;
    }

    /**
     * Creates an assignment expression in which the specified expression
     * is written into the specified variable name.
     */
    public static BinaryExpression newAssignmentExpression(Variable variable, Expression rhs) {
        return new BinaryExpression(new VariableExpression(variable), Token.newPlaceholder(Types.ASSIGN), rhs);
    }

    /**
     * Creates a variable initialization expression in which the specified expression
     * is written into the specified variable name, optionally typed.
     */
    public static BinaryExpression newInitializationExpression(String variable, ClassNode type, Expression rhs) {
        VariableExpression lhs = new VariableExpression(variable);
        if (type != null) {
            lhs.setType(type);
        }
        return new BinaryExpression(lhs, Token.newPlaceholder(Types.ASSIGN), rhs);
    }
}
| apache-2.0 |
nikhilvibhav/camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFtpPreMoveNoopIT.java | 3383 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote.integration;
import java.io.File;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Integration test verifying the {@code preMove=work} option combined with {@code noop=true}:
 * the consumed file is moved into the {@code work} sub-directory before processing, and the
 * route's processor plus the final check confirm it is present there both during and after
 * consumption.
 */
public class FromFtpPreMoveNoopIT extends FtpServerTestSupport {
    // Consumer endpoint: pre-moves each file into "work" before processing; noop=true
    // means the consumer neither deletes nor moves the file afterwards.
    protected String getFtpUrl() {
        return "ftp://admin@localhost:{{ftp.server.port}}/movefile?password=admin&preMove=work&noop=true";
    }
    @Override
    @BeforeEach
    public void setUp() throws Exception {
        super.setUp();
        prepareFtpServer();
    }
    @Test
    public void testPreMoveNoop() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.expectedBodiesReceived("Hello World this file will be moved");
        mock.assertIsSatisfied();
        // and file should be kept there
        // (sleep gives the consumer time to finish post-processing before the check)
        Thread.sleep(1000);
        File file = ftpFile("movefile/work/hello.txt").toFile();
        assertTrue(file.exists(), "The file should exists");
    }
    // Seeds the FTP server with the file the route under test will consume.
    private void prepareFtpServer() throws Exception {
        // prepares the FTP Server by creating a file on the server that we want
        // to unit
        // test that we can pool and store as a local file
        Endpoint endpoint = context.getEndpoint(getFtpUrl());
        Exchange exchange = endpoint.createExchange();
        exchange.getIn().setBody("Hello World this file will be moved");
        exchange.getIn().setHeader(Exchange.FILE_NAME, "hello.txt");
        Producer producer = endpoint.createProducer();
        producer.start();
        producer.process(exchange);
        producer.stop();
    }
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                // The processor runs while the exchange is in flight, so the file must
                // already have been pre-moved into "work" at that point.
                from(getFtpUrl()).process(new Processor() {
                    public void process(Exchange exchange) throws Exception {
                        // assert the file is pre moved
                        File file = ftpFile("movefile/work/hello.txt").toFile();
                        assertTrue(file.exists(), "The file should have been moved");
                    }
                }).to("mock:result");
            }
        };
    }
}
| apache-2.0 |
ncalexan/mentat | sdks/android/Mentat/library/src/main/java/org/mozilla/mentat/RustError.java | 2606 | /* -*- Mode: Java; c-basic-offset: 4; tab-width: 20; indent-tabs-mode: nil; -*-
* Copyright 2018 Mozilla
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License. */
package org.mozilla.mentat;
import android.util.Log;
import com.sun.jna.Pointer;
import com.sun.jna.Structure;
import java.util.Arrays;
import java.util.List;
/**
 * Maps the C struct used by the Rust FFI layer to report errors.
 * An instance either carries a native error message (failure) or a {@code null}
 * message pointer (success) - never both.
 */
public class RustError extends Structure {
    public static class ByReference extends RustError implements Structure.ByReference {
    }

    public static class ByValue extends RustError implements Structure.ByValue {
    }

    // Public only because JNA's field mapping requires it; treat as an implementation detail.
    public Pointer message;

    /**
     * Returns whether this represents success (no error message present).
     */
    public boolean isSuccess() {
        return this.message == null;
    }

    /**
     * Returns whether this represents failure (an error message is present).
     */
    public boolean isFailure() {
        return this.message != null;
    }

    /**
     * Returns the error message (or null if there is none) and releases the native string.
     */
    public String consumeErrorMessage() {
        final String errorMessage = this.getErrorMessage();
        this.destroyNativeMessage();
        return errorMessage;
    }

    /**
     * Returns the error message, or null if there is none. Does not free the native string.
     */
    public String getErrorMessage() {
        if (this.message == null) {
            return null;
        }
        return this.message.getString(0, "utf8");
    }

    @Override
    protected List<String> getFieldOrder() {
        return Arrays.asList("message");
    }

    @Override
    protected void finalize() {
        // Last-resort cleanup in case the message was never consumed.
        this.destroyNativeMessage();
    }

    /* package-local */
    void logAndConsumeError(String tag) {
        if (this.isFailure()) {
            Log.e(tag, this.consumeErrorMessage());
        }
    }

    /**
     * Frees the native message (if any) and clears the pointer so it cannot be freed twice.
     */
    private void destroyNativeMessage() {
        if (this.message != null) {
            JNA.INSTANCE.rust_c_string_destroy(this.message);
            this.message = null;
        }
    }
} | apache-2.0 |
bbossgroups/bbossgroups-3.5 | bboss-persistent/test/com/frameworkset/orm/engine/model/HypersonicDomainTest.java | 6265 | /*
* Copyright 2008 biaoping.yin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.frameworkset.orm.engine.model;
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import junit.framework.TestCase;
import com.frameworkset.orm.engine.transform.XmlToAppData;
/**
* Tests for domain handling (for HSQLDB formerly known as Hypersonic).
*
* @author <a href="mailto:mkalen@apache.org">Martin Kalén</a>
* @version $Id: HypersonicDomainTest.java,v 1.1 2005/05/20 20:19:18 tfischer Exp $
*/
public class HypersonicDomainTest extends TestCase
{
private XmlToAppData xmlToAppData;
private Database db;
public HypersonicDomainTest(String name)
{
super(name);
}
protected void setUp() throws Exception
{
super.setUp();
xmlToAppData = new XmlToAppData("hypersonic", "defaultpackage");
db = xmlToAppData.parseFile(
"src/com/frameworkset/orm/engine/model/domaintest-schema.xml");
}
protected void tearDown() throws Exception
{
xmlToAppData = null;
super.tearDown();
}
/**
* test if the tables get the package name from the properties file
*/
public void testDomainColumn() throws Exception
{
Table table = db.getTable("product");
Column name = table.getColumn("name");
assertEquals("VARCHAR", name.getDomain().getSqlType());
assertEquals("40", name.getSize());
assertEquals("name VARCHAR(40) ", name.getSqlString());
Column price = table.getColumn("price");
assertEquals("NUMERIC", price.getTorqueType());
assertEquals("NUMERIC", price.getDomain().getSqlType());
assertEquals("10", price.getSize());
assertEquals("2", price.getScale());
assertEquals("0", price.getDefaultValue());
assertEquals("(10,2)", price.printSize());
assertEquals("price NUMERIC(10,2) default 0 ", price.getSqlString());
}
/**
* test if the tables get the package name from the properties file
*/
public void testExtendedDomainColumn() throws Exception
{
Table table = db.getTable("article");
Column price = table.getColumn("price");
assertEquals("NUMERIC", price.getTorqueType());
assertEquals("NUMERIC", price.getDomain().getSqlType());
assertEquals("12", price.getSize());
assertEquals("2", price.getScale());
assertEquals("1000", price.getDefaultValue());
assertEquals("(12,2)", price.printSize());
assertEquals("price NUMERIC(12,2) default 1000 ", price.getSqlString());
}
public void testDecimalColumn() throws Exception
{
Table table = db.getTable("article");
Column col = table.getColumn("decimal_col");
assertEquals("DECIMAL", col.getTorqueType());
assertEquals("DECIMAL", col.getDomain().getSqlType());
assertEquals("10", col.getSize());
assertEquals("3", col.getScale());
assertEquals("(10,3)", col.printSize());
assertEquals("decimal_col DECIMAL(10,3) ", col.getSqlString());
}
public void testDateColumn() throws Exception
{
Table table = db.getTable("article");
Column col = table.getColumn("date_col");
assertEquals("DATE", col.getTorqueType());
assertEquals("DATE", col.getDomain().getSqlType());
assertEquals("", col.printSize());
assertEquals("date_col DATE ", col.getSqlString());
}
public void testNativeAutoincrement() throws Exception
{
Table table = db.getTable("native");
Column col = table.getColumn("native_id");
assertEquals("IDENTITY", col.getAutoIncrementString());
assertEquals("native_id INTEGER NOT NULL IDENTITY", col.getSqlString());
col = table.getColumn("name");
assertEquals("", col.getAutoIncrementString());
}
public void testIdBrokerAutoincrement() throws Exception
{
Table table = db.getTable("article");
Column col = table.getColumn("article_id");
assertEquals("", col.getAutoIncrementString());
assertEquals("article_id INTEGER NOT NULL ", col.getSqlString());
col = table.getColumn("name");
assertEquals("", col.getAutoIncrementString());
}
public void testBooleanint() throws Exception
{
Table table = db.getTable("types");
Column col = table.getColumn("cbooleanint");
assertEquals("", col.getAutoIncrementString());
assertEquals("BOOLEANINT", col.getTorqueType());
assertEquals("INTEGER", col.getDomain().getSqlType());
assertEquals("cbooleanint INTEGER ", col.getSqlString());
}
public void testBlob() throws Exception
{
    // The BLOB Torque type must map onto this platform's BINARY SQL type.
    final Column blobColumn = db.getTable("types").getColumn("cblob");
    assertEquals("", blobColumn.getAutoIncrementString());
    assertEquals("BLOB", blobColumn.getTorqueType());
    assertEquals("BINARY", blobColumn.getDomain().getSqlType());
    assertEquals("cblob BINARY ", blobColumn.getSqlString());
}
}
| apache-2.0 |
siosio/intellij-community | python/gen/com/jetbrains/python/console/protocol/ColHeader.java | 24653 | /**
* Autogenerated by Thrift Compiler (0.13.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.jetbrains.python.console.protocol;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.13.0)", date = "2021-04-15")
public class ColHeader implements org.apache.thrift.TBase<ColHeader, ColHeader._Fields>, java.io.Serializable, Cloneable, Comparable<ColHeader> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColHeader");
private static final org.apache.thrift.protocol.TField LABEL_FIELD_DESC = new org.apache.thrift.protocol.TField("label", org.apache.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final org.apache.thrift.protocol.TField FORMAT_FIELD_DESC = new org.apache.thrift.protocol.TField("format", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField MAX_FIELD_DESC = new org.apache.thrift.protocol.TField("max", org.apache.thrift.protocol.TType.STRING, (short)4);
private static final org.apache.thrift.protocol.TField MIN_FIELD_DESC = new org.apache.thrift.protocol.TField("min", org.apache.thrift.protocol.TType.STRING, (short)5);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ColHeaderStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ColHeaderTupleSchemeFactory();
public @org.apache.thrift.annotation.Nullable java.lang.String label; // required
public @org.apache.thrift.annotation.Nullable java.lang.String type; // required
public @org.apache.thrift.annotation.Nullable java.lang.String format; // required
public @org.apache.thrift.annotation.Nullable java.lang.String max; // required
public @org.apache.thrift.annotation.Nullable java.lang.String min; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
LABEL((short)1, "label"),
TYPE((short)2, "type"),
FORMAT((short)3, "format"),
MAX((short)4, "max"),
MIN((short)5, "min");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // LABEL
return LABEL;
case 2: // TYPE
return TYPE;
case 3: // FORMAT
return FORMAT;
case 4: // MAX
return MAX;
case 5: // MIN
return MIN;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.LABEL, new org.apache.thrift.meta_data.FieldMetaData("label", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.FORMAT, new org.apache.thrift.meta_data.FieldMetaData("format", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.MAX, new org.apache.thrift.meta_data.FieldMetaData("max", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.MIN, new org.apache.thrift.meta_data.FieldMetaData("min", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ColHeader.class, metaDataMap);
}
public ColHeader() {
}
public ColHeader(
java.lang.String label,
java.lang.String type,
java.lang.String format,
java.lang.String max,
java.lang.String min)
{
this();
this.label = label;
this.type = type;
this.format = format;
this.max = max;
this.min = min;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public ColHeader(ColHeader other) {
if (other.isSetLabel()) {
this.label = other.label;
}
if (other.isSetType()) {
this.type = other.type;
}
if (other.isSetFormat()) {
this.format = other.format;
}
if (other.isSetMax()) {
this.max = other.max;
}
if (other.isSetMin()) {
this.min = other.min;
}
}
public ColHeader deepCopy() {
return new ColHeader(this);
}
@Override
public void clear() {
this.label = null;
this.type = null;
this.format = null;
this.max = null;
this.min = null;
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getLabel() {
return this.label;
}
public ColHeader setLabel(@org.apache.thrift.annotation.Nullable java.lang.String label) {
this.label = label;
return this;
}
public void unsetLabel() {
this.label = null;
}
/** Returns true if field label is set (has been assigned a value) and false otherwise */
public boolean isSetLabel() {
return this.label != null;
}
public void setLabelIsSet(boolean value) {
if (!value) {
this.label = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getType() {
return this.type;
}
public ColHeader setType(@org.apache.thrift.annotation.Nullable java.lang.String type) {
this.type = type;
return this;
}
public void unsetType() {
this.type = null;
}
/** Returns true if field type is set (has been assigned a value) and false otherwise */
public boolean isSetType() {
return this.type != null;
}
public void setTypeIsSet(boolean value) {
if (!value) {
this.type = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getFormat() {
return this.format;
}
public ColHeader setFormat(@org.apache.thrift.annotation.Nullable java.lang.String format) {
this.format = format;
return this;
}
public void unsetFormat() {
this.format = null;
}
/** Returns true if field format is set (has been assigned a value) and false otherwise */
public boolean isSetFormat() {
return this.format != null;
}
public void setFormatIsSet(boolean value) {
if (!value) {
this.format = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getMax() {
return this.max;
}
public ColHeader setMax(@org.apache.thrift.annotation.Nullable java.lang.String max) {
this.max = max;
return this;
}
public void unsetMax() {
this.max = null;
}
/** Returns true if field max is set (has been assigned a value) and false otherwise */
public boolean isSetMax() {
return this.max != null;
}
public void setMaxIsSet(boolean value) {
if (!value) {
this.max = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getMin() {
return this.min;
}
public ColHeader setMin(@org.apache.thrift.annotation.Nullable java.lang.String min) {
this.min = min;
return this;
}
public void unsetMin() {
this.min = null;
}
/** Returns true if field min is set (has been assigned a value) and false otherwise */
public boolean isSetMin() {
return this.min != null;
}
public void setMinIsSet(boolean value) {
if (!value) {
this.min = null;
}
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case LABEL:
if (value == null) {
unsetLabel();
} else {
setLabel((java.lang.String)value);
}
break;
case TYPE:
if (value == null) {
unsetType();
} else {
setType((java.lang.String)value);
}
break;
case FORMAT:
if (value == null) {
unsetFormat();
} else {
setFormat((java.lang.String)value);
}
break;
case MAX:
if (value == null) {
unsetMax();
} else {
setMax((java.lang.String)value);
}
break;
case MIN:
if (value == null) {
unsetMin();
} else {
setMin((java.lang.String)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case LABEL:
return getLabel();
case TYPE:
return getType();
case FORMAT:
return getFormat();
case MAX:
return getMax();
case MIN:
return getMin();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case LABEL:
return isSetLabel();
case TYPE:
return isSetType();
case FORMAT:
return isSetFormat();
case MAX:
return isSetMax();
case MIN:
return isSetMin();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof ColHeader)
return this.equals((ColHeader)that);
return false;
}
public boolean equals(ColHeader that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_label = true && this.isSetLabel();
boolean that_present_label = true && that.isSetLabel();
if (this_present_label || that_present_label) {
if (!(this_present_label && that_present_label))
return false;
if (!this.label.equals(that.label))
return false;
}
boolean this_present_type = true && this.isSetType();
boolean that_present_type = true && that.isSetType();
if (this_present_type || that_present_type) {
if (!(this_present_type && that_present_type))
return false;
if (!this.type.equals(that.type))
return false;
}
boolean this_present_format = true && this.isSetFormat();
boolean that_present_format = true && that.isSetFormat();
if (this_present_format || that_present_format) {
if (!(this_present_format && that_present_format))
return false;
if (!this.format.equals(that.format))
return false;
}
boolean this_present_max = true && this.isSetMax();
boolean that_present_max = true && that.isSetMax();
if (this_present_max || that_present_max) {
if (!(this_present_max && that_present_max))
return false;
if (!this.max.equals(that.max))
return false;
}
boolean this_present_min = true && this.isSetMin();
boolean that_present_min = true && that.isSetMin();
if (this_present_min || that_present_min) {
if (!(this_present_min && that_present_min))
return false;
if (!this.min.equals(that.min))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((isSetLabel()) ? 131071 : 524287);
if (isSetLabel())
hashCode = hashCode * 8191 + label.hashCode();
hashCode = hashCode * 8191 + ((isSetType()) ? 131071 : 524287);
if (isSetType())
hashCode = hashCode * 8191 + type.hashCode();
hashCode = hashCode * 8191 + ((isSetFormat()) ? 131071 : 524287);
if (isSetFormat())
hashCode = hashCode * 8191 + format.hashCode();
hashCode = hashCode * 8191 + ((isSetMax()) ? 131071 : 524287);
if (isSetMax())
hashCode = hashCode * 8191 + max.hashCode();
hashCode = hashCode * 8191 + ((isSetMin()) ? 131071 : 524287);
if (isSetMin())
hashCode = hashCode * 8191 + min.hashCode();
return hashCode;
}
@Override
public int compareTo(ColHeader other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.valueOf(isSetLabel()).compareTo(other.isSetLabel());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetLabel()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.label, other.label);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(isSetType()).compareTo(other.isSetType());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(isSetFormat()).compareTo(other.isSetFormat());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetFormat()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.format, other.format);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(isSetMax()).compareTo(other.isSetMax());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMax()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.max, other.max);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(isSetMin()).compareTo(other.isSetMin());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMin()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.min, other.min);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("ColHeader(");
boolean first = true;
sb.append("label:");
if (this.label == null) {
sb.append("null");
} else {
sb.append(this.label);
}
first = false;
if (!first) sb.append(", ");
sb.append("type:");
if (this.type == null) {
sb.append("null");
} else {
sb.append(this.type);
}
first = false;
if (!first) sb.append(", ");
sb.append("format:");
if (this.format == null) {
sb.append("null");
} else {
sb.append(this.format);
}
first = false;
if (!first) sb.append(", ");
sb.append("max:");
if (this.max == null) {
sb.append("null");
} else {
sb.append(this.max);
}
first = false;
if (!first) sb.append(", ");
sb.append("min:");
if (this.min == null) {
sb.append("null");
} else {
sb.append(this.min);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class ColHeaderStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public ColHeaderStandardScheme getScheme() {
return new ColHeaderStandardScheme();
}
}
private static class ColHeaderStandardScheme extends org.apache.thrift.scheme.StandardScheme<ColHeader> {
public void read(org.apache.thrift.protocol.TProtocol iprot, ColHeader struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // LABEL
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.label = iprot.readString();
struct.setLabelIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // TYPE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.type = iprot.readString();
struct.setTypeIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // FORMAT
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.format = iprot.readString();
struct.setFormatIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // MAX
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.max = iprot.readString();
struct.setMaxIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // MIN
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.min = iprot.readString();
struct.setMinIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, ColHeader struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.label != null) {
oprot.writeFieldBegin(LABEL_FIELD_DESC);
oprot.writeString(struct.label);
oprot.writeFieldEnd();
}
if (struct.type != null) {
oprot.writeFieldBegin(TYPE_FIELD_DESC);
oprot.writeString(struct.type);
oprot.writeFieldEnd();
}
if (struct.format != null) {
oprot.writeFieldBegin(FORMAT_FIELD_DESC);
oprot.writeString(struct.format);
oprot.writeFieldEnd();
}
if (struct.max != null) {
oprot.writeFieldBegin(MAX_FIELD_DESC);
oprot.writeString(struct.max);
oprot.writeFieldEnd();
}
if (struct.min != null) {
oprot.writeFieldBegin(MIN_FIELD_DESC);
oprot.writeString(struct.min);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class ColHeaderTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public ColHeaderTupleScheme getScheme() {
return new ColHeaderTupleScheme();
}
}
private static class ColHeaderTupleScheme extends org.apache.thrift.scheme.TupleScheme<ColHeader> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, ColHeader struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetLabel()) {
optionals.set(0);
}
if (struct.isSetType()) {
optionals.set(1);
}
if (struct.isSetFormat()) {
optionals.set(2);
}
if (struct.isSetMax()) {
optionals.set(3);
}
if (struct.isSetMin()) {
optionals.set(4);
}
oprot.writeBitSet(optionals, 5);
if (struct.isSetLabel()) {
oprot.writeString(struct.label);
}
if (struct.isSetType()) {
oprot.writeString(struct.type);
}
if (struct.isSetFormat()) {
oprot.writeString(struct.format);
}
if (struct.isSetMax()) {
oprot.writeString(struct.max);
}
if (struct.isSetMin()) {
oprot.writeString(struct.min);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, ColHeader struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
java.util.BitSet incoming = iprot.readBitSet(5);
if (incoming.get(0)) {
struct.label = iprot.readString();
struct.setLabelIsSet(true);
}
if (incoming.get(1)) {
struct.type = iprot.readString();
struct.setTypeIsSet(true);
}
if (incoming.get(2)) {
struct.format = iprot.readString();
struct.setFormatIsSet(true);
}
if (incoming.get(3)) {
struct.max = iprot.readString();
struct.setMaxIsSet(true);
}
if (incoming.get(4)) {
struct.min = iprot.readString();
struct.setMinIsSet(true);
}
}
}
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
| apache-2.0 |
akdasari/SparkCore | spark-framework/src/main/java/org/sparkcommerce/core/order/dao/OrderDao.java | 1556 | /*
* #%L
* SparkCommerce Framework
* %%
* Copyright (C) 2009 - 2013 Spark Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.sparkcommerce.core.order.dao;
import java.util.List;
import org.sparkcommerce.core.order.domain.Order;
import org.sparkcommerce.core.order.service.type.OrderStatus;
import org.sparkcommerce.profile.core.domain.Customer;
/**
 * DAO contract for persisting and querying {@link Order} instances.
 *
 * <p>NOTE(review): the descriptions below are derived from the method
 * signatures; confirm the exact not-found/null semantics against the
 * concrete implementation.
 */
public interface OrderDao {

    /** Reads the order with the given id. */
    Order readOrderById(Long orderId);

    /**
     * Reads the order with the given id.
     *
     * @param refresh presumably forces a reload from the datastore,
     *                bypassing any first-level cache -- TODO confirm
     */
    Order readOrderById(Long orderId, boolean refresh);

    /** Reads all of the customer's orders that are in the given status. */
    List<Order> readOrdersForCustomer(Customer customer, OrderStatus orderStatus);

    /** Reads all orders for the customer with the given id. */
    List<Order> readOrdersForCustomer(Long id);

    /** Reads the customer's named order with the given name. */
    Order readNamedOrderForCustomer(Customer customer, String name);

    /** Reads the customer's current cart order. */
    Order readCartForCustomer(Customer customer);

    /** Persists the given order and returns the managed instance. */
    Order save(Order order);

    /** Removes the given order. */
    void delete(Order order);

    /** Submits the given cart order. */
    Order submitOrder(Order cartOrder);

    /** Creates a new order instance. */
    Order create();

    /** Creates and returns a new cart order for the given customer. */
    Order createNewCartForCustomer(Customer customer);

    /** Reads an order by its order number. */
    Order readOrderByOrderNumber(String orderNumber);

    /** Updates pricing on the given order and returns it. */
    Order updatePrices(Order order);
}
| apache-2.0 |
fcrepo4/fcrepo4 | fcrepo-kernel-impl/src/test/java/org/fcrepo/kernel/impl/observer/ResourceOperationEventBuilderTest.java | 7830 | /*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.impl.observer;
import org.fcrepo.kernel.api.identifiers.FedoraId;
import org.fcrepo.kernel.api.observer.Event;
import org.fcrepo.kernel.api.observer.EventType;
import org.fcrepo.kernel.impl.operations.DeleteResourceOperationFactoryImpl;
import org.fcrepo.kernel.impl.operations.NonRdfSourceOperationFactoryImpl;
import org.fcrepo.kernel.impl.operations.RdfSourceOperationFactoryImpl;
import org.fcrepo.kernel.impl.operations.VersionResourceOperationFactoryImpl;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.util.Set;
import static org.fcrepo.kernel.api.RdfLexicon.RDF_SOURCE;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
* @author pwinckles
*/
public class ResourceOperationEventBuilderTest {

    // Shared fixture values used across the cases below.
    private static final FedoraId FEDORA_ID = FedoraId.create("/test");
    private static final String USER = "user1";
    // NOTE(review): "locahost" looks like a typo for "localhost"; harmless as
    // test data here, but worth correcting.
    private static final String BASE_URL = "http://locahost/rest";

    // Creating an RDF source maps to a RESOURCE_CREATION event.
    @Test
    public void buildCreateEventFromCreateRdfOperation() {
        final var operation = new RdfSourceOperationFactoryImpl()
                .createBuilder(FEDORA_ID, RDF_SOURCE.toString())
                .userPrincipal(USER)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertDefaultEvent(event, EventType.RESOURCE_CREATION);
    }

    // Creating a non-RDF (binary) source also maps to RESOURCE_CREATION; a
    // distinct id/user is used to show the values come from the operation.
    @Test
    public void buildCreateEventFromCreateNonRdfOperation() {
        final var fedoraId = FedoraId.create("/test/ab/c");
        final var user = "user2";

        final var operation = new NonRdfSourceOperationFactoryImpl()
                .createInternalBinaryBuilder(fedoraId, new ByteArrayInputStream(new byte[]{}))
                .userPrincipal(user)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(fedoraId, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertEquals(fedoraId, event.getFedoraId());
        assertEquals(fedoraId.getFullIdPath(), event.getPath());
        assertEquals(user, event.getUserID());
        assertThat(event.getTypes(), contains(EventType.RESOURCE_CREATION));
        assertNotNull(event.getEventID());
        assertNotNull(event.getDate());
    }

    // Creating a version is reported as a modification of the resource,
    // not as a creation.
    @Test
    public void buildCreateEventFromVersionOperation() {
        final var operation = new VersionResourceOperationFactoryImpl().createBuilder(FEDORA_ID)
                .userPrincipal(USER)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertDefaultEvent(event, EventType.RESOURCE_MODIFICATION);
    }

    // Deleting a resource maps to RESOURCE_DELETION.
    @Test
    public void buildDeleteEventFromDeleteOperation() {
        final var operation = new DeleteResourceOperationFactoryImpl().deleteBuilder(FEDORA_ID)
                .userPrincipal(USER)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertDefaultEvent(event, EventType.RESOURCE_DELETION);
    }

    // Updating an RDF source maps to RESOURCE_MODIFICATION.
    @Test
    public void buildUpdateEventFromUpdateRdfOperation() {
        final var operation = new RdfSourceOperationFactoryImpl().updateBuilder(FEDORA_ID)
                .userPrincipal(USER)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertDefaultEvent(event, EventType.RESOURCE_MODIFICATION);
    }

    // Updating a non-RDF (binary) source maps to RESOURCE_MODIFICATION.
    @Test
    public void buildUpdateEventFromUpdateNonRdfOperation() {
        final var operation = new NonRdfSourceOperationFactoryImpl()
                .updateInternalBinaryBuilder(FEDORA_ID, new ByteArrayInputStream(new byte[]{}))
                .userPrincipal(USER)
                .build();

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(BASE_URL)
                .build();

        assertDefaultEvent(event, EventType.RESOURCE_MODIFICATION);
    }

    // Merging two builders for the same resource unions their event types,
    // and (as asserted below) the merged event carries the date of the
    // builder that was merged in.
    @Test
    public void mergeValidObjects() {
        final var createOperation = new RdfSourceOperationFactoryImpl()
                .createBuilder(FEDORA_ID, RDF_SOURCE.toString())
                .userPrincipal(USER)
                .build();
        final var createEventBuilder = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, createOperation)
                .withBaseUrl(BASE_URL);

        final var updateOperation = new NonRdfSourceOperationFactoryImpl()
                .updateInternalBinaryBuilder(FEDORA_ID, new ByteArrayInputStream(new byte[]{}))
                .userPrincipal(USER)
                .build();
        final var updateEventBuilder = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, updateOperation)
                .withBaseUrl(BASE_URL);
        final var updateEvent = updateEventBuilder.build();

        final var merged = createEventBuilder.merge(updateEventBuilder).build();

        assertEquals(FEDORA_ID, merged.getFedoraId());
        assertEquals(FEDORA_ID.getFullIdPath(), merged.getPath());
        assertEquals(USER, merged.getUserID());
        assertThat(merged.getTypes(), containsInAnyOrder(EventType.RESOURCE_CREATION, EventType.RESOURCE_MODIFICATION));
        assertEquals(updateEvent.getDate(), merged.getDate());
    }

    // Optional builder fields (base URL, user agent, resource types) must be
    // copied onto the built event verbatim.
    @Test
    public void populateOtherEventFields() {
        final var operation = new NonRdfSourceOperationFactoryImpl()
                .updateInternalBinaryBuilder(FEDORA_ID, new ByteArrayInputStream(new byte[]{}))
                .userPrincipal(USER)
                .build();

        final var baseUrl = "http://localhost/rest";
        final var userAgent = "user-agent";
        final var resourceTypes = Set.of("resource-type");

        final var event = ResourceOperationEventBuilder.fromResourceOperation(FEDORA_ID, operation)
                .withBaseUrl(baseUrl)
                .withUserAgent(userAgent)
                .withResourceTypes(resourceTypes)
                .build();

        assertEquals(baseUrl, event.getBaseUrl());
        assertEquals(userAgent, event.getUserAgent());
        assertEquals(resourceTypes, event.getResourceTypes());
    }

    // Common assertions for events built from the shared FEDORA_ID/USER fixture.
    private void assertDefaultEvent(final Event event, final EventType type) {
        assertEquals(FEDORA_ID, event.getFedoraId());
        assertEquals(FEDORA_ID.getFullIdPath(), event.getPath());
        assertEquals(USER, event.getUserID());
        assertThat(event.getTypes(), contains(type));
        assertNotNull(event.getEventID());
        assertNotNull(event.getDate());
    }
}
| apache-2.0 |
SpectraLogic/ds3_java_sdk | ds3-sdk/src/main/java/com/spectralogic/ds3client/models/DataPlacementRuleState.java | 925 | /*
* ******************************************************************************
* Copyright 2014-2019 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
package com.spectralogic.ds3client.models;
public enum DataPlacementRuleState {
NORMAL,
INCLUSION_IN_PROGRESS
} | apache-2.0 |
johncarl81/transfuse | transfuse-core/src/main/java/org/androidtransfuse/gen/variableBuilder/InjectionNodeBuilder.java | 1041 | /**
* Copyright 2011-2015 John Ericksen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.androidtransfuse.gen.variableBuilder;
import org.androidtransfuse.adapter.ASTBase;
import org.androidtransfuse.analysis.AnalysisContext;
import org.androidtransfuse.model.InjectionNode;
import org.androidtransfuse.model.InjectionSignature;
/**
* @author John Ericksen
*/
public interface InjectionNodeBuilder {

    /**
     * Builds an {@link InjectionNode} for the given injection target.
     *
     * @param target    AST element the injection targets
     * @param signature injection signature (type plus qualifying annotations)
     * @param context   current analysis context
     * @return the injection node describing the target
     */
    InjectionNode buildInjectionNode(ASTBase target, InjectionSignature signature, AnalysisContext context);
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/engine/store/entry/GfxdRegionEntryFactory.java | 1164 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.engine.store.entry;
import com.gemstone.gemfire.internal.cache.RegionEntryFactory;
/**
* TODO: merge: Currently versioning is unsupported in GemFireXD. Change it to use
* timestamp based one as being designed for Cedar.
*/
public abstract class GfxdRegionEntryFactory implements RegionEntryFactory {

  /**
   * Versioned region entries are not supported by GemFireXD (see the class
   * comment above: a timestamp-based design is planned instead), so this
   * always throws {@link UnsupportedOperationException}.
   */
  public RegionEntryFactory makeVersioned() {
    throw new UnsupportedOperationException(
        "unsupported versioned entries for GemFireXD");
  }
}
| apache-2.0 |
project-ncl/pnc | pnc-mock/src/main/java/org/jboss/pnc/mock/model/MockUser.java | 1060 | /**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.mock.model;
import org.jboss.pnc.model.User;
/**
* @author <a href="mailto:matejonnet@gmail.com">Matej Lazar</a>
*/
public class MockUser {

    /**
     * Creates a test {@link User} named "Poseidon Neptune" with the supplied id.
     *
     * @param id the id to assign to the new user
     * @return the newly built test user
     */
    public static User newTestUser(Integer id) {
        return User.Builder.newBuilder()
                .id(id)
                .firstName("Poseidon")
                .lastName("Neptune")
                .build();
    }
}
| apache-2.0 |
gkatsikas/onos | apps/openstacktelemetry/api/src/main/java/org/onosproject/openstacktelemetry/api/config/TelemetryConfig.java | 4544 | /*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacktelemetry.api.config;
import org.onosproject.net.Annotations;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* An interface for telemetry config.
*/
public interface TelemetryConfig extends Annotations {

    /**
     * Telemetry configuration type.
     */
    enum ConfigType {
        /**
         * Indicates KAFKA telemetry config.
         */
        KAFKA,

        /**
         * Indicates GRPC telemetry config.
         */
        GRPC,

        /**
         * Indicates REST telemetry config.
         */
        REST,

        /**
         * Indicates InfluxDB telemetry config.
         */
        INFLUXDB,

        /**
         * Indicates prometheus telemetry config.
         */
        PROMETHEUS,

        /**
         * Indicates unknown telemetry config.
         */
        UNKNOWN
    }

    /**
     * Activation status of the telemetry service.
     */
    enum Status {
        /**
         * Signifies that the service is in enable status.
         */
        ENABLED,

        /**
         * Signifies that the service is in disable status.
         */
        DISABLED,

        /**
         * Signifies that the service is in pending status.
         */
        PENDING,

        /**
         * Signifies that the service is in unknown status.
         */
        UNKNOWN,
    }

    /**
     * Returns the telemetry configuration name.
     *
     * @return configuration name
     */
    String name();

    /**
     * Returns the telemetry configuration type.
     *
     * @return configuration type
     */
    ConfigType type();

    /**
     * Returns all the parent configurations from which this configuration inherits
     * properties.
     *
     * @return list of parent configurations
     */
    List<TelemetryConfig> parents();

    /**
     * Returns the off-platform application manufacturer name.
     *
     * @return manufacturer name
     */
    String manufacturer();

    /**
     * Returns the off-platform application software version.
     *
     * @return software version
     */
    String swVersion();

    /**
     * Returns the service status.
     *
     * @return service status
     */
    Status status();

    /**
     * Returns the set of annotations as map of key/value properties.
     *
     * @return map of properties
     */
    Map<String, String> properties();

    /**
     * Gets the value of the given property name.
     *
     * @param name property name
     * @return the value of the property,
     *         or null if the property is not defined in this configuration nor
     *         in any of its ancestors
     */
    String getProperty(String name);

    /**
     * Get the value of the given property name.
     *
     * @param name property name
     * @param defaultValue to use if the property is not defined in this configuration
     *                     nor in any of its ancestors
     * @return the value of the property,
     *         or {@code defaultValue} if the property is not defined in this
     *         configuration nor in any of its ancestors
     */
    default String getProperty(String name, String defaultValue) {
        return Optional.ofNullable(getProperty(name)).orElse(defaultValue);
    }

    /**
     * Merges the specified config properties into this one, giving preference to
     * the other config when dealing with conflicts.
     *
     * @param other other configuration
     * @return merged configuration
     */
    TelemetryConfig merge(TelemetryConfig other);

    /**
     * Obtains the cloned instance with updated properties.
     *
     * @param properties telemetry config properties
     * @return a cloned instance
     */
    TelemetryConfig updateProperties(Map<String, String> properties);

    /**
     * Obtains the cloned instance with updated status.
     *
     * @param status service status
     * @return a cloned instance
     */
    TelemetryConfig updateStatus(Status status);
}
| apache-2.0 |
gcoders/gerrit | gerrit-pgm/src/main/java/com/google/gerrit/pgm/util/SiteProgram.java | 9336 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm.util;
import static com.google.inject.Scopes.SINGLETON;
import static com.google.inject.Stage.PRODUCTION;
import com.google.common.collect.Lists;
import com.google.gerrit.common.Die;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.GerritServerConfigModule;
import com.google.gerrit.server.config.SitePath;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.LocalDiskRepositoryManager;
import com.google.gerrit.server.schema.DataSourceModule;
import com.google.gerrit.server.schema.DataSourceProvider;
import com.google.gerrit.server.schema.DataSourceType;
import com.google.gerrit.server.schema.DatabaseModule;
import com.google.gerrit.server.schema.SchemaModule;
import com.google.gerrit.server.securestore.SecureStoreClassName;
import com.google.gwtorm.server.OrmException;
import com.google.inject.AbstractModule;
import com.google.inject.Binding;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.TypeLiteral;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import com.google.inject.spi.Message;
import com.google.inject.util.Providers;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.FS;
import org.kohsuke.args4j.Option;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import javax.sql.DataSource;
/**
 * Base class for command-line programs that operate on an existing Gerrit
 * site: supplies the {@code --site-path} option, site validation, and
 * construction of a Guice injector providing database connectivity.
 */
public abstract class SiteProgram extends AbstractProgram {
  @Option(name = "--site-path", aliases = {"-d"}, usage = "Local directory containing site data")
  private void setSitePath(String path) {
    sitePath = Paths.get(path);
  }

  // Optional externally supplied data source; when null, the site's own
  // configuration and libraries are used to create one.
  protected Provider<DataSource> dsProvider;

  private Path sitePath = Paths.get(".");

  protected SiteProgram() {
  }

  protected SiteProgram(Path sitePath, final Provider<DataSource> dsProvider) {
    this.sitePath = sitePath;
    this.dsProvider = dsProvider;
  }

  /** @return the site path specified on the command line. */
  protected Path getSitePath() {
    return sitePath;
  }

  /** Ensures we are running inside of a valid site, otherwise throws a Die. */
  protected void mustHaveValidSite() throws Die {
    // A site is considered valid when etc/gerrit.config exists under it.
    if (!Files.exists(sitePath.resolve("etc").resolve("gerrit.config"))) {
      throw die("not a Gerrit site: '" + getSitePath() + "'\n"
          + "Perhaps you need to run init first?");
    }
  }

  /** @return provides database connectivity and site path. */
  protected Injector createDbInjector(final DataSourceProvider.Context context) {
    final Path sitePath = getSitePath();
    final List<Module> modules = new ArrayList<>();

    // Binds the site path and the configured secure-store implementation name.
    Module sitePathModule = new AbstractModule() {
      @Override
      protected void configure() {
        bind(Path.class).annotatedWith(SitePath.class).toInstance(sitePath);
        bind(String.class).annotatedWith(SecureStoreClassName.class)
            .toProvider(Providers.of(getConfiguredSecureStoreClass()));
      }
    };
    modules.add(sitePathModule);

    // Binds the "ReviewDb" DataSource, either from the externally supplied
    // provider or from the site's own library-based provider.
    modules.add(new LifecycleModule() {
      @Override
      protected void configure() {
        bind(DataSourceProvider.Context.class).toInstance(context);
        if (dsProvider != null) {
          bind(Key.get(DataSource.class, Names.named("ReviewDb")))
              .toProvider(dsProvider)
              .in(SINGLETON);
          if (LifecycleListener.class.isAssignableFrom(dsProvider.getClass())) {
            listener().toInstance((LifecycleListener) dsProvider);
          }
        } else {
          bind(Key.get(DataSource.class, Names.named("ReviewDb")))
              .toProvider(SiteLibraryBasedDataSourceProvider.class)
              .in(SINGLETON);
          listener().to(SiteLibraryBasedDataSourceProvider.class);
        }
      }
    });
    Module configModule = new GerritServerConfigModule();
    modules.add(configModule);

    Injector cfgInjector = Guice.createInjector(sitePathModule, configModule);
    Config cfg = cfgInjector.getInstance(Key.get(Config.class, GerritServerConfig.class));
    // Resolve the database type either from the live connection metadata or
    // from the site configuration.
    // NOTE(review): cfg.getString(...) may return null when database.type is
    // unset; dbType.toLowerCase() below would then NPE -- confirm callers
    // guarantee the setting exists.
    String dbType;
    if (dsProvider != null) {
      dbType = getDbType(dsProvider);
    } else {
      dbType = cfg.getString("database", null, "type");
    }

    final DataSourceType dst = Guice.createInjector(new DataSourceModule(), configModule,
        sitePathModule).getInstance(
        Key.get(DataSourceType.class, Names.named(dbType.toLowerCase())));

    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(DataSourceType.class).toInstance(dst);
      }});
    modules.add(new DatabaseModule());
    modules.add(new SchemaModule());
    modules.add(new LocalDiskRepositoryManager.Module());

    try {
      return Guice.createInjector(PRODUCTION, modules);
    } catch (CreationException ce) {
      // Unwrap Guice creation failures to present a readable cause chain,
      // special-casing SQL connectivity problems.
      final Message first = ce.getErrorMessages().iterator().next();
      Throwable why = first.getCause();

      if (why instanceof SQLException) {
        throw die("Cannot connect to SQL database", why);
      }
      if (why instanceof OrmException && why.getCause() != null
          && "Unable to determine driver URL".equals(why.getMessage())) {
        why = why.getCause();
        if (isCannotCreatePoolException(why)) {
          throw die("Cannot connect to SQL database", why.getCause());
        }
        throw die("Cannot connect to SQL database", why);
      }

      final StringBuilder buf = new StringBuilder();
      if (why != null) {
        buf.append(why.getMessage());
        why = why.getCause();
      } else {
        buf.append(first.getMessage());
      }
      while (why != null) {
        buf.append("\n caused by ");
        buf.append(why.toString());
        why = why.getCause();
      }
      throw die(buf.toString(), new RuntimeException("DbInjector failed", ce));
    }
  }

  /**
   * Reads {@code gerrit.secureStoreClass} from the site's gerrit.config.
   *
   * @return the configured secure-store class name, or null when the config
   *         file does not exist or the setting is absent
   */
  protected final String getConfiguredSecureStoreClass() {
    Module m = new AbstractModule() {
      @Override
      protected void configure() {
        bind(Path.class).annotatedWith(SitePath.class).toInstance(getSitePath());
        bind(SitePaths.class);
      }
    };
    Injector i = Guice.createInjector(m);
    SitePaths site = i.getInstance(SitePaths.class);
    FileBasedConfig cfg =
        new FileBasedConfig(site.gerrit_config.toFile(), FS.DETECTED);
    if (!cfg.getFile().exists()) {
      return null;
    }
    try {
      cfg.load();
      return cfg.getString("gerrit", null, "secureStoreClass");
    } catch (IOException | ConfigInvalidException e) {
      throw new ProvisionException(e.getMessage(), e);
    }
  }

  /**
   * Guesses the configured database type name by matching the JDBC database
   * product name against the names of all bound {@link DataSourceType}s.
   */
  private String getDbType(Provider<DataSource> dsProvider) {
    String dbProductName;
    try (Connection conn = dsProvider.get().getConnection()) {
      dbProductName = conn.getMetaData().getDatabaseProductName().toLowerCase();
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
    List<Module> modules = Lists.newArrayList();
    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(Path.class).annotatedWith(SitePath.class).toInstance(getSitePath());
      }
    });
    modules.add(new GerritServerConfigModule());
    modules.add(new DataSourceModule());
    Injector i = Guice.createInjector(modules);
    List<Binding<DataSourceType>> dsTypeBindings =
        i.findBindingsByType(new TypeLiteral<DataSourceType>() {});
    for (Binding<DataSourceType> binding : dsTypeBindings) {
      Annotation annotation = binding.getKey().getAnnotation();
      if (annotation instanceof Named) {
        if (((Named) annotation).value().toLowerCase().contains(dbProductName)) {
          return ((Named) annotation).value();
        }
      }
    }
    throw new IllegalStateException(String.format(
        "Cannot guess database type from the database product name '%s'",
        dbProductName));
  }

  // Detects commons-dbcp's pool-creation failure without a compile-time
  // dependency on its (deprecated) exception type at the call site.
  @SuppressWarnings("deprecation")
  private static boolean isCannotCreatePoolException(Throwable why) {
    return why instanceof org.apache.commons.dbcp.SQLNestedException
        && why.getCause() != null
        && why.getMessage().startsWith(
            "Cannot create PoolableConnectionFactory");
  }
}
| apache-2.0 |
Deepnekroz/kaa | server/transports/http/transport/src/main/java/org/kaaproject/kaa/server/transports/http/transport/commands/SyncCommandFactory.java | 2046 | /**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.transports.http.transport.commands;
import io.netty.handler.codec.http.HttpRequest;
import io.netty.handler.codec.http.HttpResponse;
import org.kaaproject.kaa.common.endpoint.CommonEPConstans;
import org.kaaproject.kaa.server.common.server.KaaCommandProcessor;
import org.kaaproject.kaa.server.common.server.KaaCommandProcessorFactory;
/*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Factory producing {@link SyncCommand} processors for the HTTP transport's
 * sync command.
 */
public class SyncCommandFactory implements KaaCommandProcessorFactory<HttpRequest, HttpResponse>{

    /** @return the wire name of the command this factory handles. */
    @Override
    public String getCommandName() {
        return CommonEPConstans.SYNC_COMMAND;
    }

    /** Creates a fresh, configured {@link SyncCommand} for a single request. */
    @Override
    public KaaCommandProcessor<HttpRequest, HttpResponse> createCommandProcessor() {
        return setupCommand(new SyncCommand());
    }

    /** Hook for subclasses to customize a new command; returns it unchanged here. */
    protected SyncCommand setupCommand(SyncCommand command) {
        return command;
    }
}
| apache-2.0 |
nengxu/OrientDB | core/src/main/java/com/orientechnologies/orient/core/command/traverse/OTraverseMultiValueProcess.java | 1948 | /*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.command.traverse;
import java.util.Iterator;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.impl.ODocument;
/**
 * Traverse process that walks a multi-value (iterator) target, delegating
 * each identifiable document element to a nested record process.
 */
public class OTraverseMultiValueProcess extends OTraverseAbstractProcess<Iterator<Object>> {
  // Element currently being examined from the underlying iterator.
  protected Object value;
  // Zero-based position of 'value' within the iteration (-1 before first next()).
  protected int index = -1;

  public OTraverseMultiValueProcess(final OTraverse iCommand, final Iterator<Object> iTarget) {
    super(iCommand, iTarget);
  }

  /**
   * Advances through the target iterator; for each ODocument element a
   * sub-process is run and its first non-null result returned. When the
   * iterator is exhausted, drop() ends this process.
   */
  public OIdentifiable process() {
    while (target.hasNext()) {
      value = target.next();
      index++;

      if (value instanceof OIdentifiable) {
        final ORecord<?> rec = ((OIdentifiable) value).getRecord();
        if (rec instanceof ODocument) {
          final OTraverseRecordProcess subProcess = new OTraverseRecordProcess(command, (ODocument) rec);

          final OIdentifiable subValue = subProcess.process();
          if (subValue != null)
            return subValue;
        }
      }
    }

    return drop();
  }

  @Override
  public String getStatus() {
    // No human-readable status is reported for multi-value traversal steps.
    return null;
  }

  @Override
  public String toString() {
    return "[idx:" + index + "]";
  }
} | apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/Abilities/Songs/Skill_Buffoonery.java | 7063 | package com.planet_ink.coffee_mud.Abilities.Songs;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Bard skill that plants a frivolous item from the bard's inventory onto a
 * victim's worn equipment, either displacing a randomly chosen piece of real
 * gear or occupying a free wear slot.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class Skill_Buffoonery extends BardSkill
{
	@Override public String ID() { return "Skill_Buffoonery"; }
	private final static String localizedName = CMLib.lang().L("Buffoonery");
	@Override public String name() { return localizedName; }
	@Override protected int canAffectCode(){return 0;}
	@Override protected int canTargetCode(){return CAN_MOBS;}
	@Override public int abstractQuality(){return Ability.QUALITY_MALICIOUS;}
	private static final String[] triggerStrings =I(new String[] {"BUFFOONERY"});
	@Override public String[] triggerStrings(){return triggerStrings;}
	@Override public int classificationCode(){return Ability.ACODE_SKILL|Ability.DOMAIN_FOOLISHNESS;}
	@Override public int usageType(){return USAGE_MOVEMENT;}

	// Returns the wear-location codes (as Longs) on which the target still
	// has at least one free slot.
	protected Vector getFreeWearingPositions(MOB target)
	{
		final Vector V=new Vector();
		final Wearable.CODES codes = Wearable.CODES.instance();
		// pos[i]==true means location i is fully occupied.
		final boolean[] pos=new boolean[codes.all_ordered().length];
		for(int i=0;i<pos.length;i++)
			if(target.freeWearPositions(codes.all_ordered()[i],(short)0,(short)0)>0)
				pos[i]=false;
			else
				pos[i]=true;
		for(int i=0;i<pos.length;i++)
			if(!pos[i])
				V.addElement(Long.valueOf(codes.all_ordered()[i]));
		return V;
	}

	// True when the target has any free wearing position at all.
	protected boolean freePosition(MOB target)
	{
		return getFreeWearingPositions(target).size()>0;
	}

	// Finds the name of the first visible carried (inventory) item that is
	// NOT substantial armor (armor>1) or a real weapon (damage>1) -- i.e.
	// something buffoonish enough to plant.  Returns null when none qualifies.
	public String correctItem(MOB mob)
	{
		for(int i=0;i<mob.numItems();i++)
		{
			final Item I=mob.getItem(i);
			if((I!=null)
			&&(CMLib.flags().canBeSeenBy(I,mob))
			&&(I.amWearingAt(Wearable.IN_INVENTORY))
			&&(!((((I instanceof Armor)&&(I.basePhyStats().armor()>1))
				||((I instanceof Weapon)&&(I.basePhyStats().damage()>1))))))
				return I.Name();
		}
		return null;
	}

	// Picks at random one of the target's currently-worn substantial items
	// (weapon with damage>1 or armor with armor>1, not inside a container);
	// null when the target wears nothing substantial.
	public Item targetItem(MOB target)
	{
		final Vector V=new Vector();
		for(int i=0;i<target.numItems();i++)
		{
			final Item I2=target.getItem(i);
			if((!I2.amWearingAt(Wearable.IN_INVENTORY))
			&&(((I2 instanceof Weapon)&&(I2.basePhyStats().damage()>1))
				||((I2 instanceof Armor)&&(I2.basePhyStats().armor()>1)))
			&&(I2.container()==null))
				V.addElement(I2);
		}
		if(V.size()>0)
			return (Item)V.elementAt(CMLib.dice().roll(1,V.size(),-1));
		return null;
	}

	@Override
	public int castingQuality(MOB mob, Physical target)
	{
		// Indifferent when the bard has no suitable junk item, or the target
		// has neither a swappable worn item nor a free slot.
		if(mob!=null)
		{
			final String parm=correctItem(mob);
			if(parm==null)
				return Ability.QUALITY_INDIFFERENT;
			if(target instanceof MOB)
			{
				final Item targetItem=targetItem((MOB)target);
				if(targetItem==null)
				{
					if(!freePosition((MOB)target))
						return Ability.QUALITY_INDIFFERENT;
				}
			}
		}
		return super.castingQuality(mob,target);
	}

	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		// NPCs given only a target auto-select a junk item to plant.
		if(commands.size()<2)
		{
			if(mob.isMonster()&&(commands.size()==1))
			{
				final String parm=correctItem(mob);
				if(parm!=null)
					commands.addElement(parm);
			}
			if(commands.size()<2)
			{
				mob.tell(L("You must specify a target, and what item to swap on the target!"));
				return false;
			}
		}
		final Item I=mob.findItem(null,(String)commands.lastElement());
		if((I==null)||(!CMLib.flags().canBeSeenBy(I,mob)))
		{
			mob.tell(L("You don't seem to have '@x1'.",((String)commands.lastElement())));
			return false;
		}
		// The planted item must itself be worthless as weapon or armor.
		if(((I instanceof Armor)&&(I.basePhyStats().armor()>1))
		||((I instanceof Weapon)&&(I.basePhyStats().damage()>1)))
		{
			mob.tell(L("@x1 is not buffoonish enough!",I.name(mob)));
			return false;
		}
		commands.removeElementAt(commands.size()-1);
		final MOB target=getTarget(mob,commands,givenTarget);
		if(target==null)
			return false;
		final Item targetItem=targetItem(target);
		if(targetItem==null)
		{
			if(!freePosition(target))
			{
				mob.tell(L("@x1 has no free wearing positions!",target.name(mob)));
				return false;
			}
		}
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		// NOTE(review): levelDiff is computed and adjusted below but never
		// read afterwards -- possibly intended to feed proficiencyCheck.
		int levelDiff=target.phyStats().level()-mob.phyStats().level();
		final boolean success=proficiencyCheck(mob,0,auto);
		if(levelDiff>0)
			levelDiff=-(levelDiff*((!CMLib.flags().canBeSeenBy(mob,target))?5:15));
		else
			levelDiff=-(levelDiff*((!CMLib.flags().canBeSeenBy(mob,target))?1:2));
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,(CMMsg.MSG_NOISYMOVEMENT|CMMsg.MASK_DELICATE|CMMsg.MASK_MALICIOUS)|(auto?CMMsg.MASK_ALWAYS:0),auto?"":L("<S-NAME> do(es) buffoonery to <T-NAMESELF>.")); if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				long position=-1;
				if(targetItem!=null)
				{
					// Steal the slot of the displaced worn item.
					position=targetItem.rawWornCode();
					targetItem.unWear();
				}
				else
				{
					final Vector free=getFreeWearingPositions(target);
					if(free.size()<1)
					{
						mob.tell(L("@x1 has no free wearing positions!",target.name(mob)));
						return false;
					}
					// Prefer the wield slot for non-armor items; otherwise a random free slot.
					if((free.contains(Long.valueOf(Wearable.WORN_WIELD)))
					&&((I instanceof Weapon)||(!(I instanceof Armor))))
						position=Wearable.WORN_WIELD;
					else
						position=((Long)free.elementAt(CMLib.dice().roll(1,free.size(),-1))).longValue();
				}
				if(position>=0)
				{
					I.unWear();
					target.moveItemTo(I);
					I.wearAt(position);
				}
			}
		}
		else
			return beneficialVisualFizzle(mob,target,L("<S-NAME> attempt(s) buffoonery on <T-NAMESELF>, but fail(s)."));
		return success;
	}
}
| apache-2.0 |
kurtharriger/spring-osgi | samples/weather/weather-service-test/src/main/java/org/springframework/osgi/samples/weather/test/WeatherListener.java | 1089 | /*
* Copyright 2006-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.springframework.osgi.samples.weather.test;
import org.springframework.osgi.service.importer.OsgiServiceLifecycleListener;
import java.util.Map;
/**
*/
/**
 * Simple lifecycle listener for the sample weather service that logs
 * bind/unbind events to stdout.
 */
public class WeatherListener implements OsgiServiceLifecycleListener
{
  /**
   * Called when the weather service is bound to this listener.
   *
   * @param obj        the newly bound service instance
   * @param properties the service registration properties
   */
  public void bind(Object obj, Map properties) {
    System.out.println("BOUND: " + obj);
  }

  /**
   * Called when the weather service is unbound from this listener.
   * Fix: the original message said "REBOUND", which misreported the
   * lifecycle event; it now correctly reports "UNBOUND".
   *
   * @param obj        the service instance being unbound
   * @param properties the service registration properties
   */
  public void unbind(Object obj, Map properties) {
    System.out.println("UNBOUND: " + obj);
  }
}
| apache-2.0 |
newkek/incubator-tinkerpop | gremlin-test/src/main/java/org/apache/tinkerpop/gremlin/structure/io/IoCustomTest.java | 8131 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.structure.io;
import org.apache.commons.configuration.Configuration;
import org.apache.tinkerpop.gremlin.AbstractGremlinTest;
import org.apache.tinkerpop.gremlin.FeatureRequirement;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONIo;
import org.apache.tinkerpop.gremlin.structure.io.graphson.GraphSONVersion;
import org.apache.tinkerpop.gremlin.structure.io.graphson.TypeInfo;
import org.apache.tinkerpop.gremlin.structure.io.util.CustomId;
import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
import org.apache.tinkerpop.shaded.jackson.databind.module.SimpleModule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import static org.apache.tinkerpop.gremlin.structure.Graph.Features.ElementFeatures.FEATURE_ANY_IDS;
import static org.apache.tinkerpop.gremlin.structure.Graph.Features.ElementFeatures.FEATURE_USER_SUPPLIED_IDS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
@RunWith(Parameterized.class)
public class IoCustomTest extends AbstractGremlinTest {
@Parameterized.Parameters(name = "{0}")
public static Iterable<Object[]> data() {
final SimpleModule moduleV1d0 = new SimpleModule();
moduleV1d0.addSerializer(CustomId.class, new CustomId.CustomIdJacksonSerializerV1d0());
final SimpleModule moduleV2d0 = new CustomId.CustomIdTinkerPopJacksonModule();
return Arrays.asList(new Object[][]{
{"graphson-v1-embedded", true,
(Function<Graph, GraphReader>) g -> g.io(IoCore.graphson()).reader().mapper(g.io(GraphSONIo.build(GraphSONVersion.V1_0)).mapper().addCustomModule(moduleV1d0).embedTypes(true).create()).create(),
(Function<Graph, GraphWriter>) g -> g.io(IoCore.graphson()).writer().mapper(g.io(GraphSONIo.build(GraphSONVersion.V1_0)).mapper().addCustomModule(moduleV1d0).embedTypes(true).create()).create()},
{"graphson-v2-embedded", true,
(Function<Graph, GraphReader>) g -> g.io(IoCore.graphson()).reader().mapper(g.io(GraphSONIo.build(GraphSONVersion.V2_0)).mapper().addCustomModule(moduleV2d0).typeInfo(TypeInfo.PARTIAL_TYPES).create()).create(),
(Function<Graph, GraphWriter>) g -> g.io(IoCore.graphson()).writer().mapper(g.io(GraphSONIo.build(GraphSONVersion.V2_0)).mapper().addCustomModule(moduleV2d0).typeInfo(TypeInfo.PARTIAL_TYPES).create()).create()},
{"gryo", true,
(Function<Graph, GraphReader>) g -> g.io(IoCore.gryo()).reader().mapper(g.io(IoCore.gryo()).mapper().addCustom(CustomId.class).create()).create(),
(Function<Graph, GraphWriter>) g -> g.io(IoCore.gryo()).writer().mapper(g.io(IoCore.gryo()).mapper().addCustom(CustomId.class).create()).create()}
});
}
@Parameterized.Parameter(value = 0)
public String ioType;
@Parameterized.Parameter(value = 1)
public boolean assertIdDirectly;
@Parameterized.Parameter(value = 2)
public Function<Graph, GraphReader> readerMaker;
@Parameterized.Parameter(value = 3)
public Function<Graph, GraphWriter> writerMaker;
@Test
@FeatureRequirement(featureClass = Graph.Features.EdgeFeatures.class, feature = Graph.Features.EdgeFeatures.FEATURE_ADD_EDGES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES)
@FeatureRequirement(featureClass = Graph.Features.EdgePropertyFeatures.class, feature = Graph.Features.EdgePropertyFeatures.FEATURE_SERIALIZABLE_VALUES)
public void shouldSupportUUID() throws Exception {
final UUID id = UUID.randomUUID();
final Vertex v1 = graph.addVertex(T.label, "person");
final Vertex v2 = graph.addVertex(T.label, "person");
final Edge e = v1.addEdge("friend", v2, "uuid", id);
try (final ByteArrayOutputStream os = new ByteArrayOutputStream()) {
final GraphWriter writer = writerMaker.apply(graph);
writer.writeEdge(os, e);
final AtomicBoolean called = new AtomicBoolean(false);
final GraphReader reader = readerMaker.apply(graph);
try (final ByteArrayInputStream bais = new ByteArrayInputStream(os.toByteArray())) {
reader.readEdge(bais, edge -> {
final Edge detachedEdge = (Edge) edge;
assertEquals(e.id(), assertIdDirectly ? detachedEdge.id() : graph.edges(detachedEdge.id().toString()).next().id());
assertEquals(v1.id(), assertIdDirectly ? detachedEdge.outVertex().id() : graph.vertices(detachedEdge.outVertex().id().toString()).next().id());
assertEquals(v2.id(), assertIdDirectly ? detachedEdge.inVertex().id() : graph.vertices(detachedEdge.inVertex().id().toString()).next().id());
assertEquals(v1.label(), detachedEdge.outVertex().label());
assertEquals(v2.label(), detachedEdge.inVertex().label());
assertEquals(e.label(), detachedEdge.label());
assertEquals(e.keys().size(), IteratorUtils.count(detachedEdge.properties()));
assertEquals(id, detachedEdge.value("uuid"));
called.set(true);
return null;
});
}
assertTrue(called.get());
}
}
@Test
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = Graph.Features.VertexFeatures.FEATURE_ADD_VERTICES)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = FEATURE_USER_SUPPLIED_IDS)
@FeatureRequirement(featureClass = Graph.Features.VertexFeatures.class, feature = FEATURE_ANY_IDS)
public void shouldProperlySerializeCustomId() throws Exception {
    // Migrates a graph containing a vertex with a custom (non-primitive) id
    // into a fresh graph instance and verifies the id survives intact.
    graph.addVertex(T.id, new CustomId("vertex", UUID.fromString("AF4B5965-B176-4552-B3C1-FBBE2F52C305")));
    final GraphWriter writer = writerMaker.apply(graph);
    final GraphReader reader = readerMaker.apply(graph);
    // Build a second, empty graph ("readGraph") to act as the migration target.
    final Configuration configuration = graphProvider.newGraphConfiguration("readGraph", this.getClass(), name.getMethodName(), null);
    graphProvider.clear(configuration);
    final Graph g1 = graphProvider.openTestGraph(configuration);
    GraphMigrator.migrateGraph(graph, g1, reader, writer);
    final Vertex onlyVertex = g1.traversal().V().next();
    final CustomId id = (CustomId) onlyVertex.id();
    assertEquals("vertex", id.getCluster());
    assertEquals(UUID.fromString("AF4B5965-B176-4552-B3C1-FBBE2F52C305"), id.getElementId());
    // need to manually close the "g1" instance
    graphProvider.clear(g1, configuration);
}
}
| apache-2.0 |
MaxRau/CoffeeMud | com/planet_ink/coffee_mud/Items/Weapons/StdWeapon.java | 14151 | package com.planet_ink.coffee_mud.Items.Weapons;
import com.planet_ink.coffee_mud.Items.Basic.StdItem;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2001-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Standard implementation of a wieldable weapon Item.
 * <p>
 * The {@code usesRemaining()} value does double duty in this class: for
 * ordinary weapons it is the wear-and-tear condition (100 = perfect), while
 * for ammunition-using weapons it is the number of rounds currently loaded.
 * The ammunition type name is stored in the item's readable-text field.
 */
public class StdWeapon extends StdItem implements Weapon, AmmunitionWeapon
{
    @Override public String ID(){ return "StdWeapon";}
    // damage type (e.g. natural) and broad weapon classification
    protected int weaponType=TYPE_NATURAL;
    protected int weaponClassification=CLASS_NATURAL;
    // when true, the combat library produces the extended miss-message variants
    protected boolean useExtendedMissString=false;
    // minimum/maximum combat range for this weapon
    protected int minRange=0;
    protected int maxRange=0;
    // rounds this weapon can hold; 0 means not an ammunition weapon
    protected int ammoCapacity=0;
    // tick timestamp of the last out-of-ammo handling; throttles auto-reload spam
    protected long lastReloadTime=0;

    /** Constructs a generic steel weapon in perfect (100%) condition. */
    public StdWeapon()
    {
        super();
        setName("weapon");
        setDisplayText(" sits here.");
        setDescription("This is a deadly looking weapon.");
        wornLogicalAnd=false;
        properWornBitmap=Wearable.WORN_HELD|Wearable.WORN_WIELD;
        basePhyStats().setAttackAdjustment(0);
        basePhyStats().setDamage(0);
        basePhyStats().setAbility(0);
        baseGoldValue=15;
        material=RawMaterial.RESOURCE_STEEL;
        setUsesRemaining(100);
        recoverPhyStats();
    }

    @Override public int weaponType(){return weaponType;}
    @Override public int weaponClassification(){return weaponClassification;}
    @Override public void setWeaponType(int newType){weaponType=newType;}
    @Override public void setWeaponClassification(int newClassification){weaponClassification=newClassification;}

    /**
     * Adds the weapon's magical plus/minus (ability) and its attack/damage
     * numbers to the identify text reported for this item.
     */
    @Override
    public String secretIdentity()
    {
        String id=super.secretIdentity();
        if(phyStats().ability()>0)
            id=name()+" +"+phyStats().ability()+((id.length()>0)?"\n":"")+id;
        else
        if(phyStats().ability()<0)
            id=name()+" "+phyStats().ability()+((id.length()>0)?"\n":"")+id;
        return id+"\n\rAttack: "+phyStats().attackAdjustment()+", Damage: "+phyStats().damage();
    }

    /**
     * Applies this weapon's attack and damage adjustments to the wearer's
     * stats, but only while the weapon is actually wielded.
     */
    @Override
    public void affectPhyStats(Physical affected, PhyStats affectableStats)
    {
        super.affectPhyStats(affected,affectableStats);
        if(amWearingAt(Wearable.WORN_WIELD))
        {
            if(phyStats().attackAdjustment()!=0)
                affectableStats.setAttackAdjustment(affectableStats.attackAdjustment()+(phyStats().attackAdjustment()));
            if(phyStats().damage()!=0)
                affectableStats.setDamage(affectableStats.damage()+phyStats().damage());
        }
    }

    /**
     * Recomputes effective stats: each point of magical ability adds +2 damage
     * and +10 attack (scaled by the number of worn slots for logical-AND
     * items), and wear-and-tear scales damage down proportionally to condition.
     */
    @Override
    public void recoverPhyStats()
    {
        super.recoverPhyStats();
        if(phyStats().damage()!=0)
        {
            final int ability=super.wornLogicalAnd ? (phyStats().ability()*CMath.numberOfSetBits(super.myWornCode)) : phyStats().ability();
            phyStats().setDamage(phyStats().damage()+(ability*2));
            phyStats().setAttackAdjustment(phyStats().attackAdjustment()+(ability*10));
        }
        if((subjectToWearAndTear())&&(usesRemaining()<100))
            phyStats().setDamage(((int)Math.round(CMath.mul(phyStats().damage(),CMath.div(usesRemaining(),100)))));
    }

    /**
     * Handles messages targeting or involving this weapon:
     * LOOK/EXAMINE reports ammo and condition; RELOAD transfers rounds from an
     * Ammunition item (copying its savable effects onto the weapon as
     * temporary, non-savable effects); UNLOAD copies those effects back onto
     * the ammunition and empties the weapon; thrown weapons are dropped after
     * an attack; and successful damage may cause disease on the victim or
     * wear on the weapon itself.
     */
    @Override
    public void executeMsg(final Environmental myHost, final CMMsg msg)
    {
        super.executeMsg(myHost,msg);
        if(msg.amITarget(this))
        {
            switch(msg.targetMinor())
            {
            case CMMsg.TYP_LOOK:
            case CMMsg.TYP_EXAMINE:
                if(CMLib.flags().canBeSeenBy(this,msg.source()))
                {
                    if(requiresAmmunition())
                        msg.source().tell(L("@x1 remaining: @x2/@x3.",ammunitionType(),""+ammunitionRemaining(),""+ammunitionCapacity()));
                    if((subjectToWearAndTear())&&(usesRemaining()<100))
                        msg.source().tell(weaponHealth());
                }
                break;
            case CMMsg.TYP_RELOAD:
                if(msg.tool() instanceof Ammunition)
                {
                    boolean recover=false;
                    final Ammunition I=(Ammunition)msg.tool();
                    // take as many rounds as capacity allows, limited by what the ammo holds
                    int howMuchToTake=ammunitionCapacity();
                    if(I.usesRemaining()<howMuchToTake)
                        howMuchToTake=I.usesRemaining();
                    setAmmoRemaining(howMuchToTake);
                    I.setUsesRemaining(I.usesRemaining()-howMuchToTake);
                    // strip any leftover ammo-granted (non-savable, invoker-less) effects
                    final LinkedList<Ability> removeThese=new LinkedList<Ability>();
                    for(final Enumeration<Ability> a=effects();a.hasMoreElements();)
                    {
                        final Ability A=a.nextElement();
                        if((A!=null)&&(!A.isSavable())&&(A.invoker()==null))
                            removeThese.add(A);
                    }
                    for(final Ability A : removeThese)
                        delEffect(A);
                    // copy the new ammunition's savable effects onto the weapon as temporary ones
                    for(final Enumeration<Ability> a=I.effects();a.hasMoreElements();)
                    {
                        Ability A=a.nextElement();
                        if((A!=null)&&(A.isSavable())&&(fetchEffect(A.ID())==null))
                        {
                            A=(Ability)A.copyOf();
                            A.setInvoker(null);
                            A.setSavable(false);
                            addEffect(A);
                            recover=true;
                        }
                    }
                    if(I.usesRemaining()<=0)
                        I.destroy();
                    if(recover)
                        recoverOwner();
                }
                break;
            case CMMsg.TYP_UNLOAD:
                if(msg.tool() instanceof Ammunition)
                {
                    final Ammunition ammo=(Ammunition)msg.tool();
                    // hand the ammo-granted effects back to the ammunition item
                    for(final Enumeration<Ability> a=effects();a.hasMoreElements();)
                    {
                        final Ability A=a.nextElement();
                        if((A!=null)&&(!A.isSavable())&&(A.invoker()==null))
                        {
                            final Ability ammoA=(Ability)A.copyOf();
                            ammo.addNonUninvokableEffect(ammoA);
                        }
                    }
                    setAmmoRemaining(0);
                }
                break;
            }
        }
        else
        if((msg.tool()==this)
        &&(msg.targetMinor()==CMMsg.TYP_WEAPONATTACK)
        &&(weaponClassification()==Weapon.CLASS_THROWN))
            // thrown weapons leave the attacker's hands after the attack
            msg.addTrailerMsg(CMClass.getMsg(msg.source(),this,CMMsg.MSG_DROP,null));
        if((msg.targetMinor()==CMMsg.TYP_DAMAGE)
        &&(msg.tool()==this)
        &&(amWearingAt(Wearable.WORN_WIELD))
        &&(weaponClassification()!=Weapon.CLASS_NATURAL)
        &&(weaponType()!=Weapon.TYPE_NATURAL)
        &&(msg.target() instanceof MOB)
        &&((msg.value())>0)
        &&(owner() instanceof MOB)
        &&(msg.amISource((MOB)owner())))
        {
            final MOB ownerM=(MOB)owner();
            final int hurt=(msg.value());
            final MOB tmob=(MOB)msg.target();
            // significant, non-fatal hits may infect non-monster victims
            if((hurt>(tmob.maxState().getHitPoints()/10)||(hurt>50))
            &&(tmob.curState().getHitPoints()>hurt))
            {
                if((!tmob.isMonster())
                &&(CMLib.dice().rollPercentage()==1)
                &&(CMLib.dice().rollPercentage()>(tmob.charStats().getStat(CharStats.STAT_CONSTITUTION)*4))
                &&(!CMSecurity.isDisabled(CMSecurity.DisFlag.AUTODISEASE)))
                {
                    Ability A=null;
                    // badly-worn metal weapons risk lockjaw/tetanus; otherwise plain infection
                    if(subjectToWearAndTear()
                    &&(usesRemaining()<25)
                    &&((material()&RawMaterial.MATERIAL_MASK)==RawMaterial.MATERIAL_METAL))
                    {
                        if(CMLib.dice().rollPercentage()>50)
                            A=CMClass.getAbility("Disease_Lockjaw");
                        else
                            A=CMClass.getAbility("Disease_Tetanus");
                    }
                    else
                        A=CMClass.getAbility("Disease_Infection");
                    if((A!=null)&&(tmob.fetchEffect(A.ID())==null))
                        A.invoke(msg.source(),tmob,true,phyStats().level());
                }
            }
            // rare chance of the weapon itself taking a point of damage in melee
            if((subjectToWearAndTear())
            &&(CMLib.dice().rollPercentage()==1)
            &&(msg.source().rangeToTarget()==0)
            &&(CMLib.dice().rollPercentage()>((phyStats().level()/2)+(10*phyStats().ability())+(CMLib.flags().isABonusItems(this)?20:0)))
            &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_ENERGY)
            &&((material()&RawMaterial.MATERIAL_MASK)!=RawMaterial.MATERIAL_GAS))
            {
                CMLib.combat().postItemDamage(ownerM, this, null, 1, CMMsg.TYP_JUSTICE, null);
            }
        }
    }

    /**
     * Vetoes attacks from an empty ammunition weapon.  On the first failure
     * per tick it tells the attacker and queues a LOAD (or REMOVE, when no
     * ammo is carried) command; each successful attack consumes one round.
     */
    @Override
    public boolean okMessage(final Environmental myHost, final CMMsg msg)
    {
        if(!super.okMessage(myHost,msg))
            return false;
        if((msg.targetMinor()==CMMsg.TYP_WEAPONATTACK)
        &&(msg.tool()==this)
        &&(requiresAmmunition())
        &&(ammunitionCapacity()>0))
        {
            if(ammunitionRemaining()>ammunitionCapacity())
                setAmmoRemaining(ammunitionCapacity());
            if(ammunitionRemaining()<=0)
            {
                // only attempt the auto-reload once per tick (lastReloadTime throttle)
                if(lastReloadTime != msg.source().lastTickedDateTime())
                {
                    msg.source().tell(L("@x1 is out of @x2.",name(),ammunitionType()));
                    if((msg.source().isMine(this))
                    &&(msg.source().location()!=null)
                    &&(CMLib.flags().aliveAwakeMobile(msg.source(),true)))
                    {
                        lastReloadTime=msg.source().lastTickedDateTime();
                        if((!msg.source().isMonster())||inventoryAmmoCheck(msg.source()))
                            msg.source().enqueCommand(CMParms.parse("LOAD ALL \"$"+name()+"$\""), 0, 0);
                        else
                            msg.source().enqueCommand(CMParms.parse("REMOVE \"$"+name()+"$\""), 0, 0);
                    }
                }
                return false;
            }
            else
                setUsesRemaining(usesRemaining()-1); // consume one round
        }
        return true;
    }

    /**
     * Returns true if the given mob carries any Ammunition item matching this
     * weapon's ammunition type.
     */
    protected boolean inventoryAmmoCheck(MOB M)
    {
        if(M==null)
            return false;
        for(int i=0;i<M.numItems();i++)
        {
            final Item I=M.getItem(i);
            if((I!=null)&&(I instanceof Ammunition)&&(((Ammunition)I).ammunitionType().equalsIgnoreCase(ammunitionType())))
                return true;
        }
        return false;
    }

    /** Caps the sentinel Integer.MAX_VALUE at 100 (perfect condition). */
    @Override
    public void setUsesRemaining(int newUses)
    {
        if(newUses==Integer.MAX_VALUE)
            newUses=100;
        super.setUsesRemaining(newUses);
    }

    /**
     * Returns a human-readable condition report based on usesRemaining();
     * bladed classifications get "dull"-style wording, others get "worn".
     */
    protected String weaponHealth()
    {
        if(usesRemaining()>=100)
            return "";
        else
        if(usesRemaining()>=95)
            return name()+" looks slightly used ("+usesRemaining()+"%)";
        else
        if(usesRemaining()>=85)
        {
            switch(weaponClassification())
            {
            case Weapon.CLASS_AXE:
            case Weapon.CLASS_DAGGER:
            case Weapon.CLASS_EDGED:
            case Weapon.CLASS_POLEARM:
            case Weapon.CLASS_SWORD:
                return name()+" is somewhat dull ("+usesRemaining()+"%)";
            default:
                return name()+" is somewhat worn ("+usesRemaining()+"%)";
            }
        }
        else
        if(usesRemaining()>=75)
        {
            switch(weaponClassification())
            {
            case Weapon.CLASS_AXE:
            case Weapon.CLASS_DAGGER:
            case Weapon.CLASS_EDGED:
            case Weapon.CLASS_POLEARM:
            case Weapon.CLASS_SWORD:
                return name()+" is dull ("+usesRemaining()+"%)";
            default:
                return name()+" is worn ("+usesRemaining()+"%)";
            }
        }
        else
        if(usesRemaining()>50)
        {
            switch(weaponClassification())
            {
            case Weapon.CLASS_AXE:
            case Weapon.CLASS_DAGGER:
            case Weapon.CLASS_EDGED:
            case Weapon.CLASS_POLEARM:
            case Weapon.CLASS_SWORD:
                return name()+" has some notches and chinks ("+usesRemaining()+"%)";
            default:
                return name()+" is damaged ("+usesRemaining()+"%)";
            }
        }
        else
        if(usesRemaining()>25)
            return name()+" is heavily damaged ("+usesRemaining()+"%)";
        else
            return name()+" is so damaged, it is practically harmless ("+usesRemaining()+"%)";
    }

    /** Delegates miss-message generation to the combat library. */
    @Override
    public String missString()
    {
        return CMLib.combat().standardMissString(weaponType,weaponClassification,name(),useExtendedMissString);
    }

    /** Delegates hit-message generation to the combat library. */
    @Override
    public String hitString(int damageAmount)
    {
        return CMLib.combat().standardHitString(weaponType, weaponClassification,damageAmount,name());
    }

    /** Minimum range; the SENSE_ITEMNOMINRANGE flag overrides it to 0. */
    @Override
    public int minRange()
    {
        if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMINRANGE))
            return 0;
        return minRange;
    }

    /** Maximum range; the SENSE_ITEMNOMAXRANGE flag overrides it to 100. */
    @Override
    public int maxRange()
    {
        if(CMath.bset(phyStats().sensesMask(),PhyStats.SENSE_ITEMNOMAXRANGE))
            return 100;
        return maxRange;
    }

    @Override public void setRanges(int min, int max){minRange=min;maxRange=max;}

    /**
     * A weapon requires ammunition when its readable-text (the ammo type
     * name) is non-empty; Wands never require ammunition.
     */
    @Override
    public boolean requiresAmmunition()
    {
        if((readableText()==null)||(this instanceof Wand))
            return false;
        return readableText().length()>0;
    }

    /** Stores the ammunition type name in the readable-text field. */
    @Override
    public void setAmmunitionType(String ammo)
    {
        if(!(this instanceof Wand))
            setReadableText(ammo);
    }

    @Override
    public String ammunitionType()
    {
        return readableText();
    }

    /** For ammo weapons, usesRemaining() is the loaded round count. */
    @Override
    public int ammunitionRemaining()
    {
        return usesRemaining();
    }

    /**
     * Sets the loaded round count (Integer.MAX_VALUE is treated as 20).
     * When the last round is removed, any ammo-granted (non-savable,
     * invoker-less) effects are stripped and the owner's stats recovered.
     */
    @Override
    public void setAmmoRemaining(int amount)
    {
        final int oldAmount=ammunitionRemaining();
        if(amount==Integer.MAX_VALUE)
            amount=20;
        setUsesRemaining(amount);
        final ItemPossessor myOwner=owner;
        if((oldAmount>0)
        &&(amount==0)
        &&(myOwner instanceof MOB)
        &&(ammunitionCapacity()>0))
        {
            boolean recover=false;
            for(final Enumeration<Ability> a=effects();a.hasMoreElements();)
            {
                final Ability A=a.nextElement();
                if((A!=null)&&(!A.isSavable())&&(A.invoker()==null))
                {
                    recover=true;
                    delEffect(A);
                }
            }
            if(recover)
                recoverOwner();
        }
    }

    @Override public int ammunitionCapacity(){return ammoCapacity;}
    @Override public void setAmmoCapacity(int amount){ammoCapacity=amount;}

    /**
     * Gold value scales down with the weapon's condition.
     * NOTE(review): the threshold here is 1000 while display/stat logic uses
     * 100 — looks like it may have been intended as 100; confirm.
     */
    @Override
    public int value()
    {
        if((subjectToWearAndTear())&&(usesRemaining()<1000))
            return (int)Math.round(CMath.mul(super.value(),CMath.div(usesRemaining(),100)));
        return super.value();
    }

    /**
     * Only non-ammunition, non-Wand weapons with a sane usesRemaining value
     * are subject to wear and tear (for ammo weapons, uses = round count).
     */
    @Override
    public boolean subjectToWearAndTear()
    {
        return((!requiresAmmunition())
            &&(!(this instanceof Wand))
            &&(usesRemaining()<=1000)
            &&(usesRemaining()>=0));
    }

    /** Re-derives the owning mob's (or item possessor's) stats after effect changes. */
    public void recoverOwner()
    {
        final ItemPossessor myOwner=owner;
        if(myOwner instanceof MOB)
        {
            ((MOB)myOwner).recoverCharStats();
            ((MOB)myOwner).recoverMaxState();
            ((MOB)myOwner).recoverPhyStats();
        }
        else
        if(myOwner!=null)
            myOwner.recoverPhyStats();
    }
}
| apache-2.0 |
LeapAppServices/LAS-SDK-CloudCode-Java | cloud-code-base/src/main/java/com/maxleap/code/assist/classes/WalletLog.java | 1299 | package com.maxleap.code.assist.classes;
import com.maxleap.code.assist.Path;
import com.maxleap.las.sdk.MLObject;
import com.fasterxml.jackson.annotation.JsonInclude;
/**
 * Wallet transaction log entry.
 * User:poplar
 * Date:15-6-2
 */
@JsonInclude(JsonInclude.Include.NON_NULL)
@Path("/walletLogs")
public class WalletLog extends MLObject {
    // owning user's id
    private String uId;
    // wallet id
    private String wId;
    // operation type: 0 = increase, 1 = decrease
    private int opType;
    // payment type: 0 = other purchase, 1 = invoice purchase
    private int payType;
    // transaction amount
    private Double amount;
    // currency type id
    private String coinId;

    public String getuId() {
        return uId;
    }

    public void setuId(String uId) {
        this.uId = uId;
    }

    public String getwId() {
        return wId;
    }

    public void setwId(String wId) {
        this.wId = wId;
    }

    public int getOpType() {
        return opType;
    }

    public void setOpType(int opType) {
        this.opType = opType;
    }

    public int getPayType() {
        return payType;
    }

    public void setPayType(int payType) {
        this.payType = payType;
    }

    public Double getAmount() {
        return amount;
    }

    public void setAmount(Double amount) {
        this.amount = amount;
    }

    public String getCoinId() {
        return coinId;
    }

    public void setCoinId(String coinId) {
        this.coinId = coinId;
    }
}
| apache-2.0 |
MaTriXy/gce2retrofit | gce2retrofit/src/main/java/com/sqisland/gce2retrofit/Method.java | 465 | package com.sqisland.gce2retrofit;
import com.google.gson.JsonObject;
import java.util.List;
/**
 * Data holder for one REST method definition, populated from JSON via Gson;
 * field names match the JSON keys (presumably a Google-style API discovery
 * document — TODO confirm against the generator's input format).
 */
public class Method {
    public String id;
    public String path;
    public String httpMethod;
    public String description;
    // raw JSON object describing each parameter, keyed by parameter name
    public JsonObject parameters;
    public List<String> parameterOrder;
    public RequestOrResponse request;
    public RequestOrResponse response;

    /** Reference to the request or response schema ($ref) of a method. */
    public static class RequestOrResponse {
        public String $ref;
        public String parameterName;
    }
} | apache-2.0 |
trasukg/river-qa-2.2 | src/com/sun/jini/outrigger/ConstrainableJavaSpaceAdmin.java | 6585 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.outrigger;
import java.rmi.RemoteException;
import net.jini.core.entry.Entry;
import net.jini.core.transaction.Transaction;
import net.jini.core.transaction.TransactionException;
import net.jini.core.constraint.MethodConstraints;
import net.jini.space.JavaSpace05;
/**
* Sub-interface of <code>JavaSpaceAdmin</code> that
* adds a method that allows iterators to be created with
* a given set of constraints.<p>
*
* @deprecated The {@link JavaSpace05#contents JavaSpace05.contents}
* method can be used to view the space's contents.
*
* @author Sun Microsystems, Inc.
* @since 2.0
*/
public interface ConstrainableJavaSpaceAdmin extends JavaSpaceAdmin {
    /**
     * Return an <code>AdminIterator</code> that will iterate over all
     * the entries in the space that match the given template and are
     * visible under the given transaction.
     * <p>
     * The interactions between other operations on the space and
     * the returned iterator are undefined.
     * <p>
     * Note, because this is a convenience method for
     * <code>contents(Entry, Transaction, int,
     * MethodConstraints)</code> the constraints associated with
     * <code>contents(Entry, Transaction, int,
     * MethodConstraints)</code> are used for any calls through this
     * method, not the constraints associated with this method.
     *
     * @param tmpl The iterator should return only entries that match
     *             tmpl
     * @param txn  The iterator should return only entries that match
     *             this transaction
     * @throws RemoteException if communications with the
     *         server is necessary and it can not be completed.
     * @throws TransactionException if there is a problem with
     *         <code>txn</code>.
     * @throws SecurityException If the space is performing
     *         access control and it can not be confirmed
     *         that the subject making this call has permission
     *         to create an <code>AdminIterator</code> with
     *         the specified template and transaction.
     */
    AdminIterator contents(Entry tmpl, Transaction txn)
        throws TransactionException, RemoteException;

    /**
     * Return an <code>AdminIterator</code> that will iterate over all
     * the entries in the space that match the given template and are
     * visible under the given transaction.
     * <p>
     * The interactions between other operations on the space and
     * the returned iterator are undefined.
     * <p>
     * Note, because this is a convenience method for
     * <code>contents(Entry, Transaction, int,
     * MethodConstraints)</code> the constraints associated with
     * <code>contents(Entry, Transaction, int,
     * MethodConstraints)</code> are used for any calls through this
     * method, not the constraints associated with this method.
     *
     * @param tmpl The iterator should return only entries that match
     *             tmpl
     * @param txn  The iterator should return only entries that match
     *             this transaction
     * @param fetchSize advice on how many entries to fetch when the iterator
     *             has to go to the server for more entries.
     * @throws RemoteException if communications with the
     *         server is necessary and it can not be completed.
     * @throws TransactionException if there is a problem with
     *         <code>txn</code>.
     * @throws SecurityException If the space is performing
     *         access control and it can not be confirmed
     *         that the subject making this call has permission
     *         to create an <code>AdminIterator</code> with
     *         the specified template and transaction.
     * @throws IllegalArgumentException if fetchSize is
     *         neither positive nor <code>USE_DEFAULT</code>.
     */
    AdminIterator contents(Entry tmpl, Transaction txn, int fetchSize)
        throws TransactionException, RemoteException;

    /**
     * Return an <code>AdminIterator</code> that will iterate over all
     * the entries in the space that match the given template and are
     * visible under the given transaction. The returned iterator
     * will support proxy trust verification and will enforce
     * the specified <code>MethodConstraints</code>.
     * <p>
     * The interactions between other operations on the space and
     * the returned iterator are undefined.
     * <p>
     * @param tmpl The iterator should return only entries that match
     *             tmpl
     * @param txn  The iterator should return only entries that match
     *             this transaction
     * @param fetchSize advice on how many entries to fetch when the
     *             iterator has to go to the server for more entries.
     * @param constrains the <code>MethodConstraints</code> the
     *             returned proxy should enforce.
     * @return An object that can be used to iterate over entries
     *         in the space.
     * @throws RemoteException if communications with the
     *         server is necessary and it can not be completed.
     * @throws TransactionException if there is a problem with
     *         <code>txn</code>.
     * @throws SecurityException If the space is performing
     *         access control and it can not be confirmed
     *         that the subject making this call has permission
     *         to create an <code>AdminIterator</code> with
     *         the specified template and transaction.
     * @throws IllegalArgumentException if fetchSize is
     *         neither positive nor <code>USE_DEFAULT</code>.
     */
    AdminIterator contents(Entry tmpl, Transaction txn, int fetchSize,
                           MethodConstraints constrains)
        throws TransactionException, RemoteException;
}
| apache-2.0 |
speedy01/Openfire | xmppserver/src/main/java/org/jivesoftware/openfire/websocket/OpenfireWebSocketServlet.java | 4106 | /*
* Copyright (C) 2015 Tom Evans. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.websocket;
import java.text.MessageFormat;
import org.eclipse.jetty.websocket.common.extensions.compress.PerMessageDeflateExtension;
import org.eclipse.jetty.websocket.servlet.ServletUpgradeRequest;
import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse;
import org.eclipse.jetty.websocket.servlet.WebSocketCreator;
import org.eclipse.jetty.websocket.servlet.WebSocketServlet;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.session.ClientSession;
import org.jivesoftware.openfire.session.LocalSession;
import org.jivesoftware.util.JiveGlobals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This Servlet enables XMPP over WebSocket (RFC 7395) for Openfire.
*
* The Jetty WebSocketServlet serves as a base class and enables easy integration into the
* BOSH (http-bind) web context. Each WebSocket request received at the "/ws/" URI will be
* forwarded to this plugin/servlet, which will in turn create a new {@link XmppWebSocket}
* for each new connection.
*/
public class OpenfireWebSocketServlet extends WebSocketServlet {

    private static final long serialVersionUID = 7281841492829464605L;
    private static final Logger Log = LoggerFactory.getLogger(OpenfireWebSocketServlet.class);

    /**
     * Closes every client session that was flagged as websocket-based
     * (session data key "ws" set to Boolean.TRUE) before tearing down the
     * servlet itself.
     */
    @Override
    public void destroy()
    {
        // terminate any active websocket sessions
        SessionManager sm = XMPPServer.getInstance().getSessionManager();
        for (ClientSession session : sm.getSessions()) {
            if (session instanceof LocalSession) {
                Object ws = ((LocalSession) session).getSessionData("ws");
                if (ws != null && (Boolean) ws) {
                    Log.debug( "Closing session as websocket servlet is being destroyed: {}", session );
                    session.close();
                }
            }
        }
        super.destroy();
    }

    /**
     * Configures the Jetty websocket factory: optionally registers the
     * permessage-deflate compression extension, sizes message buffers from
     * the "xmpp.parser.buffer.size" property, and installs a creator that
     * accepts only the "xmpp" subprotocol (RFC 7395), producing one
     * XmppWebSocket per connection.
     */
    @Override
    public void configure(WebSocketServletFactory factory)
    {
        if (XmppWebSocket.isCompressionEnabled()) {
            factory.getExtensionFactory().register("permessage-deflate", PerMessageDeflateExtension.class);
        }
        final int messageSize = JiveGlobals.getIntProperty("xmpp.parser.buffer.size", 1048576);
        factory.getPolicy().setMaxTextMessageBufferSize(messageSize * 5);
        factory.getPolicy().setMaxTextMessageSize(messageSize);
        factory.setCreator(new WebSocketCreator() {
            @Override
            public Object createWebSocket(ServletUpgradeRequest req, ServletUpgradeResponse resp)
            {
                try {
                    for (String subprotocol : req.getSubProtocols())
                    {
                        if ("xmpp".equals(subprotocol))
                        {
                            resp.setAcceptedSubProtocol(subprotocol);
                            return new XmppWebSocket();
                        }
                    }
                } catch (Exception e) {
                    Log.warn(MessageFormat.format("Unable to load websocket factory: {0} ({1})", e.getClass().getName(), e.getMessage()));
                }
                // returning null rejects the upgrade request
                Log.warn("Failed to create websocket for {}:{} make a request at {}", req.getRemoteAddress(), req.getRemotePort(), req.getRequestPath() );
                return null;
            }
        });
    }
}
| apache-2.0 |
freeVM/freeVM | enhanced/java/classlib/modules/awt/src/main/java/common/org/apache/harmony/awt/gl/Crossing.java | 27766 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Denis M. Kishenko
*/
package org.apache.harmony.awt.gl;
import java.awt.Shape;
import java.awt.geom.PathIterator;
public class Crossing {
/**
 * Allowable tolerance for bounds comparison
 */
static final double DELTA = 1E-5;

/**
 * If roots have distance less then <code>ROOT_DELTA</code> they are double
 */
static final double ROOT_DELTA = 1E-10;

/**
 * Sentinel result meaning the rectangle crosses the segment/curve
 */
public static final int CROSSING = 255;

/**
 * Sentinel result meaning the crossing count could not be determined
 */
static final int UNKNOWN = 254;
/**
 * Solves the quadratic equation eqn[2]*t^2 + eqn[1]*t + eqn[0] = 0 and
 * stores the distinct real roots in <code>res</code>.
 * @param eqn - the coefficients of the equation
 * @param res - the roots of the equation
 * @return the number of distinct roots, or -1 for a degenerate (constant) equation
 */
public static int solveQuad(double eqn[], double res[]) {
    final double a = eqn[2];
    final double b = eqn[1];
    final double c = eqn[0];
    int count = 0;
    if (a == 0.0) {
        // Degenerate to linear: b*t + c = 0
        if (b == 0.0) {
            return -1;
        }
        res[count++] = -c / b;
    } else {
        double disc = b * b - 4.0 * a * c;
        if (disc < 0.0) {
            // Negative discriminant: no real roots
            return 0;
        }
        disc = Math.sqrt(disc);
        res[count++] = (-b + disc) / (a * 2.0);
        if (disc != 0.0) {
            res[count++] = (-b - disc) / (a * 2.0);
        }
    }
    // Collapse roots that are numerically indistinguishable
    return fixRoots(res, count);
}
/**
 * Solves cubic equation eqn[3]*t^3 + eqn[2]*t^2 + eqn[1]*t + eqn[0] = 0
 * using the trigonometric/Cardano method.
 * @param eqn - the coefficients of the equation
 * @param res - the roots of the equation
 * @return a number of roots
 */
public static int solveCubic(double eqn[], double res[]) {
    double d = eqn[3];
    // Degenerate to quadratic when the cubic coefficient is zero
    if (d == 0) {
        return solveQuad(eqn, res);
    }
    // Normalize to t^3 + a*t^2 + b*t + c = 0
    double a = eqn[2] / d;
    double b = eqn[1] / d;
    double c = eqn[0] / d;
    int rc = 0;

    double Q = (a * a - 3.0 * b) / 9.0;
    double R = (2.0 * a * a * a - 9.0 * a * b + 27.0 * c) / 54.0;
    double Q3 = Q * Q * Q;
    double R2 = R * R;
    double n = - a / 3.0;

    if (R2 < Q3) {
        // Three distinct real roots (trigonometric case)
        double t = Math.acos(R / Math.sqrt(Q3)) / 3.0;
        double p = 2.0 * Math.PI / 3.0;
        double m = -2.0 * Math.sqrt(Q);
        res[rc++] = m * Math.cos(t) + n;
        res[rc++] = m * Math.cos(t + p) + n;
        res[rc++] = m * Math.cos(t - p) + n;
    } else {
        // Debug.println("R2 >= Q3 (" + R2 + "/" + Q3 + ")");
        // One real root (plus a possible double root when R2 == Q3)
        double A = Math.pow(Math.abs(R) + Math.sqrt(R2 - Q3), 1.0 / 3.0);
        if (R > 0.0) {
            A = -A;
        }
        // if (A == 0.0) {
        if (-ROOT_DELTA < A && A < ROOT_DELTA) {
            res[rc++] = n;
        } else {
            double B = Q / A;
            res[rc++] = A + B + n;
            // if (R2 == Q3) {
            double delta = R2 - Q3;
            if (-ROOT_DELTA < delta && delta < ROOT_DELTA) {
                res[rc++] = - (A + B) / 2.0 + n;
            }
        }

    }
    // Collapse roots that are numerically indistinguishable
    return fixRoots(res, rc);
}
/**
 * Excludes double roots. Roots are considered double if they lie within
 * the isZero tolerance of each other; of each duplicate group, the last
 * occurrence is kept.
 * @param res - the roots
 * @param rc - the roots count
 * @return new roots count
 */
static int fixRoots(double res[], int rc) {
    int kept = 0;
    for (int i = 0; i < rc; i++) {
        boolean duplicate = false;
        // drop res[i] when an equal root appears later in the array
        for (int j = i + 1; j < rc; j++) {
            if (isZero(res[i] - res[j])) {
                duplicate = true;
                break;
            }
        }
        if (!duplicate) {
            res[kept++] = res[i];
        }
    }
    return kept;
}
/**
 * QuadCurve class provides basic functionality to find curve crossing and calculating bounds.
 * The curve is translated so its start point is the origin; the point of
 * interest is likewise translated before the helper methods are called.
 * Parametrically: x(t) = Ax*t^2 + Bx*t and y(t) = Ay*t^2 + By*t for t in [0,1].
 */
public static class QuadCurve {

    double ax, ay, bx, by;
    double Ax, Ay, Bx, By;

    public QuadCurve(double x1, double y1, double cx, double cy, double x2, double y2) {
        // translate so (x1,y1) becomes the origin
        ax = x2 - x1;
        ay = y2 - y1;
        bx = cx - x1;
        by = cy - y1;

        Bx = bx + bx;   // Bx = 2.0 * bx
        Ax = ax - Bx;   // Ax = ax - 2.0 * bx

        By = by + by;   // By = 2.0 * by
        Ay = ay - By;   // Ay = ay - 2.0 * by
    }

    /**
     * Accumulates the signed crossing count of the upward ray from the
     * (translated) point at the parameter values in res (roots of x(t)=px).
     * py1/py2 are the point's y relative to the curve's start point.
     */
    int cross(double res[], int rc, double py1, double py2) {
        int cross = 0;

        for (int i = 0; i < rc; i++) {
            double t = res[i];

            // CURVE-OUTSIDE
            if (t < -DELTA || t > 1 + DELTA) {
                continue;
            }
            // CURVE-START
            if (t < DELTA) {
                if (py1 < 0.0 && (bx != 0.0 ? bx : ax - bx) < 0.0) {
                    cross--;
                }
                continue;
            }
            // CURVE-END
            if (t > 1 - DELTA) {
                if (py1 < ay && (ax != bx ? ax - bx : bx) > 0.0) {
                    cross++;
                }
                continue;
            }
            // CURVE-INSIDE: only counts when the curve point lies above py2;
            // the sign follows the curve's x-direction (derivative) at t
            double ry = t * (t * Ay + By);
            // ry = t * t * Ay + t * By
            if (ry > py2) {
                double rxt = t * Ax + bx;
                // rxt = 2.0 * t * Ax + Bx = 2.0 * t * Ax + 2.0 * bx
                if (rxt > -DELTA && rxt < DELTA) {
                    continue;
                }
                cross += rxt > 0.0 ? 1 : -1;
            }
        } // for

        return cross;
    }

    /** Finds parameter values t where x(t) == px; roots go into res. */
    int solvePoint(double res[], double px) {
        double eqn[] = {-px, Bx, Ax};
        return solveQuad(eqn, res);
    }

    /** Finds parameter values of the x and y extrema (where the derivative vanishes). */
    int solveExtrem(double res[]) {
        int rc = 0;
        if (Ax != 0.0) {
            res[rc++] = - Bx / (Ax + Ax);
        }
        if (Ay != 0.0) {
            res[rc++] = - By / (Ay + Ay);
        }
        return rc;
    }

    /**
     * Appends (t, x, y, id) quadruples to bound for each root in res whose
     * curve point falls inside [minX, maxX]; id increments per entry when
     * changeId is set.
     */
    int addBound(double bound[], int bc, double res[], int rc, double minX, double maxX, boolean changeId, int id) {
        for(int i = 0; i < rc; i++) {
            double t = res[i];
            if (t > -DELTA && t < 1 + DELTA) {
                double rx = t * (t * Ax + Bx);
                if (minX <= rx && rx <= maxX) {
                    bound[bc++] = t;
                    bound[bc++] = rx;
                    bound[bc++] = t * (t * Ay + By);
                    bound[bc++] = id;
                    if (changeId) {
                        id++;
                    }
                }
            }
        }
        return bc;
    }

}
/**
 * CubicCurve class provides basic functionality to find curve crossing and calculating bounds.
 * The curve is translated so its start point is the origin.
 * Parametrically: x(t) = Ax*t^3 + Bx*t^2 + Cx*t and y(t) = Ay*t^3 + By*t^2 + Cy*t for t in [0,1].
 */
public static class CubicCurve {

    double ax, ay, bx, by, cx, cy;
    double Ax, Ay, Bx, By, Cx, Cy;
    double Ax3, Bx2;

    public CubicCurve(double x1, double y1, double cx1, double cy1, double cx2, double cy2, double x2, double y2) {
        // translate so (x1,y1) becomes the origin
        ax = x2 - x1;
        ay = y2 - y1;
        bx = cx1 - x1;
        by = cy1 - y1;
        cx = cx2 - x1;
        cy = cy2 - y1;

        Cx = bx + bx + bx;           // Cx = 3.0 * bx
        Bx = cx + cx + cx - Cx - Cx; // Bx = 3.0 * cx - 6.0 * bx
        Ax = ax - Bx - Cx;           // Ax = ax - 3.0 * cx + 3.0 * bx

        Cy = by + by + by;           // Cy = 3.0 * by
        By = cy + cy + cy - Cy - Cy; // By = 3.0 * cy - 6.0 * by
        Ay = ay - By - Cy;           // Ay = ay - 3.0 * cy + 3.0 * by

        // cached multiples used by the x-derivative: x'(t) = Ax3*t^2 + Bx2*t + Cx
        Ax3 = Ax + Ax + Ax;
        Bx2 = Bx + Bx;
    }

    /**
     * Accumulates the signed crossing count of the upward ray from the
     * (translated) point at the parameter values in res (roots of x(t)=px).
     * py1/py2 are the point's y relative to the curve's start point.
     */
    int cross(double res[], int rc, double py1, double py2) {
        int cross = 0;

        for (int i = 0; i < rc; i++) {
            double t = res[i];

            // CURVE-OUTSIDE
            if (t < -DELTA || t > 1 + DELTA) {
                continue;
            }
            // CURVE-START
            if (t < DELTA) {
                if (py1 < 0.0 && (bx != 0.0 ? bx : (cx != bx ? cx - bx : ax - cx)) < 0.0) {
                    cross--;
                }
                continue;
            }
            // CURVE-END
            if (t > 1 - DELTA) {
                if (py1 < ay && (ax != cx ? ax - cx : (cx != bx ? cx - bx : bx)) > 0.0) {
                    cross++;
                }
                continue;
            }
            // CURVE-INSIDE: sign follows the x-direction (derivative) at t;
            // a vanishing first derivative is disambiguated via the second
            double ry = t * (t * (t * Ay + By) + Cy);
            // ry = t * t * t * Ay + t * t * By + t * Cy
            if (ry > py2) {
                double rxt = t * (t * Ax3 + Bx2) + Cx;
                // rxt = 3.0 * t * t * Ax + 2.0 * t * Bx + Cx
                if (rxt > -DELTA && rxt < DELTA) {
                    rxt = t * (Ax3 + Ax3) + Bx2;
                    // rxt = 6.0 * t * Ax + 2.0 * Bx
                    if (rxt < -DELTA || rxt > DELTA) {
                        // Inflection point
                        continue;
                    }
                    rxt = ax;
                }
                cross += rxt > 0.0 ? 1 : -1;
            }
        } //for

        return cross;
    }

    /** Finds parameter values t where x(t) == px; roots go into res. */
    int solvePoint(double res[], double px) {
        double eqn[] = {-px, Cx, Bx, Ax};
        return solveCubic(eqn, res);
    }

    /** Finds parameter values where the x-derivative vanishes (x extrema). */
    int solveExtremX(double res[]) {
        double eqn[] = {Cx, Bx2, Ax3};
        return solveQuad(eqn, res);
    }

    /** Finds parameter values where the y-derivative vanishes (y extrema). */
    int solveExtremY(double res[]) {
        double eqn[] = {Cy, By + By, Ay + Ay + Ay};
        return solveQuad(eqn, res);
    }

    /**
     * Appends (t, x, y, id) quadruples to bound for each root in res whose
     * curve point falls inside [minX, maxX]; id increments per entry when
     * changeId is set.
     */
    int addBound(double bound[], int bc, double res[], int rc, double minX, double maxX, boolean changeId, int id) {
        for(int i = 0; i < rc; i++) {
            double t = res[i];
            if (t > -DELTA && t < 1 + DELTA) {
                double rx = t * (t * (t * Ax + Bx) + Cx);
                if (minX <= rx && rx <= maxX) {
                    bound[bc++] = t;
                    bound[bc++] = rx;
                    bound[bc++] = t * (t * (t * Ay + By) + Cy);
                    bound[bc++] = id;
                    if (changeId) {
                        id++;
                    }
                }
            }
        }
        return bc;
    }

}
/**
 * Returns the winding contribution (-1, 0 or +1) of a vertical ray cast
 * from point (x, y) against the line segment (x1, y1)-(x2, y2).
 */
public static int crossLine(double x1, double y1, double x2, double y2, double x, double y) {
    // Quick rejection: point strictly left/right of the segment, below it
    // in ray direction, or the segment has no horizontal extent.
    boolean outside = (x < x1 && x < x2)
            || (x > x1 && x > x2)
            || (y > y1 && y > y2)
            || (x1 == x2);
    if (outside) {
        return 0;
    }

    boolean strictlyBelowBoth = y < y1 && y < y2;
    if (!strictlyBelowBoth) {
        // Point is vertically between the endpoints: compare against the
        // segment itself; a point on or above the line never crosses.
        // (x1 != x2 is guaranteed by the rejection test, so no div-by-zero.)
        if ((y2 - y1) * (x - x1) / (x2 - x1) <= y - y1) {
            return 0;
        }
    }

    // Endpoint hits count only in the direction of increasing x.
    if (x == x1) {
        return x1 < x2 ? 0 : -1;
    }
    if (x == x2) {
        return x1 < x2 ? 1 : 0;
    }
    // Interior crossing: sign follows the segment's x direction.
    return x1 < x2 ? 1 : -1;
}
/**
 * Returns the winding contribution of a vertical ray cast from point (x, y)
 * against the quadratic curve (x1, y1)-(cx, cy)-(x2, y2).
 */
public static int crossQuad(double x1, double y1, double cx, double cy, double x2, double y2, double x, double y) {
    // Quick rejection: point strictly left/right of the control polygon,
    // below it in ray direction, or the curve has no horizontal extent.
    if ((x < x1 && x < cx && x < x2)
            || (x > x1 && x > cx && x > x2)
            || (y > y1 && y > cy && y > y2)
            || (x1 == cx && cx == x2)) {
        return 0;
    }

    // Point strictly below the whole control polygon: the ray crosses the
    // curve exactly once iff x lies strictly between the endpoints.
    boolean strictlyBelow = y < y1 && y < cy && y < y2 && x != x1 && x != x2;
    if (strictlyBelow) {
        if (x1 < x2) {
            return (x1 < x && x < x2) ? 1 : 0;
        }
        return (x2 < x && x < x1) ? -1 : 0;
    }

    // General case: intersect the ray with the curve in curve-local
    // coordinates (curve translated so it starts at the origin).
    QuadCurve curve = new QuadCurve(x1, y1, cx, cy, x2, y2);
    double localY = y - y1;
    double[] roots = new double[3];
    int rootCount = curve.solvePoint(roots, x - x1);
    return curve.cross(roots, rootCount, localY, localY);
}
/**
 * Returns the winding contribution of a vertical ray cast from point (x, y)
 * against the cubic curve (x1, y1)-(cx1, cy1)-(cx2, cy2)-(x2, y2).
 */
public static int crossCubic(double x1, double y1, double cx1, double cy1, double cx2, double cy2, double x2, double y2, double x, double y) {
    // Quick rejection: point strictly left/right of the control polygon,
    // below it in ray direction, or the curve has no horizontal extent.
    if ((x < x1 && x < cx1 && x < cx2 && x < x2)
            || (x > x1 && x > cx1 && x > cx2 && x > x2)
            || (y > y1 && y > cy1 && y > cy2 && y > y2)
            || (x1 == cx1 && cx1 == cx2 && cx2 == x2)) {
        return 0;
    }

    // Point strictly below the whole control polygon: the ray crosses the
    // curve exactly once iff x lies strictly between the endpoints.
    boolean strictlyBelow = y < y1 && y < cy1 && y < cy2 && y < y2 && x != x1 && x != x2;
    if (strictlyBelow) {
        if (x1 < x2) {
            return (x1 < x && x < x2) ? 1 : 0;
        }
        return (x2 < x && x < x1) ? -1 : 0;
    }

    // General case: intersect the ray with the curve in curve-local coordinates.
    CubicCurve curve = new CubicCurve(x1, y1, cx1, cy1, cx2, cy2, x2, y2);
    double localY = y - y1;
    double[] roots = new double[3];
    int rootCount = curve.solvePoint(roots, x - x1);
    return curve.cross(roots, rootCount, localY, localY);
}
/**
 * Returns how many times a vertical ray cast from point (x,y) crosses the path.
 */
public static int crossPath(PathIterator p, double x, double y) {
    int cross = 0;
    // (mx, my) is the current subpath's starting point; (cx, cy) is the pen position.
    double mx, my, cx, cy;
    mx = my = cx = cy = 0.0;
    double coords[] = new double[6];
    while (!p.isDone()) {
        switch (p.currentSegment(coords)) {
            case PathIterator.SEG_MOVETO:
                // Implicitly close the previous subpath before starting a new one.
                if (cx != mx || cy != my) {
                    cross += crossLine(cx, cy, mx, my, x, y);
                }
                mx = cx = coords[0];
                my = cy = coords[1];
                break;
            case PathIterator.SEG_LINETO:
                cross += crossLine(cx, cy, cx = coords[0], cy = coords[1], x, y);
                break;
            case PathIterator.SEG_QUADTO:
                cross += crossQuad(cx, cy, coords[0], coords[1], cx = coords[2], cy = coords[3], x, y);
                break;
            case PathIterator.SEG_CUBICTO:
                cross += crossCubic(cx, cy, coords[0], coords[1], coords[2], coords[3], cx = coords[4], cy = coords[5], x, y);
                break;
            case PathIterator.SEG_CLOSE:
                if (cy != my || cx != mx) {
                    cross += crossLine(cx, cy, cx = mx, cy = my, x, y);
                }
                break;
        }
        // checks if the point (x,y) is the vertex of shape with PathIterator p
        if (x == cx && y == cy) {
            cross = 0;
            // Forcing cy == my skips the trailing implicit-close below.
            cy = my;
            break;
        }
        p.next();
    }
    // Implicitly close a trailing open subpath.
    if (cy != my) {
        cross += crossLine(cx, cy, mx, my, x, y);
    }
    return cross;
}
/**
 * Returns how many times a vertical ray cast from point (x, y) crosses
 * the given shape's outline.
 */
public static int crossShape(Shape s, double x, double y) {
    // Cheap bounding-box rejection before walking the full path.
    return s.getBounds2D().contains(x, y)
            ? crossPath(s.getPathIterator(null), x, y)
            : 0;
}
/**
 * Returns true if the value is small enough (magnitude below DELTA) to be
 * treated as zero.
 */
public static boolean isZero(double val) {
    // Equivalent to -DELTA < val && val < DELTA; NaN still yields false.
    return Math.abs(val) < DELTA;
}
/**
 * Sorts the bound array in place. Entries are 4-double records
 * (t, x, y, id) ordered ascending by their first slot (the curve parameter t).
 */
static void sortBound(double bound[], int bc) {
    final int record = 4;
    // Selection sort over whole records; bound arrays are small.
    for (int i = 0; i < bc - record; i += record) {
        int min = i;
        for (int j = i + record; j < bc; j += record) {
            if (bound[j] < bound[min]) {
                min = j;
            }
        }
        if (min == i) {
            continue;
        }
        // Swap all four slots of the two records.
        for (int k = 0; k < record; k++) {
            double swap = bound[i + k];
            bound[i + k] = bound[min + k];
            bound[min + k] = swap;
        }
    }
}
/**
 * Returns whether the collected bound points cross the rectangle's y-range:
 * CROSSING if the curve certainly intersects [py1, py2], 0 if it certainly
 * does not, or UNKNOWN if the caller must fall back to ray counting.
 */
static int crossBound(double bound[], int bc, double py1, double py2) {
    // LEFT/RIGHT
    if (bc == 0) {
        return 0;
    }
    // Check Y coordinate
    int up = 0;
    int down = 0;
    for(int i = 2; i < bc; i += 4) {
        // bound[i] is the y coordinate of the i-th (t, x, y, id) record.
        if (bound[i] < py1) {
            up++;
            continue;
        }
        if (bound[i] > py2) {
            down++;
            continue;
        }
        // A bound point inside [py1, py2] means the curve enters the rectangle.
        return CROSSING;
    }
    // UP
    if (down == 0) {
        return 0;
    }
    if (up != 0) {
        // bc >= 2
        sortBound(bound, bc);
        // Scan consecutive (by t) bound points: a sign change across py2
        // between two distinct points implies the curve passed through the band.
        boolean sign = bound[2] > py2;
        for(int i = 6; i < bc; i += 4) {
            boolean sign2 = bound[i] > py2;
            if (sign != sign2 && bound[i + 1] != bound[i - 3]) {
                return CROSSING;
            }
            sign = sign2;
        }
    }
    return UNKNOWN;
}
/**
 * Returns how many times the rectangle stripe crosses the line segment, or
 * CROSSING if they intersect.
 */
public static int intersectLine(double x1, double y1, double x2, double y2, double rx1, double ry1, double rx2, double ry2) {
    // LEFT/RIGHT/UP
    if ((rx2 < x1 && rx2 < x2) ||
        (rx1 > x1 && rx1 > x2) ||
        (ry1 > y1 && ry1 > y2))
    {
        return 0;
    }
    // DOWN
    if (ry2 < y1 && ry2 < y2) {
        // Rectangle lies entirely below the segment: fall through to the
        // crossing-count logic at the bottom.
    } else {
        // INSIDE
        if (x1 == x2) {
            // Vertical segment overlapping the stripe's y-range: must intersect.
            return CROSSING;
        }
        // Build bound: clip the segment's x-range to the stripe [rx1, rx2].
        double bx1, bx2;
        if (x1 < x2) {
            bx1 = x1 < rx1 ? rx1 : x1;
            bx2 = x2 < rx2 ? x2 : rx2;
        } else {
            bx1 = x2 < rx1 ? rx1 : x2;
            bx2 = x1 < rx2 ? x1 : rx2;
        }
        // Evaluate the segment at the clipped x endpoints (x1 != x2 here).
        double k = (y2 - y1) / (x2 - x1);
        double by1 = k * (bx1 - x1) + y1;
        double by2 = k * (bx2 - x1) + y1;
        // BOUND-UP
        if (by1 < ry1 && by2 < ry1) {
            return 0;
        }
        // BOUND-DOWN
        if (by1 > ry2 && by2 > ry2) {
            // Clipped span lies entirely below the rectangle: plain crossing.
        } else {
            return CROSSING;
        }
    }
    // EMPTY
    if (x1 == x2) {
        return 0;
    }
    // CURVE-START
    if (rx1 == x1) {
        return x1 < x2 ? 0 : -1;
    }
    // CURVE-END
    if (rx1 == x2) {
        return x1 < x2 ? 1 : 0;
    }
    // Interior crossing: sign follows the segment's x direction.
    if (x1 < x2) {
        return x1 < rx1 && rx1 < x2 ? 1 : 0;
    }
    return x2 < rx1 && rx1 < x1 ? -1 : 0;
}
/**
 * Returns how many times the rectangle stripe crosses the quad curve, or
 * CROSSING if they intersect.
 */
public static int intersectQuad(double x1, double y1, double cx, double cy, double x2, double y2, double rx1, double ry1, double rx2, double ry2) {
    // LEFT/RIGHT/UP ------------------------------------------------------
    if ((rx2 < x1 && rx2 < cx && rx2 < x2) ||
        (rx1 > x1 && rx1 > cx && rx1 > x2) ||
        (ry1 > y1 && ry1 > cy && ry1 > y2))
    {
        return 0;
    }
    // DOWN ---------------------------------------------------------------
    if (ry2 < y1 && ry2 < cy && ry2 < y2 && rx1 != x1 && rx1 != x2) {
        // Rectangle entirely below the control polygon: count a plain crossing
        // iff the stripe's left edge lies strictly between the curve endpoints.
        if (x1 < x2) {
            return x1 < rx1 && rx1 < x2 ? 1 : 0;
        }
        return x2 < rx1 && rx1 < x1 ? -1 : 0;
    }
    // INSIDE -------------------------------------------------------------
    // Work in curve-local coordinates (curve translated to start at origin).
    QuadCurve c = new QuadCurve(x1, y1, cx, cy, x2, y2);
    double px1 = rx1 - x1;
    double py1 = ry1 - y1;
    double px2 = rx2 - x1;
    double py2 = ry2 - y1;
    double res1[] = new double[3];
    double res2[] = new double[3];
    // Curve parameters where the curve hits the stripe's left and right edges.
    int rc1 = c.solvePoint(res1, px1);
    int rc2 = c.solvePoint(res2, px2);
    // INSIDE-LEFT/RIGHT
    if (rc1 == 0 && rc2 == 0) {
        return 0;
    }
    // Build bound --------------------------------------------------------
    double minX = px1 - DELTA;
    double maxX = px2 + DELTA;
    double bound[] = new double[28];
    int bc = 0;
    // Add roots
    bc = c.addBound(bound, bc, res1, rc1, minX, maxX, false, 0);
    bc = c.addBound(bound, bc, res2, rc2, minX, maxX, false, 1);
    // Add extremal points
    rc2 = c.solveExtrem(res2);
    bc = c.addBound(bound, bc, res2, rc2, minX, maxX, true, 2);
    // Add start and end
    if (rx1 < x1 && x1 < rx2) {
        bound[bc++] = 0.0;
        bound[bc++] = 0.0;
        bound[bc++] = 0.0;
        bound[bc++] = 4;
    }
    if (rx1 < x2 && x2 < rx2) {
        bound[bc++] = 1.0;
        bound[bc++] = c.ax;
        bound[bc++] = c.ay;
        bound[bc++] = 5;
    }
    // End build bound ----------------------------------------------------
    int cross = crossBound(bound, bc, py1, py2);
    if (cross != UNKNOWN) {
        return cross;
    }
    // Ambiguous from the bound points alone: fall back to ray counting.
    return c.cross(res1, rc1, py1, py2);
}
/**
 * Returns how many times the rectangle stripe crosses the cubic curve, or
 * CROSSING if they intersect.
 */
public static int intersectCubic(double x1, double y1, double cx1, double cy1, double cx2, double cy2, double x2, double y2, double rx1, double ry1, double rx2, double ry2) {
    // LEFT/RIGHT/UP
    if ((rx2 < x1 && rx2 < cx1 && rx2 < cx2 && rx2 < x2) ||
        (rx1 > x1 && rx1 > cx1 && rx1 > cx2 && rx1 > x2) ||
        (ry1 > y1 && ry1 > cy1 && ry1 > cy2 && ry1 > y2))
    {
        return 0;
    }
    // DOWN
    if (ry2 < y1 && ry2 < cy1 && ry2 < cy2 && ry2 < y2 && rx1 != x1 && rx1 != x2) {
        // Rectangle entirely below the control polygon: count a plain crossing
        // iff the stripe's left edge lies strictly between the curve endpoints.
        if (x1 < x2) {
            return x1 < rx1 && rx1 < x2 ? 1 : 0;
        }
        return x2 < rx1 && rx1 < x1 ? -1 : 0;
    }
    // INSIDE
    // Work in curve-local coordinates (curve translated to start at origin).
    CubicCurve c = new CubicCurve(x1, y1, cx1, cy1, cx2, cy2, x2, y2);
    double px1 = rx1 - x1;
    double py1 = ry1 - y1;
    double px2 = rx2 - x1;
    double py2 = ry2 - y1;
    double res1[] = new double[3];
    double res2[] = new double[3];
    // Curve parameters where the curve hits the stripe's left and right edges.
    int rc1 = c.solvePoint(res1, px1);
    int rc2 = c.solvePoint(res2, px2);
    // LEFT/RIGHT
    if (rc1 == 0 && rc2 == 0) {
        return 0;
    }
    double minX = px1 - DELTA;
    double maxX = px2 + DELTA;
    // Build bound --------------------------------------------------------
    double bound[] = new double[40];
    int bc = 0;
    // Add roots
    bc = c.addBound(bound, bc, res1, rc1, minX, maxX, false, 0);
    bc = c.addBound(bound, bc, res2, rc2, minX, maxX, false, 1);
    // Add extremal points
    rc2 = c.solveExtremX(res2);
    bc = c.addBound(bound, bc, res2, rc2, minX, maxX, true, 2);
    rc2 = c.solveExtremY(res2);
    bc = c.addBound(bound, bc, res2, rc2, minX, maxX, true, 4);
    // Add start and end
    if (rx1 < x1 && x1 < rx2) {
        bound[bc++] = 0.0;
        bound[bc++] = 0.0;
        bound[bc++] = 0.0;
        bound[bc++] = 6;
    }
    if (rx1 < x2 && x2 < rx2) {
        bound[bc++] = 1.0;
        bound[bc++] = c.ax;
        bound[bc++] = c.ay;
        bound[bc++] = 7;
    }
    // End build bound ----------------------------------------------------
    int cross = crossBound(bound, bc, py1, py2);
    if (cross != UNKNOWN) {
        return cross;
    }
    // Ambiguous from the bound points alone: fall back to ray counting.
    return c.cross(res1, rc1, py1, py2);
}
/**
 * Returns how many times the rectangle stripe crosses the path, or CROSSING
 * if they intersect.
 */
public static int intersectPath(PathIterator p, double x, double y, double w, double h) {
    int cross = 0;
    int count;
    // (mx, my) is the current subpath's starting point; (cx, cy) is the pen position.
    double mx, my, cx, cy;
    mx = my = cx = cy = 0.0;
    double coords[] = new double[6];

    double rx1 = x;
    double ry1 = y;
    double rx2 = x + w;
    double ry2 = y + h;

    while (!p.isDone()) {
        count = 0;
        switch (p.currentSegment(coords)) {
            case PathIterator.SEG_MOVETO:
                // Implicitly close the previous subpath before starting a new one.
                if (cx != mx || cy != my) {
                    count = intersectLine(cx, cy, mx, my, rx1, ry1, rx2, ry2);
                }
                mx = cx = coords[0];
                my = cy = coords[1];
                break;
            case PathIterator.SEG_LINETO:
                count = intersectLine(cx, cy, cx = coords[0], cy = coords[1], rx1, ry1, rx2, ry2);
                break;
            case PathIterator.SEG_QUADTO:
                count = intersectQuad(cx, cy, coords[0], coords[1], cx = coords[2], cy = coords[3], rx1, ry1, rx2, ry2);
                break;
            case PathIterator.SEG_CUBICTO:
                count = intersectCubic(cx, cy, coords[0], coords[1], coords[2], coords[3], cx = coords[4], cy = coords[5], rx1, ry1, rx2, ry2);
                break;
            case PathIterator.SEG_CLOSE:
                if (cy != my || cx != mx) {
                    count = intersectLine(cx, cy, mx, my, rx1, ry1, rx2, ry2);
                }
                cx = mx;
                cy = my;
                break;
        }
        // Any segment overlapping the rectangle short-circuits the walk.
        if (count == CROSSING) {
            return CROSSING;
        }
        cross += count;
        p.next();
    }
    // Implicitly close a trailing open subpath.
    if (cy != my) {
        count = intersectLine(cx, cy, mx, my, rx1, ry1, rx2, ry2);
        if (count == CROSSING) {
            return CROSSING;
        }
        cross += count;
    }
    return cross;
}
/**
 * Returns how many times the rectangle stripe crosses the shape's outline,
 * or CROSSING if shape and rectangle intersect.
 */
public static int intersectShape(Shape s, double x, double y, double w, double h) {
    // Cheap bounding-box rejection before walking the full path.
    return s.getBounds2D().intersects(x, y, w, h)
            ? intersectPath(s.getPathIterator(null), x, y, w, h)
            : 0;
}
/**
 * Returns true if the crossing count corresponds to an inside location under
 * the non-zero winding path rule.
 */
public static boolean isInsideNonZero(int cross) {
    // Any non-zero winding number means the point is inside.
    return !(cross == 0);
}
/**
 * Returns true if the crossing count corresponds to an inside location under
 * the even-odd path rule.
 */
public static boolean isInsideEvenOdd(int cross) {
    // Odd parity (regardless of sign) means the point is inside.
    return cross % 2 != 0;
}
} | apache-2.0 |
fjy/druid | processing/src/main/java/io/druid/query/topn/TopNQueryEngine.java | 5578 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.topn;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences;
import com.metamx.common.logger.Logger;
import io.druid.collections.StupidPool;
import io.druid.granularity.QueryGranularity;
import io.druid.query.Result;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.Filter;
import io.druid.segment.Capabilities;
import io.druid.segment.Cursor;
import io.druid.segment.SegmentMissingException;
import io.druid.segment.StorageAdapter;
import io.druid.segment.column.Column;
import io.druid.segment.filter.Filters;
import org.joda.time.Interval;
import java.nio.ByteBuffer;
import java.util.List;
/**
 * Runs {@link TopNQuery} instances against a single segment's
 * {@link StorageAdapter}, selecting the most appropriate TopN algorithm
 * for each query.
 */
public class TopNQueryEngine
{
  private static final Logger log = new Logger(TopNQueryEngine.class);

  // Pool of scratch buffers shared by the pooled/aggregate-first algorithms.
  private final StupidPool<ByteBuffer> bufferPool;

  public TopNQueryEngine(StupidPool<ByteBuffer> bufferPool)
  {
    this.bufferPool = bufferPool;
  }

  /**
   * Runs the query over every cursor the adapter produces for the query's
   * (single) interval, mapping each cursor to one topN result row.
   *
   * @param query   the topN query; its segment spec must cover exactly one interval
   * @param adapter segment storage adapter, must not be null
   * @return lazy sequence of per-granularity topN results with nulls filtered out
   * @throws SegmentMissingException if {@code adapter} is null (segment was unmapped)
   */
  public Sequence<Result<TopNResultValue>> query(final TopNQuery query, final StorageAdapter adapter)
  {
    if (adapter == null) {
      throw new SegmentMissingException(
          "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
      );
    }

    final List<Interval> queryIntervals = query.getQuerySegmentSpec().getIntervals();
    final Filter filter = Filters.convertDimensionFilters(query.getDimensionsFilter());
    final QueryGranularity granularity = query.getGranularity();
    final Function<Cursor, Result<TopNResultValue>> mapFn = getMapFn(query, adapter);

    Preconditions.checkArgument(
        queryIntervals.size() == 1, "Can only handle a single interval, got[%s]", queryIntervals
    );

    return Sequences.filter(
        Sequences.map(
            adapter.makeCursors(filter, queryIntervals.get(0), granularity, query.isDescending()),
            new Function<Cursor, Result<TopNResultValue>>()
            {
              @Override
              public Result<TopNResultValue> apply(Cursor input)
              {
                // BUG FIX: the format string previously contained a single %s
                // for two arguments, so the cursor time was silently dropped
                // from the log message.
                log.debug("Running over cursor[%s] of time[%s]", adapter.getInterval(), input.getTime());
                return mapFn.apply(input);
              }
            }
        ),
        Predicates.<Result<TopNResultValue>>notNull()
    );
  }

  /**
   * Chooses the TopN algorithm best suited to this query/segment combination
   * and wraps it in a {@link TopNMapFn}.
   */
  private Function<Cursor, Result<TopNResultValue>> getMapFn(TopNQuery query, final StorageAdapter adapter)
  {
    final Capabilities capabilities = adapter.getCapabilities();
    final String dimension = query.getDimensionSpec().getDimension();
    final int cardinality = adapter.getDimensionCardinality(dimension);

    // Worst-case per-row scratch space required by all aggregators combined.
    int numBytesPerRecord = 0;
    for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) {
      numBytesPerRecord += aggregatorFactory.getMaxIntermediateSize();
    }

    final TopNAlgorithmSelector selector = new TopNAlgorithmSelector(cardinality, numBytesPerRecord);
    query.initTopNAlgorithmSelector(selector);

    final TopNAlgorithm topNAlgorithm;
    if (
        selector.isHasExtractionFn() &&
        // TimeExtractionTopNAlgorithm can work on any single-value dimension of type long.
        // Once we have arbitrary dimension types following check should be replaced by checking
        // that the column is of type long and single-value.
        dimension.equals(Column.TIME_COLUMN_NAME)
        ) {
      // A special TimeExtractionTopNAlgorithm is required, since DimExtractionTopNAlgorithm
      // currently relies on the dimension cardinality to support lexicographic sorting
      topNAlgorithm = new TimeExtractionTopNAlgorithm(capabilities, query);
    } else if (selector.isHasExtractionFn()) {
      topNAlgorithm = new DimExtractionTopNAlgorithm(capabilities, query);
    } else if (selector.isAggregateAllMetrics()) {
      topNAlgorithm = new PooledTopNAlgorithm(capabilities, query, bufferPool);
    } else if (selector.isAggregateTopNMetricFirst() || query.getContextValue("doAggregateTopNMetricFirst", false)) {
      topNAlgorithm = new AggregateTopNMetricFirstAlgorithm(capabilities, query, bufferPool);
    } else {
      // Default: pooled algorithm aggregating all metrics per pass.
      topNAlgorithm = new PooledTopNAlgorithm(capabilities, query, bufferPool);
    }

    return new TopNMapFn(query, topNAlgorithm);
  }

  /**
   * Returns true if the query's extraction function is one-to-one and the
   * metric ordering can be computed on unextracted values, so extraction can
   * safely be applied after the topN computation instead of per row.
   */
  public static boolean canApplyExtractionInPost(TopNQuery query)
  {
    return query.getDimensionSpec() != null
           && query.getDimensionSpec().getExtractionFn() != null
           && ExtractionFn.ExtractionType.ONE_TO_ONE.equals(query.getDimensionSpec().getExtractionFn().getExtractionType())
           && query.getTopNMetricSpec().canBeOptimizedUnordered();
  }
}
| apache-2.0 |
tommy-kw/AndroidDesignSupportLibrarySample | DrawerLayout/app/src/main/java/tokyo/tommy_kw/drawerlayout/BottomSheetUtil.java | 698 | package tokyo.tommy_kw.drawerlayout;
import android.support.design.widget.BottomSheetBehavior;
import android.view.View;
/**
 * Static helpers for driving the {@link BottomSheetBehavior} attached to a view.
 *
 * Created by tommy on 2016/03/14.
 */
public class BottomSheetUtil {
    /** Moves the bottom sheet backing {@code bottomSheet} into {@code state}. */
    public static void setState(View bottomSheet, int state) {
        BottomSheetBehavior.from(bottomSheet).setState(state);
    }

    /** Registers {@code callback} for state and slide events of the bottom sheet. */
    public static void setBottomSheetCallback(View bottomSheet,
            BottomSheetBehavior.BottomSheetCallback callback) {
        BottomSheetBehavior.from(bottomSheet).setBottomSheetCallback(callback);
    }
}
| apache-2.0 |
paplorinc/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/template/macro/ReplaceSpacesWithUnderscoresMacro.java | 999 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.template.Expression;
import com.intellij.codeInsight.template.ExpressionContext;
import com.intellij.codeInsight.template.Result;
import com.intellij.codeInsight.template.TextResult;
import org.jetbrains.annotations.NotNull;
public class ReplaceSpacesWithUnderscoresMacro extends MacroBase {
public ReplaceSpacesWithUnderscoresMacro() {
super("spacesToUnderscores", CodeInsightBundle.message("macro.spacesToUnderscores.string"));
}
@Override
protected Result calculateResult(@NotNull Expression[] params, ExpressionContext context, boolean quick) {
final String text = getTextResult(params, context);
if (text != null) {
return new TextResult(text.replace(' ', '_'));
}
return null;
}
} | apache-2.0 |
Darsstar/framework | uitest/src/test/java/com/vaadin/tests/components/textfield/EnumTextFieldTest.java | 2051 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.tests.components.textfield;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.openqa.selenium.Keys;
import com.vaadin.testbench.elements.TextFieldElement;
import com.vaadin.tests.tb3.SingleBrowserTest;
/**
 * Tests conversion of free-form text-field input to enum values: matching is
 * case-insensitive, empty input converts to null, and non-matching input is
 * reported as invalid.
 */
public class EnumTextFieldTest extends SingleBrowserTest {

    /**
     * Clears the first text field on the page, types {@code text} and commits
     * the value with a TAB keypress. Extracted to remove the five copies of
     * this clear/type/commit sequence.
     */
    private void typeValue(String text) {
        $(TextFieldElement.class).first().clear();
        $(TextFieldElement.class).first().sendKeys(text);
        $(TextFieldElement.class).first().sendKeys(Keys.TAB);
    }

    @Test
    public void validValues() {
        openTestURL();

        // Exact enum constant text.
        typeValue("Value");
        assertEquals("3. Value (valid)", getLogRow(0));

        // Matching is case-insensitive: mixed case still converts to "Value".
        typeValue("VaLuE");
        assertEquals("5. Value (valid)", getLogRow(0));

        // Multi-word constant.
        typeValue("The last value");
        assertEquals("7. The last value (valid)", getLogRow(0));

        // Clearing the field converts to null, which is considered valid.
        $(TextFieldElement.class).first().clear();
        assertEquals("8. null (valid)", getLogRow(0));
    }

    @Test
    public void invalidValue() {
        openTestURL();

        // Text that matches no enum constant is flagged INVALID.
        typeValue("bar");
        assertEquals("3. bar (INVALID)", getLogRow(0));
    }
}
| apache-2.0 |
magicDGS/gatk | src/test/java/org/broadinstitute/hellbender/tools/spark/ParallelCopyGCSDirectoryIntoHDFSSparkIntegrationTest.java | 6717 | package org.broadinstitute.hellbender.tools.spark;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.broadinstitute.hellbender.CommandLineProgramTest;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.gcs.BucketUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.testutils.ArgumentsBuilder;
import org.broadinstitute.hellbender.testutils.IntegrationTestSpec;
import org.broadinstitute.hellbender.testutils.MiniClusterUtils;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public class ParallelCopyGCSDirectoryIntoHDFSSparkIntegrationTest extends CommandLineProgramTest {

    @Override
    public String getTestedToolName() {
        return ParallelCopyGCSDirectoryIntoHDFSSpark.class.getSimpleName();
    }

    /**
     * Copies a single multi-block file from GCS into a mini HDFS cluster and
     * verifies both its size and its content (via MD5) after the transfer.
     */
    @Test(groups = {"spark", "bucket"})
    public void testCopyLargeFile() throws Exception {
        MiniDFSCluster cluster = null;
        try {
            final Configuration conf = new Configuration();
            // set the minicluster to have a very low block size so that we can test transferring a file in chunks without actually needing to move a big file
            conf.set("dfs.blocksize", "1048576");
            cluster = MiniClusterUtils.getMiniCluster(conf);

            // copy a multi-block file
            final Path tempPath = MiniClusterUtils.getTempPath(cluster, "test", "dir");
            final String gcpInputPath = getGCPTestInputPath() + "huge/CEUTrio.HiSeq.WGS.b37.NA12878.chr1_4.bam.bai";
            String args =
                    "--" + ParallelCopyGCSDirectoryIntoHDFSSpark.INPUT_GCS_PATH_LONG_NAME + " " + gcpInputPath +
                            " --" + ParallelCopyGCSDirectoryIntoHDFSSpark.OUTPUT_HDFS_DIRECTORY_LONG_NAME + " " + tempPath;
            ArgumentsBuilder ab = new ArgumentsBuilder().add(args);
            IntegrationTestSpec spec = new IntegrationTestSpec(
                    ab.getString(),
                    Collections.emptyList());
            spec.executeTest("testCopyLargeFile-" + args, this);

            final long fileSizeOnGCS = Files.size(IOUtils.getPath(gcpInputPath));

            final String hdfsPath = tempPath + "/" + "CEUTrio.HiSeq.WGS.b37.NA12878.chr1_4.bam.bai";

            org.apache.hadoop.fs.Path outputHdfsDirectoryPath = new org.apache.hadoop.fs.Path(tempPath.toUri());
            try(FileSystem fs = outputHdfsDirectoryPath.getFileSystem(conf)) {
                long chunkSize = ParallelCopyGCSDirectoryIntoHDFSSpark.getChunkSize(fs);
                // Sanity check: the file must span multiple chunks, otherwise this
                // test would not exercise the parallel (chunked) copy path.
                Assert.assertTrue(fileSizeOnGCS > chunkSize);
            }

            Assert.assertEquals(BucketUtils.fileSize(hdfsPath),
                    fileSizeOnGCS);

            // Round-trip the file back out of HDFS and verify its checksum.
            // NOTE(review): no '/' between tempDir and the file name, so the copy
            // lands beside the temp directory rather than inside it; both uses
            // are consistent so the test still works — confirm if intentional.
            final File tempDir = createTempDir("ParallelCopy");

            BucketUtils.copyFile(hdfsPath, tempDir + "fileFromHDFS.bam.bai");
            Assert.assertEquals(Utils.calculateFileMD5(new File(tempDir + "fileFromHDFS.bam.bai")), "1a6baa5332e98ef1358ac0fb36f46aaf");
        } finally {
            MiniClusterUtils.stopCluster(cluster);
        }
    }

    /**
     * Cases for {@link #testCopyDirectory}: copy the whole directory, or only
     * files matching a glob; expected file names and MD5 checksums per case.
     */
    @DataProvider(name = "directoryCopy")
    public Object[][] getDirectoryParams() {
        final String gcpInputPath = getGCPTestInputPath() + "parallel_copy/";

        final List<Object[]> tests = new ArrayList<>();
        tests.add(new Object[]{gcpInputPath, null, new String[] { "foo.txt", "bar.txt"}, new String[] { "d3b07384d113edec49eaa6238ad5ff00", "c157a79031e1c40f85931829bc5fc552"}});
        tests.add(new Object[]{gcpInputPath, "foo*", new String[] { "foo.txt" }, new String[] { "d3b07384d113edec49eaa6238ad5ff00" }});
        return tests.toArray(new Object[][]{});
    }

    /**
     * Copies a GCS directory (optionally filtered by {@code glob}) into a mini
     * HDFS cluster and verifies the exact set of copied files and their MD5s.
     */
    @Test(groups = {"spark", "bucket"}, dataProvider = "directoryCopy")
    public void testCopyDirectory(final String gcpInputPath,
                                  final String glob,
                                  final String[] expectedFilesCopied,
                                  final String[] expectedMD5s) throws Exception {
        MiniDFSCluster cluster = null;
        try {
            final Configuration conf = new Configuration();
            // set the minicluster to have a very low block size so that we can test transferring a file in chunks without actually needing to move a big file
            conf.set("dfs.blocksize", "1048576");
            cluster = MiniClusterUtils.getMiniCluster(conf);

            // copy a directory
            final Path tempPath = MiniClusterUtils.getTempPath(cluster, "test", "dir");

            // directory contains two small files named foo.txt and bar.txt
            String args =
                    "--" + ParallelCopyGCSDirectoryIntoHDFSSpark.INPUT_GCS_PATH_LONG_NAME + " " + gcpInputPath +
                            (glob == null ? "" : " --" + ParallelCopyGCSDirectoryIntoHDFSSpark.INPUT_GLOB + " " + glob) +
                            " --" + ParallelCopyGCSDirectoryIntoHDFSSpark.OUTPUT_HDFS_DIRECTORY_LONG_NAME + " " + tempPath;
            ArgumentsBuilder ab = new ArgumentsBuilder().add(args);
            IntegrationTestSpec spec = new IntegrationTestSpec(
                    ab.getString(),
                    Collections.emptyList());
            spec.executeTest("testCopyDirectory-" + args, this);

            org.apache.hadoop.fs.Path outputHdfsDirectoryPath = new org.apache.hadoop.fs.Path(tempPath.toUri());

            // Pull every copied file back out of HDFS for checksum verification.
            final File tempDir = createTempDir("ParallelCopyDir");
            int filesFound = 0;
            try(FileSystem fs = outputHdfsDirectoryPath.getFileSystem(conf)) {
                final RemoteIterator<LocatedFileStatus> hdfsCopies = fs.listFiles(outputHdfsDirectoryPath, false);
                while (hdfsCopies.hasNext()) {
                    final FileStatus next =  hdfsCopies.next();
                    final Path path = next.getPath();
                    BucketUtils.copyFile(path.toString(), tempDir + "/" + path.getName());
                    filesFound ++;
                }
            }

            // Exactly the expected files — no more, no fewer — with matching MD5s.
            Assert.assertEquals(filesFound, expectedFilesCopied.length);

            for (int i = 0; i < expectedFilesCopied.length; i++) {
                String fileName = expectedFilesCopied[i];
                String md5 = expectedMD5s[i];
                Assert.assertEquals(Utils.calculateFileMD5(new File(tempDir + "/" + fileName)), md5);
            }

        } finally {
            MiniClusterUtils.stopCluster(cluster);
        }
    }
}
| bsd-3-clause |
axinging/chromium-crosswalk | chrome/android/webapk/shell_apk/javatests/src/org/chromium/webapk/shell_apk/DexLoaderTest.java | 12381 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.webapk.shell_apk;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.FileObserver;
import android.os.IBinder;
import android.os.RemoteException;
import android.test.InstrumentationTestCase;
import android.test.suitebuilder.annotation.MediumTest;
import dalvik.system.DexFile;
import org.chromium.base.FileUtils;
import org.chromium.content.browser.test.util.CallbackHelper;
import org.chromium.webapk.shell_apk.test.dex_optimizer.IDexOptimizerService;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
public class DexLoaderTest extends InstrumentationTestCase {
/**
* Package of APK to load dex file from and package which provides DexOptimizerService.
*/
private static final String DEX_OPTIMIZER_SERVICE_PACKAGE =
"org.chromium.webapk.shell_apk.test.dex_optimizer";
/**
* Class which implements DexOptimizerService.
*/
private static final String DEX_OPTIMIZER_SERVICE_CLASS_NAME =
"org.chromium.webapk.shell_apk.test.dex_optimizer.DexOptimizerServiceImpl";
/**
* Name of the dex file in DexOptimizer.apk.
*/
private static final String DEX_ASSET_NAME = "canary.dex";
/**
* Class to load to check whether dex is valid.
*/
private static final String CANARY_CLASS_NAME =
"org.chromium.webapk.shell_apk.test.canary.Canary";
private Context mContext;
private Context mRemoteContext;
private File mLocalDexDir;
private IDexOptimizerService mDexOptimizerService;
private ServiceConnection mServiceConnection;
/**
 * Records which files are read and which are modified inside the directory
 * passed to the constructor.
 */
private static class FileMonitor extends FileObserver {
    public ArrayList<String> mReadPaths = new ArrayList<String>();
    public ArrayList<String> mModifiedPaths = new ArrayList<String>();

    public FileMonitor(File directory) {
        super(directory.getPath());
    }

    @Override
    public void onEvent(int event, String path) {
        if (event == FileObserver.ACCESS) {
            mReadPaths.add(path);
        } else if (event == FileObserver.CREATE
                || event == FileObserver.DELETE
                || event == FileObserver.DELETE_SELF
                || event == FileObserver.MODIFY) {
            mModifiedPaths.add(path);
        }
        // All other event types are ignored.
    }
}
@Override
protected void setUp() {
    mContext = getInstrumentation().getTargetContext();
    mRemoteContext = getRemoteContext(mContext);
    mLocalDexDir = mContext.getDir("dex", Context.MODE_PRIVATE);

    // Start from a clean slate: remove any dex files cached in the local
    // app-data directory by previous test runs.
    if (mLocalDexDir.exists()) {
        FileUtils.recursivelyDeleteFile(mLocalDexDir);
        if (mLocalDexDir.exists()) {
            fail("Could not delete local dex directory.");
        }
    }

    // Bind to the DexOptimizerService (unbound again in tearDown()).
    connectToDexOptimizerService();

    // Also clear the remote app's dex directory via the service.
    try {
        if (!mDexOptimizerService.deleteDexDirectory()) {
            fail("Could not delete remote dex directory.");
        }
    } catch (RemoteException e) {
        e.printStackTrace();
        fail("Remote crashed during setup.");
    }
}
@Override
public void tearDown() throws Exception {
    // Release the service connection bound in setUp() before the base class
    // tears down the instrumentation context.
    mContext.unbindService(mServiceConnection);
    super.tearDown();
}
/**
 * Test that {@link DexLoader#load()} can create a ClassLoader from a dex and optimized dex in
 * another app's data directory.
 */
@MediumTest
public void testLoadFromRemoteDataDir() {
    // Extract the dex file into another app's data directory and optimize the dex.
    String remoteDexFilePath = null;
    try {
        remoteDexFilePath = mDexOptimizerService.extractAndOptimizeDex();
    } catch (RemoteException e) {
        e.printStackTrace();
        fail("Remote crashed.");
    }
    if (remoteDexFilePath == null) {
        fail("Could not extract and optimize dex.");
    }

    // Check that the Android OS knows about the optimized dex file for
    // {@link remoteDexFilePath}.
    File remoteDexFile = new File(remoteDexFilePath);
    assertFalse(isDexOptNeeded(remoteDexFile));

    ClassLoader loader = DexLoader.load(
            mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, remoteDexFile, mLocalDexDir);
    // Successfully loading the canary class proves the ClassLoader works.
    assertNotNull(loader);
    assertTrue(canLoadCanaryClass(loader));

    // Check that {@link DexLoader#load()} did not use the fallback path.
    assertFalse(mLocalDexDir.exists());
}
/**
 * Test that {@link DexLoader#load()} falls back to extracting the dex from the APK to the
 * local data directory and creating the ClassLoader from the extracted dex if creating the
 * ClassLoader from the cached data in the remote Context's data directory fails.
 */
@MediumTest
public void testLoadFromLocalDataDir() {
    // Passing null as the remote dex file forces the local fallback path.
    ClassLoader loader = DexLoader.load(
            mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, null, mLocalDexDir);
    assertNotNull(loader);
    assertTrue(canLoadCanaryClass(loader));

    // Check that the dex file was extracted to the local data directory and that a directory
    // was created for the optimized dex.
    assertTrue(mLocalDexDir.exists());
    File[] localDexDirFiles = mLocalDexDir.listFiles();
    assertNotNull(localDexDirFiles);
    Arrays.sort(localDexDirFiles);
    assertEquals(2, localDexDirFiles.length);
    assertEquals(DEX_ASSET_NAME, localDexDirFiles[0].getName());
    assertFalse(localDexDirFiles[0].isDirectory());
    assertEquals("optimized", localDexDirFiles[1].getName());
    assertTrue(localDexDirFiles[1].isDirectory());
}
/**
 * Tests that {@link DexLoader#load()} does not re-extract the dex file from the APK when a
 * previous call to {@link DexLoader#load()} already extracted it.
 */
@MediumTest
public void testPreviouslyLoadedFromLocalDataDir() {
    assertTrue(mLocalDexDir.mkdir());
    {
        // First load: the dex file should be read from the APK's assets and written
        // (together with its optimized form) into the local dex directory.
        FileMonitor monitor = new FileMonitor(mLocalDexDir);
        monitor.startWatching();
        ClassLoader firstLoader = DexLoader.load(
                mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, null, mLocalDexDir);
        monitor.stopWatching();

        assertNotNull(firstLoader);
        assertTrue(canLoadCanaryClass(firstLoader));
        assertTrue(monitor.mReadPaths.contains(DEX_ASSET_NAME));
        assertTrue(monitor.mModifiedPaths.contains(DEX_ASSET_NAME));
    }
    {
        // Second load: the previously extracted dex file should be reused as-is.
        FileMonitor monitor = new FileMonitor(mLocalDexDir);
        monitor.startWatching();
        ClassLoader secondLoader = DexLoader.load(
                mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, null, mLocalDexDir);
        monitor.stopWatching();

        // The returned ClassLoader must still be usable...
        assertNotNull(secondLoader);
        assertTrue(canLoadCanaryClass(secondLoader));
        // ...and the dex was read but nothing in the directory was modified.
        assertTrue(monitor.mReadPaths.contains(DEX_ASSET_NAME));
        assertTrue(monitor.mModifiedPaths.isEmpty());
    }
}
/**
 * Tests that {@link DexLoader#load()} re-extracts the dex file from the APK after a call
 * to {@link DexLoader#deleteCachedDexes()}.
 */
@MediumTest
public void testLoadAfterDeleteCachedDexes() {
    assertTrue(mLocalDexDir.mkdir());
    {
        // First load: extracts the dex file from the APK's assets and generates the
        // optimized dex file.
        FileMonitor monitor = new FileMonitor(mLocalDexDir);
        monitor.startWatching();
        ClassLoader firstLoader = DexLoader.load(
                mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, null, mLocalDexDir);
        monitor.stopWatching();

        assertNotNull(firstLoader);
        assertTrue(canLoadCanaryClass(firstLoader));
        assertTrue(monitor.mReadPaths.contains(DEX_ASSET_NAME));
        assertTrue(monitor.mModifiedPaths.contains(DEX_ASSET_NAME));
    }

    // Wipe the cached dex files between the two loads.
    DexLoader.deleteCachedDexes(mLocalDexDir);

    {
        // Second load: must re-extract the dex from the APK's assets.
        FileMonitor monitor = new FileMonitor(mLocalDexDir);
        monitor.startWatching();
        ClassLoader secondLoader = DexLoader.load(
                mRemoteContext, DEX_ASSET_NAME, CANARY_CLASS_NAME, null, mLocalDexDir);
        monitor.stopWatching();

        // The returned ClassLoader must be valid...
        assertNotNull(secondLoader);
        assertTrue(canLoadCanaryClass(secondLoader));
        // ...and the dex file must have been both read and re-written.
        assertTrue(monitor.mReadPaths.contains(DEX_ASSET_NAME));
        assertTrue(monitor.mModifiedPaths.contains(DEX_ASSET_NAME));
    }
}
/**
 * Binds to the DexOptimizerService and blocks until the connection is established.
 * Fails the test if the bind is rejected or a SecurityException is thrown.
 */
private void connectToDexOptimizerService() {
    Intent intent = new Intent();
    intent.setComponent(
            new ComponentName(DEX_OPTIMIZER_SERVICE_PACKAGE, DEX_OPTIMIZER_SERVICE_CLASS_NAME));
    final CallbackHelper connectedCallback = new CallbackHelper();
    mServiceConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            mDexOptimizerService = IDexOptimizerService.Stub.asInterface(service);
            connectedCallback.notifyCalled();
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {}
    };
    try {
        // bindService() returns false when the service does not exist or the caller is
        // not permitted to bind to it. The previous code ignored the return value, so a
        // failed bind only surfaced later as a callback timeout; fail fast instead.
        if (!mContext.bindService(intent, mServiceConnection, Context.BIND_AUTO_CREATE)) {
            fail("Could not bind to DexOptimizerService.");
        }
    } catch (SecurityException e) {
        e.printStackTrace();
        fail();
    }
    try {
        connectedCallback.waitForCallback(0);
    } catch (Exception e) {
        e.printStackTrace();
        fail("Could not connect to remote.");
    }
}
/**
 * Returns the Context of the APK which provides DexOptimizerService.
 * @param context The test application's Context.
 * @return Context of the APK which provides DexOptimizerService, or null if that package
 *         could not be found (in which case the test has already failed).
 */
private Context getRemoteContext(Context context) {
    try {
        // CONTEXT_INCLUDE_CODE lets the returned Context load the remote APK's code;
        // CONTEXT_IGNORE_SECURITY skips the signature check between the two packages.
        return context.getApplicationContext().createPackageContext(
                DEX_OPTIMIZER_SERVICE_PACKAGE,
                Context.CONTEXT_IGNORE_SECURITY | Context.CONTEXT_INCLUDE_CODE);
    } catch (NameNotFoundException e) {
        e.printStackTrace();
        fail("Could not get remote context");
        // fail() throws, so this return only satisfies the compiler.
        return null;
    }
}
/** Returns whether the Android OS thinks that a dex file needs to be re-optimized. */
private boolean isDexOptNeeded(File dexFile) {
    boolean optNeeded = false;
    try {
        optNeeded = DexFile.isDexOptNeeded(dexFile.getPath());
    } catch (Exception e) {
        e.printStackTrace();
        // fail() throws an AssertionError, so the return below is not reached on error.
        fail();
    }
    return optNeeded;
}
/** Returns whether the given ClassLoader can load the class named {@code CANARY_CLASS_NAME}. */
private boolean canLoadCanaryClass(ClassLoader loader) {
    boolean loaded;
    try {
        loader.loadClass(CANARY_CLASS_NAME);
        loaded = true;
    } catch (Exception e) {
        // A broken loader surfaces here (e.g. as ClassNotFoundException).
        loaded = false;
    }
    return loaded;
}
}
| bsd-3-clause |
Dufgui/Force.com-Toolkit-for-Android | Sforce-Android-Toolkit/com/sforce/android/soap/partner/DeleteSoapRequest.java | 2177 | package com.sforce.android.soap.partner;
import java.io.StringWriter;
import java.util.HashMap;
import org.xmlpull.v1.XmlSerializer;
import android.util.Xml;
/**
 * Builds the SOAP envelope for the Salesforce partner-API {@code delete} call.
 * The envelope carries the session id in the SOAP header and one {@code <ids>}
 * element per comma-separated record id in the body.
 */
public class DeleteSoapRequest implements Request {
    static final String ENV = "http://schemas.xmlsoap.org/soap/envelope/";
    static final String URN = "urn:partner.soap.sforce.com";
    static final String SOAPENV = "soapenv";
    static final String URN_STRING = "urn";
    static final String ENVELOPE = "Envelope";
    static final String HEADER = "Header";
    static final String SESSION_HEADER = "SessionHeader";
    static final String SESSION_ID = "sessionId";
    static final String BODY = "Body";
    static final String DELETE = "delete";
    static final String IDS = "ids";
    static final String SEPARATOR = ",";

    // The serialized SOAP envelope built from the constructor's request fields.
    private String soapRequest = null;

    /**
     * @param requestFields must contain {@code sessionId} and {@code ids} (a
     *        comma-separated list of record ids to delete).
     */
    public DeleteSoapRequest(HashMap<String, String> requestFields) {
        this.soapRequest = createSoapRequest(requestFields);
    }

    public String getRequest() {
        return this.soapRequest;
    }

    public void setRequest(String soapRequest) {
        this.soapRequest = soapRequest;
    }

    /** Serializes the request fields into the SOAP envelope XML. */
    private String createSoapRequest(HashMap<String, String> requestFields) {
        StringWriter out = new StringWriter();
        XmlSerializer xml = Xml.newSerializer();
        try {
            xml.setOutput(out);
            // Declare the SOAP-envelope and partner-API namespace prefixes.
            xml.setPrefix(SOAPENV, ENV);
            xml.setPrefix(URN_STRING, URN);
            xml.startTag(ENV, ENVELOPE);

            // Header: carries the session id.
            xml.startTag(ENV, HEADER);
            xml.startTag(ENV, SESSION_HEADER);
            xml.startTag(URN, SESSION_ID);
            xml.text(requestFields.get(SESSION_ID));
            xml.endTag(URN, SESSION_ID);
            xml.endTag(ENV, SESSION_HEADER);
            xml.endTag(ENV, HEADER);

            // Body: one <ids> element per comma-separated record id.
            xml.startTag(ENV, BODY);
            xml.startTag(URN, DELETE);
            for (String id : requestFields.get(IDS).split(SEPARATOR)) {
                xml.startTag(URN, IDS);
                xml.text(id.trim());
                xml.endTag(URN, IDS);
            }
            xml.endTag(URN, DELETE);
            xml.endTag(ENV, BODY);

            xml.endTag(ENV, ENVELOPE);
            xml.endDocument();
            return out.toString();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
| bsd-3-clause |
g-votte/eclipse-collections | eclipse-collections/src/main/java/org/eclipse/collections/impl/block/procedure/primitive/CollectCharProcedure.java | 1549 | /*
* Copyright (c) 2016 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl.block.procedure.primitive;
import org.eclipse.collections.api.block.function.primitive.CharFunction;
import org.eclipse.collections.api.block.procedure.Procedure;
import org.eclipse.collections.api.collection.primitive.MutableCharCollection;
/**
 * A procedure that evaluates a {@link CharFunction} against each visited object and
 * accumulates the resulting char values into a target {@link MutableCharCollection},
 * avoiding boxing of the primitive results.
 */
public final class CollectCharProcedure<T> implements Procedure<T>
{
    private static final long serialVersionUID = 1L;

    private final CharFunction<? super T> function;
    private final MutableCharCollection collection;

    public CollectCharProcedure(CharFunction<? super T> charFunction, MutableCharCollection targetCollection)
    {
        this.function = charFunction;
        this.collection = targetCollection;
    }

    @Override
    public void value(T object)
    {
        // Transform the object and add the primitive result directly to the target.
        this.collection.add(this.function.charValueOf(object));
    }

    public MutableCharCollection getCharCollection()
    {
        return this.collection;
    }
}
| bsd-3-clause |