index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.io.EOFException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.ipc.generic.GenericResponder;
/**
* A socket-based server implementation. This uses a simple, non-standard wire
* protocol and is not intended for production services.
*
* @deprecated use {@link SaslSocketServer} instead.
*/
@Deprecated
public class SocketServer extends Thread implements Server {
  private static final Logger LOG = LoggerFactory.getLogger(SocketServer.class);

  private final Responder responder;
  private final ServerSocketChannel channel;
  private final ThreadGroup group;

  /**
   * Creates a server bound to {@code addr}. The accept loop does not run until
   * {@link #start()} is called.
   *
   * @param responder computes responses for each request.
   * @param addr      local address to bind; port 0 selects a free port.
   * @throws IOException if the server socket cannot be opened or bound.
   */
  public SocketServer(Responder responder, SocketAddress addr) throws IOException {
    String name = "SocketServer on " + addr;
    this.responder = responder;
    this.group = new ThreadGroup(name); // groups per-connection threads so close() can interrupt them all
    this.channel = ServerSocketChannel.open();
    channel.socket().bind(addr);
    setName(name);
    setDaemon(true); // don't keep the JVM alive for this server
  }

  /** Returns the local port this server is listening on. */
  @Override
  public int getPort() {
    return channel.socket().getLocalPort();
  }

  /** Accept loop: spawns one {@link Connection} per accepted socket. */
  @Override
  public void run() {
    LOG.info("starting {}", channel.socket().getInetAddress());
    try {
      while (true) {
        try {
          new Connection(channel.accept());
        } catch (ClosedChannelException e) {
          // Channel was closed (by close() or by an interrupt while blocked in
          // accept()): normal shutdown path.
          return;
        } catch (IOException e) {
          LOG.warn("unexpected error", e);
          throw new RuntimeException(e);
        }
      }
    } finally {
      LOG.info("stopping {}", channel.socket().getInetAddress());
      try {
        channel.close();
      } catch (IOException ignored) {
        // best-effort close during shutdown; nothing useful can be done here
      }
    }
  }

  /** Stops the accept loop and interrupts all connection threads. */
  @Override
  public void close() {
    try {
      // Closing the channel directly unblocks accept() with a
      // ClosedChannelException, so shutdown does not depend solely on
      // interrupt delivery.
      channel.close();
    } catch (IOException ignored) {
      // best-effort; the interrupts below still stop the threads
    }
    this.interrupt();
    group.interrupt();
  }

  /**
   * Creates an appropriate {@link Transceiver} for this server. Returns a
   * {@link SocketTransceiver} by default.
   */
  protected Transceiver getTransceiver(SocketChannel channel) throws IOException {
    return new SocketTransceiver(channel);
  }

  /** Services a single client connection on its own daemon thread. */
  private class Connection implements Runnable {
    SocketChannel channel;
    Transceiver xc;

    public Connection(SocketChannel channel) throws IOException {
      this.channel = channel;
      // NOTE: starting a thread from the constructor publishes 'this' before
      // construction completes; preserved for compatibility with existing
      // behavior.
      Thread thread = new Thread(group, this);
      thread.setName("Connection to " + channel.socket().getRemoteSocketAddress());
      thread.setDaemon(true);
      thread.start();
    }

    @Override
    public void run() {
      try {
        try {
          this.xc = getTransceiver(channel);
          while (true) {
            // Service requests until the client disconnects.
            xc.writeBuffers(responder.respond(xc.readBuffers(), xc));
          }
        } catch (EOFException | ClosedChannelException e) {
          // client disconnected: normal termination
        } finally {
          if (xc != null) {
            xc.close();
          } else {
            // getTransceiver failed before xc was assigned; close the raw
            // channel so the socket is not leaked (and avoid an NPE on xc).
            channel.close();
          }
        }
      } catch (IOException e) {
        LOG.warn("unexpected error", e);
      }
    }
  }

  /** Demo entry point: starts a server whose every message fails. */
  public static void main(String[] arg) throws Exception {
    Responder responder = new GenericResponder(Protocol.parse("{\"protocol\": \"X\"}")) {
      @Override
      public Object respond(Message message, Object request) throws Exception {
        throw new AvroRemoteException("no messages!");
      }
    };
    SocketServer server = new SocketServer(responder, new InetSocketAddress(0));
    server.start();
    System.out.println("server started on port: " + server.getPort());
    server.join();
  }
}
| 7,500 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/CallFuture.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* A Future implementation for RPCs.
*/
public class CallFuture<T> implements Future<T>, Callback<T> {
  private final CountDownLatch latch = new CountDownLatch(1);
  private final Callback<T> chainedCallback;
  // volatile: getResult()/getError() are documented as non-blocking, so they
  // may be called from a thread that never awaited the latch. Without
  // volatile there is no happens-before edge and such a caller could see a
  // stale null even after completion.
  private volatile T result = null;
  private volatile Throwable error = null;

  /**
   * Creates a CallFuture.
   */
  public CallFuture() {
    this(null);
  }

  /**
   * Creates a CallFuture with a chained Callback which will be invoked when this
   * CallFuture's Callback methods are invoked.
   *
   * @param chainedCallback the chained Callback to set.
   */
  public CallFuture(Callback<T> chainedCallback) {
    this.chainedCallback = chainedCallback;
  }

  /**
   * Sets the RPC response, and unblocks all threads waiting on {@link #get()} or
   * {@link #get(long, TimeUnit)}.
   *
   * @param result the RPC result to set.
   */
  @Override
  public void handleResult(T result) {
    this.result = result;
    latch.countDown(); // must follow the write so waiters observe it
    if (chainedCallback != null) {
      chainedCallback.handleResult(result);
    }
  }

  /**
   * Sets an error thrown during RPC execution, and unblocks all threads waiting
   * on {@link #get()} or {@link #get(long, TimeUnit)}.
   *
   * @param error the RPC error to set.
   */
  @Override
  public void handleError(Throwable error) {
    this.error = error;
    latch.countDown(); // must follow the write so waiters observe it
    if (chainedCallback != null) {
      chainedCallback.handleError(error);
    }
  }

  /**
   * Gets the value of the RPC result without blocking. Using {@link #get()} or
   * {@link #get(long, TimeUnit)} is usually preferred because these methods block
   * until the result is available or an error occurs.
   *
   * @return the value of the response, or null if no result was returned or the
   *         RPC has not yet completed.
   */
  public T getResult() {
    return result;
  }

  /**
   * Gets the error that was thrown during RPC execution. Does not block. Either
   * {@link #get()} or {@link #get(long, TimeUnit)} should be called first because
   * these methods block until the RPC has completed.
   *
   * @return the RPC error that was thrown, or null if no error has occurred or if
   *         the RPC has not yet completed.
   */
  public Throwable getError() {
    return error;
  }

  /** Cancellation is not supported; always returns false. */
  @Override
  public boolean cancel(boolean mayInterruptIfRunning) {
    return false;
  }

  /** Cancellation is not supported; always returns false. */
  @Override
  public boolean isCancelled() {
    return false;
  }

  @Override
  public T get() throws InterruptedException, ExecutionException {
    latch.await();
    if (error != null) {
      throw new ExecutionException(error);
    }
    return result;
  }

  @Override
  public T get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
    if (latch.await(timeout, unit)) {
      if (error != null) {
        throw new ExecutionException(error);
      }
      return result;
    } else {
      throw new TimeoutException();
    }
  }

  /**
   * Waits for the CallFuture to complete without returning the result.
   *
   * @throws InterruptedException if interrupted.
   */
  public void await() throws InterruptedException {
    latch.await();
  }

  /**
   * Waits for the CallFuture to complete without returning the result.
   *
   * @param timeout the maximum time to wait.
   * @param unit    the time unit of the timeout argument.
   * @throws InterruptedException if interrupted.
   * @throws TimeoutException     if the wait timed out.
   */
  public void await(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
    if (!latch.await(timeout, unit)) {
      throw new TimeoutException();
    }
  }

  @Override
  public boolean isDone() {
    return latch.getCount() <= 0;
  }
}
| 7,501 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Responder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.UnresolvedUnionException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.Protocol.Message;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.avro.util.ByteBufferOutputStream;
import org.apache.avro.util.Utf8;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
/** Base class for the server side of a protocol interaction. */
/** Base class for the server side of a protocol interaction. */
public abstract class Responder {
  private static final Logger LOG = LoggerFactory.getLogger(Responder.class);

  // Schema of per-call metadata: a string -> bytes map carried with every
  // request and response for use by RPCPlugins.
  private static final Schema META = Schema.createMap(Schema.create(Schema.Type.BYTES));
  private static final GenericDatumReader<Map<String, ByteBuffer>> META_READER = new GenericDatumReader<>(META);
  private static final GenericDatumWriter<Map<String, ByteBuffer>> META_WRITER = new GenericDatumWriter<>(META);

  // Remote protocol of the call currently being serviced on this thread; set
  // around the user-level respond() invocation and exposed via getRemote().
  private static final ThreadLocal<Protocol> REMOTE = new ThreadLocal<>();

  // Cache of client protocols keyed by the MD5 hash clients send during
  // handshakes, so repeat clients don't need to retransmit their protocol.
  private final Map<MD5, Protocol> protocols = new ConcurrentHashMap<>();

  private final Protocol local;
  private final MD5 localHash;
  protected final List<RPCPlugin> rpcMetaPlugins;

  protected Responder(Protocol local) {
    this.local = local;
    this.localHash = new MD5();
    localHash.bytes(local.getMD5());
    // Seed the cache with our own protocol: a client that already knows it
    // will send our hash, which then resolves without a protocol exchange.
    protocols.put(localHash, local);
    this.rpcMetaPlugins = new CopyOnWriteArrayList<>();
  }

  /**
   * Return the remote protocol. Accesses a {@link ThreadLocal} that's set around
   * calls to {@link #respond(Protocol.Message, Object)}.
   */
  public static Protocol getRemote() {
    return REMOTE.get();
  }

  /** Return the local protocol. */
  public Protocol getLocal() {
    return local;
  }

  /**
   * Adds a new plugin to manipulate per-call metadata. Plugins are executed in
   * the order that they are added.
   *
   * @param plugin a plugin that will manipulate RPC metadata
   */
  public void addRPCPlugin(RPCPlugin plugin) {
    rpcMetaPlugins.add(plugin);
  }

  /**
   * Called by a server to deserialize a request, compute and serialize a response
   * or error.
   */
  public List<ByteBuffer> respond(List<ByteBuffer> buffers) throws IOException {
    return respond(buffers, null);
  }

  /**
   * Called by a server to deserialize a request, compute and serialize a response
   * or error. Transceiver is used by connection-based servers to track handshake
   * status of connection.
   */
  public List<ByteBuffer> respond(List<ByteBuffer> buffers, Transceiver connection) throws IOException {
    Decoder in = DecoderFactory.get().binaryDecoder(new ByteBufferInputStream(buffers), null);
    ByteBufferOutputStream bbo = new ByteBufferOutputStream();
    BinaryEncoder out = EncoderFactory.get().binaryEncoder(bbo, null);
    Exception error = null;
    RPCContext context = new RPCContext();
    List<ByteBuffer> payload = null;
    List<ByteBuffer> handshake = null;
    boolean wasConnected = connection != null && connection.isConnected();
    try {
      Protocol remote = handshake(in, out, connection);
      out.flush();
      if (remote == null) // handshake failed
        return bbo.getBufferList();
      // getBufferList() drains the accumulated buffers, so everything written
      // to 'out' after this point forms the response payload, not the
      // handshake.
      handshake = bbo.getBufferList();
      // read request using remote protocol specification
      context.setRequestCallMeta(META_READER.read(null, in));
      String messageName = in.readString(null).toString();
      if (messageName.equals("")) // a handshake ping
        return handshake;
      Message rm = remote.getMessages().get(messageName);
      if (rm == null)
        throw new AvroRuntimeException("No such remote message: " + messageName);
      Message m = getLocal().getMessages().get(messageName);
      if (m == null)
        throw new AvroRuntimeException("No message named " + messageName + " in " + getLocal());
      Object request = readRequest(rm.getRequest(), m.getRequest(), in);
      context.setMessage(rm);
      for (RPCPlugin plugin : rpcMetaPlugins) {
        plugin.serverReceiveRequest(context);
      }
      // create response using local protocol specification
      if ((m.isOneWay() != rm.isOneWay()) && wasConnected)
        throw new AvroRuntimeException("Not both one-way: " + messageName);
      Object response = null;
      try {
        // Expose the remote protocol to user code via getRemote() for the
        // duration of the user-level respond() call.
        REMOTE.set(remote);
        response = respond(m, request);
        context.setResponse(response);
      } catch (Exception e) {
        // An exception from user code becomes a declared error response, not
        // a system error.
        error = e;
        context.setError(error);
        LOG.warn("user error", e);
      } finally {
        REMOTE.set(null);
      }
      if (m.isOneWay() && wasConnected) // no response data
        return null;
      // Response framing: a boolean error flag followed by either the
      // response datum or the error datum.
      out.writeBoolean(error != null);
      if (error == null)
        writeResponse(m.getResponse(), response, out);
      else
        try {
          writeError(m.getErrors(), error, out);
        } catch (UnresolvedUnionException e) { // unexpected error
          // The error isn't declared in the protocol; rethrow so it is
          // reported as a system error below.
          throw error;
        }
    } catch (Exception e) { // system error
      LOG.warn("system error", e);
      context.setError(e);
      // Discard any partially written response and start a fresh payload
      // containing only the system error string.
      bbo = new ByteBufferOutputStream();
      out = EncoderFactory.get().binaryEncoder(bbo, null);
      out.writeBoolean(true);
      writeError(Protocol.SYSTEM_ERRORS, new Utf8(e.toString()), out);
      if (null == handshake) {
        // The failure happened before the handshake was captured; use an
        // empty handshake so the prepend below is still well-formed.
        handshake = new ByteBufferOutputStream().getBufferList();
      }
    }
    out.flush();
    payload = bbo.getBufferList();
    // Grab meta-data from plugins
    context.setResponsePayload(payload);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.serverSendResponse(context);
    }
    // At this point bbo holds only the response call metadata; the final
    // wire layout is handshake, then metadata, then payload.
    META_WRITER.write(context.responseCallMeta(), out);
    out.flush();
    // Prepend handshake and append payload
    bbo.prepend(handshake);
    bbo.append(payload);
    return bbo.getBufferList();
  }

  private SpecificDatumWriter<HandshakeResponse> handshakeWriter = new SpecificDatumWriter<>(HandshakeResponse.class);
  private SpecificDatumReader<HandshakeRequest> handshakeReader = new SpecificDatumReader<>(HandshakeRequest.class);

  /**
   * Performs (or skips) the connection handshake and returns the client's
   * protocol, or null if it could not be resolved from the client's hash and
   * no inline protocol was supplied.
   */
  private Protocol handshake(Decoder in, Encoder out, Transceiver connection) throws IOException {
    if (connection != null && connection.isConnected())
      return connection.getRemote(); // already handshaken on this connection
    HandshakeRequest request = handshakeReader.read(null, in);
    Protocol remote = protocols.get(request.getClientHash());
    if (remote == null && request.getClientProtocol() != null) {
      remote = Protocol.parse(request.getClientProtocol().toString());
      protocols.put(request.getClientHash(), remote);
    }
    HandshakeResponse response = new HandshakeResponse();
    if (localHash.equals(request.getServerHash())) {
      // Client already knows our protocol; BOTH if we also know the client's.
      response.setMatch(remote == null ? HandshakeMatch.NONE : HandshakeMatch.BOTH);
    } else {
      // Client has a stale notion of our protocol; CLIENT if we know theirs.
      response.setMatch(remote == null ? HandshakeMatch.NONE : HandshakeMatch.CLIENT);
    }
    if (response.getMatch() != HandshakeMatch.BOTH) {
      // Send our protocol so the client can cache it for next time.
      response.setServerProtocol(local.toString());
      response.setServerHash(localHash);
    }
    RPCContext context = new RPCContext();
    context.setHandshakeRequest(request);
    context.setHandshakeResponse(response);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.serverConnecting(context);
    }
    handshakeWriter.write(response, out);
    if (connection != null && response.getMatch() != HandshakeMatch.NONE)
      connection.setRemote(remote); // remember for subsequent calls
    return remote;
  }

  /** Computes the response for a message. */
  public abstract Object respond(Message message, Object request) throws Exception;

  /** Reads a request message. */
  public abstract Object readRequest(Schema actual, Schema expected, Decoder in) throws IOException;

  /** Writes a response message. */
  public abstract void writeResponse(Schema schema, Object response, Encoder out) throws IOException;

  /** Writes an error message. */
  public abstract void writeError(Schema schema, Object error, Encoder out) throws IOException;
}
| 7,502 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Ipc.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import org.slf4j.LoggerFactory;
/** IPC utilities, including client and server factories. */
/** IPC utilities, including client and server factories. */
public class Ipc {
  private Ipc() {
  } // no public ctor

  // Guards the one-time deprecation warning below; volatile so the flag is
  // visible across threads (a duplicate warning is harmless either way).
  static volatile boolean warned = false;

  /** Create a client {@link Transceiver} connecting to the provided URI. */
  public static Transceiver createTransceiver(URI uri) throws IOException {
    if ("http".equals(uri.getScheme()))
      return new HttpTransceiver(uri.toURL());
    else if ("avro".equals(uri.getScheme()))
      return new SaslSocketTransceiver(new InetSocketAddress(uri.getHost(), uri.getPort()));
    else
      throw new IOException("unknown uri scheme: " + uri);
  }

  /**
   * Create a {@link Server} listening at the named URI using the provided
   * responder.
   *
   * @throws IOException if the scheme is unknown, or for "http" if the
   *                     optional jetty module is not on the classpath.
   */
  public static Server createServer(Responder responder, URI uri) throws IOException {
    if ("avro".equals(uri.getScheme())) {
      return new SaslSocketServer(responder, new InetSocketAddress(uri.getHost(), uri.getPort()));
    } else if ("http".equals(uri.getScheme())) {
      if (!warned) {
        LoggerFactory.getLogger(Ipc.class)
            .error("Using Ipc.createServer to create http instances is deprecated. Create "
                + " an instance of org.apache.avro.ipc.jetty.HttpServer directly.");
        warned = true;
      }
      try {
        // The jetty-based HttpServer is an optional dependency, so it is
        // loaded reflectively rather than referenced directly.
        Class<?> cls = Class.forName("org.apache.avro.ipc.jetty.HttpServer");
        return (Server) cls.getConstructor(Responder.class, Integer.TYPE).newInstance(responder, uri.getPort());
      } catch (Throwable t) {
        // Preserve the underlying failure (e.g. ClassNotFoundException)
        // instead of silently discarding it.
        throw new IOException("unknown uri scheme: " + uri, t);
      }
    }
    throw new IOException("unknown uri scheme: " + uri);
  }
}
| 7,503 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificResponder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.specific;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.avro.Schema;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.ipc.generic.GenericResponder;
/** {@link org.apache.avro.ipc.Responder Responder} for generated interfaces. */
/** {@link org.apache.avro.ipc.Responder Responder} for generated interfaces. */
public class SpecificResponder extends GenericResponder {
  private final Object impl;

  /** Creates a responder for {@code iface}, dispatching calls to {@code impl}. */
  public SpecificResponder(Class<?> iface, Object impl) {
    this(iface, impl, new SpecificData(impl.getClass().getClassLoader()));
  }

  /** Creates a responder for {@code protocol}, dispatching calls to {@code impl}. */
  public SpecificResponder(Protocol protocol, Object impl) {
    this(protocol, impl, new SpecificData(impl.getClass().getClassLoader()));
  }

  public SpecificResponder(Class<?> iface, Object impl, SpecificData data) {
    this(data.getProtocol(iface), impl, data);
  }

  public SpecificResponder(Protocol protocol, Object impl, SpecificData data) {
    super(protocol, data);
    this.impl = impl;
  }

  public SpecificData getSpecificData() {
    return (SpecificData) getGenericData();
  }

  @Override
  protected DatumWriter<Object> getDatumWriter(Schema schema) {
    return new SpecificDatumWriter<>(schema, getSpecificData());
  }

  @Override
  protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
    return new SpecificDatumReader<>(actual, expected, getSpecificData());
  }

  @Override
  public void writeError(Schema schema, Object error, Encoder out) throws IOException {
    getDatumWriter(schema).write(error, out);
  }

  /**
   * Dispatches a request to the matching method of {@code impl} via reflection.
   * Request record fields become positional arguments, matched by the Java
   * types derived from their schemas.
   *
   * @throws Exception            the implementation's own exception, unwrapped
   *                              from reflection.
   * @throws AvroRuntimeException if the method is missing/inaccessible, or the
   *                              implementation threw a non-Exception Throwable.
   */
  @Override
  public Object respond(Message message, Object request) throws Exception {
    int numParams = message.getRequest().getFields().size();
    Object[] params = new Object[numParams];
    Class<?>[] paramTypes = new Class<?>[numParams];
    int i = 0;
    try {
      for (Schema.Field param : message.getRequest().getFields()) {
        params[i] = ((GenericRecord) request).get(param.name());
        paramTypes[i] = getSpecificData().getClass(param.schema());
        i++;
      }
      Method method = impl.getClass().getMethod(message.getName(), paramTypes);
      // The implementation class itself may be non-public.
      method.setAccessible(true);
      return method.invoke(impl, params);
    } catch (InvocationTargetException e) {
      // Unwrap so callers see the implementation's exception, not the
      // reflection wrapper.
      Throwable error = e.getTargetException();
      if (error instanceof Exception) {
        throw (Exception) error;
      } else {
        throw new AvroRuntimeException(error);
      }
    } catch (NoSuchMethodException | IllegalAccessException e) {
      throw new AvroRuntimeException(e);
    }
  }
}
| 7,504 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/specific/SpecificRequestor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.specific;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Type;
import java.util.Arrays;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.Requestor;
import org.apache.avro.ipc.Callback;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
/** {@link org.apache.avro.ipc.Requestor Requestor} for generated interfaces. */
/** {@link org.apache.avro.ipc.Requestor Requestor} for generated interfaces. */
public class SpecificRequestor extends Requestor implements InvocationHandler {
  SpecificData data;

  public SpecificRequestor(Class<?> iface, Transceiver transceiver) throws IOException {
    this(iface, transceiver, new SpecificData(iface.getClassLoader()));
  }

  protected SpecificRequestor(Protocol protocol, Transceiver transceiver) throws IOException {
    this(protocol, transceiver, SpecificData.get());
  }

  public SpecificRequestor(Class<?> iface, Transceiver transceiver, SpecificData data) throws IOException {
    this(data.getProtocol(iface), transceiver, data);
  }

  public SpecificRequestor(Protocol protocol, Transceiver transceiver, SpecificData data) throws IOException {
    super(protocol, transceiver);
    this.data = data;
  }

  public SpecificData getSpecificData() {
    return data;
  }

  /**
   * Proxy dispatch: handles Object methods locally and turns every other
   * method call into an RPC. A trailing {@link Callback} parameter selects the
   * asynchronous calling convention.
   */
  @Override
  public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    String name = method.getName();
    switch (name) {
    case "hashCode":
      return hashCode();
    case "equals":
      // Two proxies are equal iff they share this invocation handler.
      Object obj = args[0];
      return (proxy == obj)
          || (obj != null && Proxy.isProxyClass(obj.getClass()) && this.equals(Proxy.getInvocationHandler(obj)));
    case "toString":
      String protocol = "unknown";
      String remote = "unknown";
      Class<?>[] interfaces = proxy.getClass().getInterfaces();
      if (interfaces.length > 0) {
        try {
          protocol = Class.forName(interfaces[0].getName()).getSimpleName();
        } catch (ClassNotFoundException ignored) {
          // toString must not fail; keep the "unknown" placeholder
        }
        InvocationHandler handler = Proxy.getInvocationHandler(proxy);
        if (handler instanceof Requestor) {
          try {
            remote = ((Requestor) handler).getTransceiver().getRemoteName();
          } catch (IOException ignored) {
            // toString must not fail; keep the "unknown" placeholder
          }
        }
      }
      return "Proxy[" + protocol + "," + remote + "]";
    default:
      try {
        // Check if this is a callback-based RPC. getParameterTypes() already
        // returns Class<?>[], so no instanceof/cast dance is needed.
        Class<?>[] parameterTypes = method.getParameterTypes();
        if ((parameterTypes.length > 0)
            && Callback.class.isAssignableFrom(parameterTypes[parameterTypes.length - 1])) {
          // Extract the Callback from the end of the argument list
          Object[] finalArgs = Arrays.copyOf(args, args.length - 1);
          Callback<?> callback = (Callback<?>) args[args.length - 1];
          request(method.getName(), finalArgs, callback);
          return null;
        } else {
          return request(method.getName(), args);
        }
      } catch (Exception e) {
        // Check if this is a declared Exception:
        for (Class<?> exceptionClass : method.getExceptionTypes()) {
          if (exceptionClass.isAssignableFrom(e.getClass())) {
            throw e;
          }
        }
        // Next, check for RuntimeExceptions:
        if (e instanceof RuntimeException) {
          throw e;
        }
        // Not an expected Exception, so wrap it in AvroRuntimeException:
        throw new AvroRuntimeException(e);
      }
    }
  }

  protected DatumWriter<Object> getDatumWriter(Schema schema) {
    return new SpecificDatumWriter<>(schema, data);
  }

  @Deprecated // for compatibility in 1.5
  protected DatumReader<Object> getDatumReader(Schema schema) {
    return getDatumReader(schema, schema);
  }

  protected DatumReader<Object> getDatumReader(Schema writer, Schema reader) {
    return new SpecificDatumReader<>(writer, reader, data);
  }

  /** Writes each request record field as a positional argument. */
  @Override
  public void writeRequest(Schema schema, Object request, Encoder out) throws IOException {
    Object[] args = (Object[]) request;
    int i = 0;
    for (Schema.Field param : schema.getFields())
      getDatumWriter(param.schema()).write(args[i++], out);
  }

  @Override
  public Object readResponse(Schema writer, Schema reader, Decoder in) throws IOException {
    return getDatumReader(writer, reader).read(null, in);
  }

  /** Reads an error datum, wrapping non-Exception values for the caller. */
  @Override
  public Exception readError(Schema writer, Schema reader, Decoder in) throws IOException {
    Object value = getDatumReader(writer, reader).read(null, in);
    if (value instanceof Exception)
      return (Exception) value;
    return new AvroRuntimeException(value.toString());
  }

  /** Create a proxy instance whose methods invoke RPCs. */
  public static <T> T getClient(Class<T> iface, Transceiver transceiver) throws IOException {
    return getClient(iface, transceiver, new SpecificData(iface.getClassLoader()));
  }

  /** Create a proxy instance whose methods invoke RPCs. */
  @SuppressWarnings("unchecked")
  public static <T> T getClient(Class<T> iface, Transceiver transceiver, SpecificData data) throws IOException {
    Protocol protocol = data.getProtocol(iface);
    return (T) Proxy.newProxyInstance(data.getClassLoader(), new Class[] { iface },
        new SpecificRequestor(protocol, transceiver, data));
  }

  /** Create a proxy instance whose methods invoke RPCs. */
  @SuppressWarnings("unchecked")
  public static <T> T getClient(Class<T> iface, SpecificRequestor requestor) throws IOException {
    return (T) Proxy.newProxyInstance(requestor.data.getClassLoader(), new Class[] { iface }, requestor);
  }

  /** Return the remote protocol for a proxy. */
  public static Protocol getRemote(Object proxy) throws IOException {
    return ((Requestor) Proxy.getInvocationHandler(proxy)).getRemote();
  }
}
| 7,505 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectResponder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.reflect;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Protocol;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
/** {@link org.apache.avro.ipc.Responder} for existing interfaces. */
/** {@link org.apache.avro.ipc.Responder} for existing interfaces. */
public class ReflectResponder extends SpecificResponder {

  /** Creates a responder for {@code iface}, dispatching calls to {@code impl}. */
  public ReflectResponder(Class<?> iface, Object impl) {
    this(iface, impl, new ReflectData(impl.getClass().getClassLoader()));
  }

  /** Creates a responder for {@code protocol}, dispatching calls to {@code impl}. */
  public ReflectResponder(Protocol protocol, Object impl) {
    this(protocol, impl, new ReflectData(impl.getClass().getClassLoader()));
  }

  public ReflectResponder(Class<?> iface, Object impl, ReflectData data) {
    this(data.getProtocol(iface), impl, data);
  }

  public ReflectResponder(Protocol protocol, Object impl, ReflectData data) {
    super(protocol, impl, data);
  }

  public ReflectData getReflectData() {
    return (ReflectData) getSpecificData();
  }

  @Override
  protected DatumWriter<Object> getDatumWriter(Schema schema) {
    return new ReflectDatumWriter<>(schema, getReflectData());
  }

  @Override
  protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
    return new ReflectDatumReader<>(actual, expected, getReflectData());
  }

  @Override
  public void writeError(Schema schema, Object error, Encoder out) throws IOException {
    if (error instanceof CharSequence)
      error = error.toString(); // system error: convert
    super.writeError(schema, error, out);
  }
}
| 7,506 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/reflect/ReflectRequestor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.reflect;
import java.io.IOException;
import java.lang.reflect.Proxy;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
/** A {@link org.apache.avro.ipc.Requestor} for existing interfaces. */
public class ReflectRequestor extends SpecificRequestor {

  /** Builds a requestor for {@code iface}, deriving reflect data from the interface's class loader. */
  public ReflectRequestor(Class<?> iface, Transceiver transceiver) throws IOException {
    this(iface, transceiver, new ReflectData(iface.getClassLoader()));
  }

  /** Builds a requestor for an explicit {@link Protocol}, using the shared {@link ReflectData} singleton. */
  protected ReflectRequestor(Protocol protocol, Transceiver transceiver) throws IOException {
    this(protocol, transceiver, ReflectData.get());
  }

  /** Builds a requestor for {@code iface} with caller-supplied {@link ReflectData}. */
  public ReflectRequestor(Class<?> iface, Transceiver transceiver, ReflectData data) throws IOException {
    this(data.getProtocol(iface), transceiver, data);
  }

  /** Builds a requestor for an explicit {@link Protocol} with caller-supplied {@link ReflectData}. */
  public ReflectRequestor(Protocol protocol, Transceiver transceiver, ReflectData data) throws IOException {
    super(protocol, transceiver, data);
  }

  /** Returns the {@link ReflectData} in use by this requestor. */
  public ReflectData getReflectData() {
    return (ReflectData) getSpecificData();
  }

  @Override
  protected DatumWriter<Object> getDatumWriter(Schema schema) {
    ReflectData reflect = getReflectData();
    return new ReflectDatumWriter<>(schema, reflect);
  }

  @Override
  protected DatumReader<Object> getDatumReader(Schema writer, Schema reader) {
    ReflectData reflect = getReflectData();
    return new ReflectDatumReader<>(writer, reader, reflect);
  }

  /** Creates a dynamic proxy whose method invocations are issued as RPCs over {@code transceiver}. */
  public static <T> T getClient(Class<T> iface, Transceiver transceiver) throws IOException {
    ReflectData data = new ReflectData(iface.getClassLoader());
    return getClient(iface, transceiver, data);
  }

  /** Creates a dynamic proxy whose method invocations are issued as RPCs, using the given {@link ReflectData}. */
  @SuppressWarnings("unchecked")
  public static <T> T getClient(Class<T> iface, Transceiver transceiver, ReflectData reflectData) throws IOException {
    ReflectRequestor handler = new ReflectRequestor(reflectData.getProtocol(iface), transceiver, reflectData);
    Class<?>[] interfaces = { iface };
    return (T) Proxy.newProxyInstance(reflectData.getClassLoader(), interfaces, handler);
  }

  /** Creates a dynamic proxy backed by an already-constructed {@link ReflectRequestor}. */
  @SuppressWarnings("unchecked")
  public static <T> T getClient(Class<T> iface, ReflectRequestor rreq) throws IOException {
    Class<?>[] interfaces = { iface };
    return (T) Proxy.newProxyInstance(rreq.getReflectData().getClassLoader(), interfaces, rreq);
  }
}
| 7,507 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericResponder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.generic;
import java.io.IOException;
import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.ipc.Responder;
/** {@link Responder} implementation for generic Java data. */
public abstract class GenericResponder extends Responder {
private GenericData data;
public GenericResponder(Protocol local) {
this(local, GenericData.get());
}
public GenericResponder(Protocol local, GenericData data) {
super(local);
this.data = data;
}
public GenericData getGenericData() {
return data;
}
protected DatumWriter<Object> getDatumWriter(Schema schema) {
return new GenericDatumWriter<>(schema, data);
}
protected DatumReader<Object> getDatumReader(Schema actual, Schema expected) {
return new GenericDatumReader<>(actual, expected, data);
}
@Override
public Object readRequest(Schema actual, Schema expected, Decoder in) throws IOException {
return getDatumReader(actual, expected).read(null, in);
}
@Override
public void writeResponse(Schema schema, Object response, Encoder out) throws IOException {
getDatumWriter(schema).write(response, out);
}
@Override
public void writeError(Schema schema, Object error, Encoder out) throws IOException {
if (error instanceof AvroRemoteException)
error = ((AvroRemoteException) error).getValue();
getDatumWriter(schema).write(error, out);
}
}
| 7,508 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/generic/GenericRequestor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.generic;
import java.io.IOException;
import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.ipc.Requestor;
import org.apache.avro.ipc.Transceiver;
/** {@link Requestor} implementation for generic Java data. */
public class GenericRequestor extends Requestor {
GenericData data;
public GenericRequestor(Protocol protocol, Transceiver transceiver) throws IOException {
this(protocol, transceiver, GenericData.get());
}
public GenericRequestor(Protocol protocol, Transceiver transceiver, GenericData data) throws IOException {
super(protocol, transceiver);
this.data = data;
}
public GenericData getGenericData() {
return data;
}
@Override
public void writeRequest(Schema schema, Object request, Encoder out) throws IOException {
new GenericDatumWriter<>(schema, data).write(request, out);
}
@Override
public Object readResponse(Schema writer, Schema reader, Decoder in) throws IOException {
return new GenericDatumReader<>(writer, reader, data).read(null, in);
}
@Override
public Exception readError(Schema writer, Schema reader, Decoder in) throws IOException {
Object error = new GenericDatumReader<>(writer, reader, data).read(null, in);
if (error instanceof CharSequence)
return new AvroRuntimeException(error.toString()); // system error
return new AvroRemoteException(error);
}
}
| 7,509 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/FloatHistogram.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
/**
* Specific implementation of histogram for floats, which also keeps track of
* basic summary statistics.
*
* @param <B>
*/
class FloatHistogram<B> extends Histogram<B, Float> {
  // Running totals used for mean / standard deviation.
  private float sum;
  private float sumOfSquares;

  public FloatHistogram(Segmenter<B, Float> segmenter) {
    super(segmenter);
  }

  @Override
  public void add(Float value) {
    super.add(value);
    float v = value;
    sum += v;
    sumOfSquares += v * v;
  }

  /** Mean of all added values; {@code Float.NaN} when no values have been added. */
  public float getMean() {
    return totalCount == 0 ? Float.NaN : sum / totalCount;
  }

  /** Sample (n-1) standard deviation; {@code Float.NaN} with fewer than two samples. */
  public float getUnbiasedStdDev() {
    if (totalCount <= 1) {
      return Float.NaN;
    }
    float mean = getMean();
    float variance = (sumOfSquares - totalCount * mean * mean) / (totalCount - 1);
    return (float) Math.sqrt(variance);
  }
}
| 7,510 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Stopwatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
/**
 * Encapsulates the passing of time. The time source ({@link Ticks}) is
 * injectable, making elapsed-time logic deterministic in tests.
 *
 * Not synchronized; confine each instance to a single thread.
 */
class Stopwatch {
  /** Encapsulates ticking time sources. */
  interface Ticks {
    /**
     * Returns a number of "ticks" in nanoseconds. This should be monotonically
     * non-decreasing.
     */
    long ticks();
  }

  /** Default system time source, backed by {@link System#nanoTime()}. */
  public static final Ticks SYSTEM_TICKS = new SystemTicks();

  private final Ticks ticks; // injectable time source
  private long start; // tick value captured by start()
  private long elapsed = -1; // duration of last completed run; -1 = never stopped
  private boolean running;

  public Stopwatch(Ticks ticks) {
    this.ticks = ticks;
  }

  /**
   * Returns elapsed ticks (nanoseconds for {@link #SYSTEM_TICKS}, despite any
   * older comment claiming seconds): time since start() while running,
   * otherwise the duration of the last completed start()/stop() cycle.
   *
   * @throws IllegalStateException if the watch has never completed a cycle and
   *                               is not running
   */
  public long elapsedNanos() {
    if (running) {
      return this.ticks.ticks() - start;
    } else {
      if (elapsed == -1)
        throw new IllegalStateException();
      return elapsed;
    }
  }

  /**
   * Starts the stopwatch.
   *
   * @throws IllegalStateException if already running
   */
  public void start() {
    if (running)
      throw new IllegalStateException();
    start = ticks.ticks();
    running = true;
  }

  /**
   * Stops the stopwatch and records the elapsed time.
   *
   * @throws IllegalStateException if not running
   */
  public void stop() {
    if (!running)
      throw new IllegalStateException();
    elapsed = ticks.ticks() - start;
    running = false;
  }

  /** Implementation of Ticks using System.nanoTime(). */
  private static class SystemTicks implements Ticks {
    @Override
    public long ticks() {
      return System.nanoTime();
    }
  }
}
| 7,511 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsPlugin.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.avro.Protocol.Message;
import org.apache.avro.ipc.RPCContext;
import org.apache.avro.ipc.RPCPlugin;
import org.apache.avro.ipc.stats.Histogram.Segmenter;
import org.apache.avro.ipc.stats.Stopwatch.Ticks;
/**
* Collects count and latency statistics about RPC calls. Keeps data for every
* method. Can be added to a Requestor (client) or Responder (server).
*
* This uses milliseconds as the standard unit of measure throughout the class,
* stored in floats.
*/
public class StatsPlugin extends RPCPlugin {
  /** Static declaration of histogram buckets for latencies, in milliseconds. */
  public static final Segmenter<String, Float> LATENCY_SEGMENTER = new Histogram.TreeMapSegmenter<>(
      new TreeSet<>(Arrays.asList(0f, 25f, 50f, 75f, 100f, 200f, 300f, 500f, 750f, 1000f, // 1 second
          2000f, 5000f, 10000f, 60000f, // 1 minute
          600000f)));

  /** Static declaration of histogram buckets for payload sizes, in bytes. */
  public static final Segmenter<String, Integer> PAYLOAD_SEGMENTER = new Histogram.TreeMapSegmenter<>(
      new TreeSet<>(Arrays.asList(0, 25, 50, 75, 100, 200, 300, 500, 750, 1000, // 1 k
          2000, 5000, 10000, 50000, 100000)));

  /**
   * Per-method histograms. Must be accessed while holding the corresponding
   * map's monitor.
   */
  Map<Message, FloatHistogram<?>> methodTimings = new HashMap<>();
  Map<Message, IntegerHistogram<?>> sendPayloads = new HashMap<>();
  Map<Message, IntegerHistogram<?>> receivePayloads = new HashMap<>();

  /** RPCs in flight; entries are removed as each RPC completes. */
  ConcurrentMap<RPCContext, Stopwatch> activeRpcs = new ConcurrentHashMap<>();
  private Ticks ticks;

  /** How long I've been alive. */
  public Date startupTime = new Date();

  private Segmenter<?, Float> floatSegmenter;
  private Segmenter<?, Integer> integerSegmenter;

  /** Construct a plugin with custom Ticks and Segmenter implementations. */
  public StatsPlugin(Ticks ticks, Segmenter<?, Float> floatSegmenter, Segmenter<?, Integer> integerSegmenter) {
    this.floatSegmenter = floatSegmenter;
    this.integerSegmenter = integerSegmenter;
    this.ticks = ticks;
  }

  /**
   * Construct a plugin with default (system) ticks, and default histogram
   * segmentation.
   */
  public StatsPlugin() {
    this(Stopwatch.SYSTEM_TICKS, LATENCY_SEGMENTER, PAYLOAD_SEGMENTER);
  }

  /**
   * Helper to get the size of an RPC payload: the sum of the buffers' limits,
   * or 0 for a null payload.
   */
  private int getPayloadSize(List<ByteBuffer> payload) {
    if (payload == null) {
      return 0;
    }
    int size = 0;
    for (ByteBuffer bb : payload) {
      size = size + bb.limit();
    }
    return size;
  }

  /**
   * Returns the payload histogram for {@code message}, creating and registering
   * one if absent. The caller must hold {@code map}'s monitor.
   */
  private IntegerHistogram<?> payloadHistogramFor(Map<Message, IntegerHistogram<?>> map, Message message) {
    IntegerHistogram<?> h = map.get(message);
    if (h == null) {
      h = createNewIntegerHistogram();
      map.put(message, h);
    }
    return h;
  }

  @Override
  public void serverReceiveRequest(RPCContext context) {
    Stopwatch t = new Stopwatch(ticks);
    t.start();
    this.activeRpcs.put(context, t);
    synchronized (receivePayloads) {
      payloadHistogramFor(receivePayloads, context.getMessage()).add(getPayloadSize(context.getRequestPayload()));
    }
  }

  @Override
  public void serverSendResponse(RPCContext context) {
    // NOTE(review): assumes serverReceiveRequest ran for this context; a
    // missing entry would NPE here — confirm the framework guarantees pairing.
    Stopwatch t = this.activeRpcs.remove(context);
    t.stop();
    publish(context, t);
    synchronized (sendPayloads) {
      payloadHistogramFor(sendPayloads, context.getMessage()).add(getPayloadSize(context.getResponsePayload()));
    }
  }

  @Override
  public void clientSendRequest(RPCContext context) {
    Stopwatch t = new Stopwatch(ticks);
    t.start();
    this.activeRpcs.put(context, t);
    synchronized (sendPayloads) {
      payloadHistogramFor(sendPayloads, context.getMessage()).add(getPayloadSize(context.getRequestPayload()));
    }
  }

  @Override
  public void clientReceiveResponse(RPCContext context) {
    Stopwatch t = this.activeRpcs.remove(context);
    t.stop();
    publish(context, t);
    synchronized (receivePayloads) {
      // The payload received by the client is the response, so measure the
      // response payload (this previously measured the request payload).
      payloadHistogramFor(receivePayloads, context.getMessage()).add(getPayloadSize(context.getResponsePayload()));
    }
  }

  /** Adds timing to the per-method latency histogram. */
  private void publish(RPCContext context, Stopwatch t) {
    Message message = context.getMessage();
    if (message == null)
      throw new IllegalArgumentException();
    synchronized (methodTimings) {
      FloatHistogram<?> h = methodTimings.get(message);
      if (h == null) {
        h = createNewFloatHistogram();
        methodTimings.put(message, h);
      }
      h.add(nanosToMillis(t.elapsedNanos()));
    }
  }

  private FloatHistogram<?> createNewFloatHistogram() {
    return new FloatHistogram<>(floatSegmenter);
  }

  private IntegerHistogram<?> createNewIntegerHistogram() {
    return new IntegerHistogram<>(integerSegmenter);
  }

  /** Converts nanoseconds to milliseconds. */
  static float nanosToMillis(long elapsedNanos) {
    return elapsedNanos / 1000000.0f;
  }
}
| 7,512 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/StatsServlet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import java.io.IOException;
import java.io.Writer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.Map.Entry;
import javax.servlet.ServletException;
import javax.servlet.UnavailableException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.avro.Protocol.Message;
import org.apache.avro.ipc.RPCContext;
/**
* Exposes information provided by a StatsPlugin as a web page.
*
* This class follows the same synchronization conventions as StatsPlugin, to
* avoid requiring StatsPlugin to serve a copy of the data.
*/
public class StatsServlet extends HttpServlet {
  private final StatsPlugin statsPlugin;
  private VelocityEngine velocityEngine;

  /**
   * SimpleDateFormat is not thread-safe and servlets handle requests
   * concurrently, so all access goes through {@link #formatDate(Date)}, which
   * synchronizes on this instance.
   */
  private static final SimpleDateFormat FORMATTER = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss");

  /** Thread-safe formatting via the shared {@link #FORMATTER}. */
  private static String formatDate(Date date) {
    synchronized (FORMATTER) {
      return FORMATTER.format(date);
    }
  }

  public StatsServlet(StatsPlugin statsPlugin) throws UnavailableException {
    this.statsPlugin = statsPlugin;
    this.velocityEngine = new VelocityEngine();
    // These two properties tell Velocity to use its own classpath-based loader
    velocityEngine.addProperty("resource.loaders", "class");
    velocityEngine.addProperty("resource.loader.class.class",
        "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
    velocityEngine.setProperty("runtime.strict_mode.enable", true);
    String logChuteName = "org.apache.velocity.runtime.log.NullLogChute";
    velocityEngine.setProperty("runtime.log.logsystem.class", logChuteName);
  }

  /*
   * Helper class to store per-message data which is passed to templates.
   *
   * The template expects a list of charts, each of which is parameterized by map
   * key-value string attributes.
   */
  public class RenderableMessage { // Velocity breaks if not public
    public String name;
    public int numCalls;
    public ArrayList<HashMap<String, String>> charts;

    public RenderableMessage(String name) {
      this.name = name;
      this.charts = new ArrayList<>();
    }

    public ArrayList<HashMap<String, String>> getCharts() {
      return this.charts;
    }

    // Lower-case accessor name is intentional: the Velocity template resolves
    // the "name" property through it.
    public String getname() {
      return this.name;
    }

    public int getNumCalls() {
      return this.numCalls;
    }
  }

  /*
   * Surround each string in an array with quotation marks and escape existing
   * quotes. Mutates and returns the input list.
   *
   * This is useful when we have an array of strings that we want to turn into a
   * javascript array declaration.
   */
  protected static List<String> escapeStringArray(List<String> input) {
    for (int i = 0; i < input.size(); i++) {
      input.set(i, "\"" + input.get(i).replace("\"", "\\\"") + "\"");
    }
    return input;
  }

  @Override
  protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    resp.setContentType("text/html");
    try {
      writeStats(resp.getWriter());
    } catch (Exception e) {
      // NOTE(review): errors are only printed to stderr and the client gets a
      // truncated page; consider returning a 500 instead.
      e.printStackTrace();
    }
  }

  /** Renders the full stats page (in-flight RPCs plus per-message charts) to {@code w}. */
  public void writeStats(Writer w) throws IOException {
    VelocityContext context = new VelocityContext();
    context.put("title", "Avro RPC Stats");

    ArrayList<String> rpcs = new ArrayList<>(); // in flight rpcs
    ArrayList<RenderableMessage> messages = new ArrayList<>();

    for (Entry<RPCContext, Stopwatch> rpc : this.statsPlugin.activeRpcs.entrySet()) {
      rpcs.add(renderActiveRpc(rpc.getKey(), rpc.getValue()));
    }

    // Get set of all seen messages
    Set<Message> keys = null;
    synchronized (this.statsPlugin.methodTimings) {
      keys = this.statsPlugin.methodTimings.keySet();
      for (Message m : keys) {
        messages.add(renderMethod(m));
      }
    }

    context.put("inFlightRpcs", rpcs);
    context.put("messages", messages);
    context.put("currTime", formatDate(new Date()));
    context.put("startupTime", formatDate(statsPlugin.startupTime));

    Template t;
    try {
      t = velocityEngine.getTemplate("org/apache/avro/ipc/stats/templates/statsview.vm");
    } catch (Exception e) {
      // Preserve the cause so template-loading failures remain diagnosable.
      throw new IOException(e);
    }
    t.merge(context, w);
  }

  /** Formats one in-flight RPC as "name: Nms". */
  private String renderActiveRpc(RPCContext rpc, Stopwatch stopwatch) throws IOException {
    return rpc.getMessage().getName() + ": " + formatMillis(StatsPlugin.nanosToMillis(stopwatch.elapsedNanos()));
  }

  /** Builds a "bar" chart attribute map for the given histogram. */
  private static HashMap<String, String> barChart(String title, int numCalls, float avg, float stdDev,
      Histogram<?, ?> hist) {
    HashMap<String, String> chart = new HashMap<>();
    // Fill in chart attributes for velocity
    chart.put("type", "bar");
    chart.put("title", title);
    chart.put("units", "ms"); // NOTE(review): payload charts are byte counts yet labeled "ms" — preserved as-is
    chart.put("numCalls", Integer.toString(numCalls));
    chart.put("avg", Float.toString(avg));
    chart.put("stdDev", Float.toString(stdDev));
    chart.put("labelStr", Arrays.toString(hist.getSegmenter().getBoundaryLabels().toArray()));
    chart.put("boundaryStr", Arrays.toString(escapeStringArray(hist.getSegmenter().getBucketLabels()).toArray()));
    chart.put("dataStr", Arrays.toString(hist.getHistogram()));
    return chart;
  }

  /** Builds a "dot" chart attribute map of the histogram's recent additions. */
  private static HashMap<String, String> dotChart(String title, Histogram<?, ?> hist) {
    HashMap<String, String> chart = new HashMap<>();
    chart.put("title", title);
    chart.put("type", "dot");
    chart.put("dataStr", Arrays.toString(hist.getRecentAdditions().toArray()));
    return chart;
  }

  /** Collects all charts for one message, locking each stats map while reading it. */
  private RenderableMessage renderMethod(Message message) {
    RenderableMessage out = new RenderableMessage(message.getName());

    synchronized (this.statsPlugin.methodTimings) {
      FloatHistogram<?> hist = this.statsPlugin.methodTimings.get(message);
      out.numCalls = hist.getCount();
      out.charts.add(barChart("All-Time Latency", hist.getCount(), hist.getMean(), hist.getUnbiasedStdDev(), hist));
      out.charts.add(dotChart("Latency", hist));
    }

    synchronized (this.statsPlugin.sendPayloads) {
      IntegerHistogram<?> hist = this.statsPlugin.sendPayloads.get(message);
      out.charts.add(barChart("All-Time Send Payload", hist.getCount(), hist.getMean(), hist.getUnbiasedStdDev(), hist));
      out.charts.add(dotChart("Send Payload", hist));
    }

    synchronized (this.statsPlugin.receivePayloads) {
      IntegerHistogram<?> hist = this.statsPlugin.receivePayloads.get(message);
      out.charts
          .add(barChart("All-Time Receive Payload", hist.getCount(), hist.getMean(), hist.getUnbiasedStdDev(), hist));
      out.charts.add(dotChart("Recv Payload", hist));
    }
    return out;
  }

  private CharSequence formatMillis(float millis) {
    return String.format("%.0fms", millis);
  }
}
| 7,513 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/IntegerHistogram.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
/**
* Specific implementation of histogram for integers, which also keeps track of
* basic summary statistics.
*
* @param <B>
*/
class IntegerHistogram<B> extends Histogram<B, Integer> {
  // Running totals used for mean / standard deviation.
  private float sum;
  private float sumOfSquares;

  public IntegerHistogram(Segmenter<B, Integer> segmenter) {
    super(segmenter);
  }

  @Override
  public void add(Integer value) {
    super.add(value);
    int v = value;
    sum += v;
    sumOfSquares += v * v;
  }

  /** Mean of all added values; -1 serves as the "no data" sentinel. */
  public float getMean() {
    if (totalCount == 0) {
      return -1;
    }
    return sum / (float) totalCount;
  }

  /** Sample (n-1) standard deviation; -1 with fewer than two samples. */
  public float getUnbiasedStdDev() {
    if (totalCount <= 1) {
      return -1;
    }
    float mean = getMean();
    float variance = (sumOfSquares - totalCount * mean * mean) / (float) (totalCount - 1);
    return (float) Math.sqrt(variance);
  }
}
| 7,514 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/stats/Histogram.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeMap;
/**
* Represents a histogram of values. This class uses a {@link Segmenter} to
* determine which bucket to place a given value into. Also stores the last
* MAX_HISTORY_SIZE entries which have been added to this histogram, in order.
*
* Note that Histogram, by itself, is not synchronized.
*
* @param <B> Bucket type. Often String, since buckets are typically used for
* their toString() representation.
* @param <T> Type of value
*/
class Histogram<B, T> {
/**
* How many recent additions we should track.
*/
public static final int MAX_HISTORY_SIZE = 20;
private Segmenter<B, T> segmenter;
private int[] counts;
protected int totalCount;
private LinkedList<T> recentAdditions;
/**
 * Interface to determine which bucket to place a value in.
 *
 * Segmenters should be immutable, so many histograms can re-use the same
 * segmenter.
 */
interface Segmenter<B, T> {
  /** Number of buckets to use. */
  int size();

  /**
   * Which bucket to place value in.
   *
   * @return Index of bucket for the value. At least 0 and less than size().
   * @throws SegmenterException if value does not fit in a bucket.
   */
  int segment(T value);

  /**
   * Returns an iterator of buckets. The order of iteration is consistent with
   * the segment numbers returned by {@link #segment(Object)}.
   */
  Iterator<B> getBuckets();

  /**
   * Returns a List of bucket boundaries. Useful for printing segmenters.
   */
  List<String> getBoundaryLabels();

  /**
   * Returns the bucket labels as a List of strings.
   */
  List<String> getBucketLabels();
}
public static class SegmenterException extends RuntimeException {
public SegmenterException(String s) {
super(s);
}
}
public static class TreeMapSegmenter<T extends Comparable<T>> implements Segmenter<String, T> {
private TreeMap<T, Integer> index = new TreeMap<>();
public TreeMapSegmenter(SortedSet<T> leftEndpoints) {
if (leftEndpoints.isEmpty()) {
throw new IllegalArgumentException("Endpoints must not be empty: " + leftEndpoints);
}
int i = 0;
for (T t : leftEndpoints) {
index.put(t, i++);
}
}
@Override
public int segment(T value) {
Map.Entry<T, Integer> e = index.floorEntry(value);
if (e == null) {
throw new SegmenterException("Could not find bucket for: " + value);
}
return e.getValue();
}
@Override
public int size() {
return index.size();
}
private String rangeAsString(T a, T b) {
return String.format("[%s,%s)", a, b == null ? "infinity" : b);
}
@Override
public ArrayList<String> getBoundaryLabels() {
ArrayList<String> outArray = new ArrayList<>(index.keySet().size());
for (T obj : index.keySet()) {
outArray.add(obj.toString());
}
return outArray;
}
@Override
public ArrayList<String> getBucketLabels() {
ArrayList<String> outArray = new ArrayList<>(index.keySet().size());
Iterator<String> bucketsIt = this.getBuckets();
while (bucketsIt.hasNext()) {
outArray.add(bucketsIt.next());
}
return outArray;
}
@Override
public Iterator<String> getBuckets() {
return new Iterator<String>() {
Iterator<T> it = index.keySet().iterator();
T cur = it.next(); // there's always at least one element
int pos = 0;
@Override
public boolean hasNext() {
return (pos < index.keySet().size());
}
@Override
public String next() {
pos = pos + 1;
T left = cur;
cur = it.hasNext() ? it.next() : null;
return rangeAsString(left, cur);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}
/**
* Creates a histogram using the specified segmenter.
*/
public Histogram(Segmenter<B, T> segmenter) {
this.segmenter = segmenter;
this.counts = new int[segmenter.size()];
this.recentAdditions = new LinkedList<>();
}
/** Tallies a value in the histogram. */
public void add(T value) {
int i = segmenter.segment(value);
counts[i]++;
totalCount++;
if (this.recentAdditions.size() > Histogram.MAX_HISTORY_SIZE) {
this.recentAdditions.pollLast();
}
this.recentAdditions.push(value);
}
/**
* Returns the underlying bucket values.
*/
public int[] getHistogram() {
return counts;
}
/**
* Returns the underlying segmenter used for this histogram.
*/
public Segmenter<B, T> getSegmenter() {
return this.segmenter;
}
/**
* Returns values recently added to this histogram. These are in reverse order
* (most recent first).
*/
public List<T> getRecentAdditions() {
return this.recentAdditions;
}
/** Returns the total count of entries. */
public int getCount() {
return totalCount;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
boolean first = true;
for (Entry<B> e : entries()) {
if (!first) {
sb.append(";");
} else {
first = false;
}
sb.append(e.bucket).append("=").append(e.count);
}
return sb.toString();
}
static class Entry<B> {
public Entry(B bucket, int count) {
this.bucket = bucket;
this.count = count;
}
B bucket;
int count;
}
private class EntryIterator implements Iterable<Entry<B>>, Iterator<Entry<B>> {
int i = 0;
Iterator<B> bucketNameIterator = segmenter.getBuckets();
@Override
public Iterator<Entry<B>> iterator() {
return this;
}
@Override
public boolean hasNext() {
return i < segmenter.size();
}
@Override
public Entry<B> next() {
return new Entry<>(bucketNameIterator.next(), counts[i++]);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
}
public Iterable<Entry<B>> entries() {
return new EntryIterator();
}
}
| 7,515 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/Perf.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf;
import java.io.PrintWriter;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.ChainedOptionsBuilder;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.TimeValue;
import org.openjdk.jmh.runner.options.WarmupMode;
/**
* Performance tests for various low level operations of Avro encoding and
* decoding.
*/
/**
 * Performance tests for various low level operations of Avro encoding and
 * decoding. Command-line launcher that configures and runs the JMH benchmarks
 * in this package.
 */
public final class Perf {

  /**
   * Parses the command line and runs the selected benchmarks.
   *
   * Supported flags: --mi (measurement iterations, default 3), --wi (warmup
   * iterations, default 3), --bw (enable bulk warmup), --test (benchmark name
   * patterns; defaults to everything in this package), --help.
   *
   * @param args command-line arguments
   * @throws Exception if parsing or the benchmark run fails
   */
  public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(Option.builder().argName("measurementIterations").longOpt("mi").hasArg()
        .desc("The number of measure iterations").numberOfArgs(1).build());
    options.addOption(Option.builder().argName("warmupIterations").longOpt("wi").hasArg()
        .desc("The number of warmup iterations").numberOfArgs(1).build());
    options.addOption(Option.builder().argName("bulkWarmup").longOpt("bw").desc("Flag to enabled bulk warmup").build());
    options.addOption(
        Option.builder().argName("test").longOpt("test").hasArg().desc("The performance tests to run").build());
    options.addOption(Option.builder().argName("help").longOpt("help").desc("Print the help menu").build());

    final CommandLine cmd = new DefaultParser().parse(options, args);

    if (cmd.hasOption("help")) {
      final HelpFormatter formatter = new HelpFormatter();
      final PrintWriter pw = new PrintWriter(System.out);
      formatter.printUsage(pw, 80, "Perf", options);
      pw.flush();
      return;
    }

    String[] tests = cmd.getOptionValues("test");
    if (tests == null || tests.length == 0) {
      // No explicit selection: run every benchmark in this package.
      tests = new String[] { Perf.class.getPackage().getName() + ".*" };
    }

    final Integer measurementIterations = Integer.valueOf(cmd.getOptionValue("mi", "3"));
    final Integer warmupIterations = Integer.valueOf(cmd.getOptionValue("wi", "3"));

    final ChainedOptionsBuilder runOpt = new OptionsBuilder().mode(Mode.Throughput).timeout(TimeValue.seconds(60))
        .warmupIterations(warmupIterations).measurementIterations(measurementIterations).forks(1).threads(1)
        .shouldDoGC(true);

    // Fixed: the option is registered under longOpt "bw" (argName is only a
    // display label), so the previous hasOption("builkWarmup") never matched
    // and bulk warmup could not be enabled.
    if (cmd.hasOption("bw")) {
      runOpt.warmupMode(WarmupMode.BULK);
    }

    for (final String test : tests) {
      runOpt.include(test);
    }

    new Runner(runOpt.build()).run();
  }
}
| 7,516 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/BasicArrayState.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test;
/**
 * Benchmark state for tests that encode/decode arrays of values. The overall
 * element budget ({@link BasicState#BATCH_SIZE}) is split into batches of
 * {@code arraySize} elements each.
 */
public abstract class BasicArrayState extends BasicState {

  /** Number of elements in each array processed per operation. */
  public final int arraySize;

  /**
   * @param arraySize elements per array; must evenly divide the parent batch
   *                  size so the element budget is preserved exactly
   */
  public BasicArrayState(final int arraySize) {
    super();
    this.arraySize = arraySize;
    final int remainder = super.getBatchSize() % arraySize;
    if (remainder != 0) {
      throw new IllegalArgumentException("Batch size must be divisible by array size");
    }
  }

  /** Batch size expressed in arrays rather than individual elements. */
  @Override
  public int getBatchSize() {
    return super.getBatchSize() / arraySize;
  }

  /** Returns the number of elements per array. */
  public int getArraySize() {
    return arraySize;
  }
}
| 7,517 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/BasicState.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Random;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
/**
 * Shared JMH state for the Avro benchmarks: a seeded random source, reusable
 * binary encoders/decoders, and a sink stream that discards all output.
 */
public abstract class BasicState {

  /** Number of logical operations performed per benchmark invocation. */
  public static final int BATCH_SIZE = 10000;

  private static final DecoderFactory DECODER_FACTORY = new DecoderFactory();
  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
  private static final OutputStream DISCARDING_SINK = new DiscardingOutputStream();

  // Fixed seed so every trial sees identical pseudo-random test data.
  private final Random random = new Random(13L);
  private final int batchSize = BATCH_SIZE;

  private BinaryDecoder decoderToReuse;
  private BinaryEncoder encoderToReuse;
  private BinaryEncoder blockingEncoderToReuse;

  public BasicState() {
    this.decoderToReuse = null;
  }

  /** Returns the deterministic random source for generating fixtures. */
  protected Random getRandom() {
    return this.random;
  }

  /** Wraps {@code buf} in a binary decoder, recycling the previous instance. */
  protected Decoder newDecoder(final byte[] buf) {
    decoderToReuse = DECODER_FACTORY.binaryDecoder(buf, decoderToReuse);
    return decoderToReuse;
  }

  /**
   * Returns a (possibly recycled) binary encoder writing to {@code out}.
   *
   * @param direct when true, an unbuffered direct encoder is used
   */
  protected Encoder newEncoder(boolean direct, OutputStream out) throws IOException {
    if (direct) {
      encoderToReuse = ENCODER_FACTORY.directBinaryEncoder(out, encoderToReuse);
    } else {
      encoderToReuse = ENCODER_FACTORY.binaryEncoder(out, encoderToReuse);
    }
    return encoderToReuse;
  }

  /**
   * Returns a (possibly recycled) blocking binary encoder with the given block
   * size, writing to {@code out}.
   */
  protected Encoder newEncoder(int blockSize, OutputStream out) throws IOException {
    blockingEncoderToReuse = ENCODER_FACTORY.configureBlockSize(blockSize).blockingBinaryEncoder(out,
        blockingEncoderToReuse);
    return blockingEncoderToReuse;
  }

  /** Number of operations each benchmark invocation should perform. */
  public int getBatchSize() {
    return this.batchSize;
  }

  /** Returns a shared stream that swallows everything written to it. */
  protected OutputStream getNullOutputStream() {
    return DISCARDING_SINK;
  }

  /** OutputStream that discards all bytes; used to measure encoding only. */
  private static class DiscardingOutputStream extends OutputStream {
    @Override
    public void write(int b) throws IOException {
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
    }

    @Override
    public void write(byte[] b) throws IOException {
    }
  }
}
| 7,518 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/BigRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test;
import java.util.Random;
/**
 * Mutable fixture record covering every Java primitive width used by the
 * benchmarks: two each of double, float, int, long, byte and short.
 */
public class BigRecord {
  public double d1;
  public double d11;
  public float f2;
  public float f22;
  public int f3;
  public int f33;
  public long f4;
  public long f44;
  public byte f5;
  public byte f55;
  public short f6;
  public short f66;

  /** Creates a record with all fields left at their default values. */
  public BigRecord() {
  }

  /**
   * Creates a record populated from {@code r}. Fields are drawn in declaration
   * order (doubles, floats, ints, longs, bytes, shorts), so the same seed
   * always yields the same record.
   */
  public BigRecord(final Random r) {
    d1 = r.nextDouble();
    d11 = r.nextDouble();
    f2 = r.nextFloat();
    f22 = r.nextFloat();
    f3 = r.nextInt();
    f33 = r.nextInt();
    f4 = r.nextLong();
    f44 = r.nextLong();
    f5 = (byte) r.nextInt();
    f55 = (byte) r.nextInt();
    f6 = (short) r.nextInt();
    f66 = (short) r.nextInt();
  }
}
| 7,519 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/BasicRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test;
import java.util.Random;
/**
 * Simple mutable fixture record with three double and three int fields, used
 * as the payload for the record encode/decode benchmarks.
 */
public final class BasicRecord {
  public double f1;
  public double f2;
  public double f3;
  public int f4;
  public int f5;
  public int f6;

  /** Creates a record with all fields at their default values. */
  public BasicRecord() {
  }

  /**
   * Creates a record populated from {@code r}: three doubles followed by three
   * ints, so a fixed seed yields a deterministic record.
   */
  public BasicRecord(final Random r) {
    this.f1 = r.nextDouble();
    this.f2 = r.nextDouble();
    this.f3 = r.nextDouble();
    this.f4 = r.nextInt();
    this.f5 = r.nextInt();
    this.f6 = r.nextInt();
  }
}
| 7,520 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/record/RecordWithPromotionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * Benchmarks decoding through a {@link ResolvingDecoder} where the reader
 * schema promotes the writer's int fields (f4-f6) to long.
 */
public class RecordWithPromotionTest {

  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  private static final String RECORD_SCHEMA_WITH_PROMOTION = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"long\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"long\" },\n" + "{ \"name\": \"f6\", \"type\": \"long\" }\n" + "] }";

  /**
   * Decodes a batch of records, dispatching on each field's position in the
   * reader schema: fields 0-2 are doubles, fields 3-5 are read as promoted
   * longs.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state) throws Exception {
    final Decoder decoder = state.decoder;
    final ResolvingDecoder resolver = (ResolvingDecoder) decoder;
    final Field[] fieldOrder = resolver.readFieldOrder();
    for (int rec = 0; rec < state.getBatchSize(); rec++) {
      for (final Field field : fieldOrder) {
        final int pos = field.pos();
        if (pos <= 2) {
          resolver.readDouble();
        } else if (pos <= 5) {
          resolver.readLong();
        }
      }
    }
  }

  /** Per-thread state: pre-encoded test bytes plus a resolving decoder. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema writerSchema;
    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      final Encoder enc = super.newEncoder(true, buffer);
      for (int rec = 0; rec < getBatchSize(); rec++) {
        final BasicRecord record = new BasicRecord(super.getRandom());
        enc.writeDouble(record.f1);
        enc.writeDouble(record.f2);
        enc.writeDouble(record.f3);
        enc.writeInt(record.f4);
        enc.writeInt(record.f5);
        enc.writeInt(record.f6);
      }
      this.testData = buffer.toByteArray();
    }

    /** Builds a fresh resolving decoder over the shared bytes per invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema, super.newDecoder(this.testData));
    }
  }
}
| 7,521 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/record/RecordWithDefaultTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.apache.avro.util.Utf8;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * Benchmarks decoding through a {@link ResolvingDecoder} where the reader
 * schema adds two string fields (f7, f8) with default values that the writer
 * never produced.
 */
public class RecordWithDefaultTest {

  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  private static final String RECORD_SCHEMA_WITH_DEFAULT = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f7\", \"type\": \"string\", " + "\"default\": \"undefined\" },\n"
      + "{ \"name\": \"f8\", \"type\": \"string\"," + "\"default\": \"undefined\" }\n" + "] }";

  /**
   * Decodes a batch of records, dispatching on reader-schema field position:
   * 0-2 doubles, 3-5 ints, 6-7 default-supplied strings (read into a reused
   * {@link Utf8} buffer).
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state) throws Exception {
    final Decoder decoder = state.decoder;
    final ResolvingDecoder resolver = (ResolvingDecoder) decoder;
    final Utf8 scratch = new Utf8();
    final Field[] fieldOrder = resolver.readFieldOrder();
    for (int rec = 0; rec < state.getBatchSize(); rec++) {
      for (final Field field : fieldOrder) {
        final int pos = field.pos();
        if (pos <= 2) {
          resolver.readDouble();
        } else if (pos <= 5) {
          resolver.readInt();
        } else if (pos <= 7) {
          resolver.readString(scratch);
        }
      }
    }
  }

  /** Per-thread state: pre-encoded test bytes plus a resolving decoder. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema writerSchema;
    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      final Encoder enc = super.newEncoder(true, buffer);
      for (int rec = 0; rec < getBatchSize(); rec++) {
        final BasicRecord record = new BasicRecord(super.getRandom());
        enc.writeDouble(record.f1);
        enc.writeDouble(record.f2);
        enc.writeDouble(record.f3);
        enc.writeInt(record.f4);
        enc.writeInt(record.f5);
        enc.writeInt(record.f6);
      }
      this.testData = buffer.toByteArray();
    }

    /** Builds a fresh resolving decoder over the shared bytes per invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema, super.newDecoder(this.testData));
    }
  }
}
| 7,522 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/record/ResolvingRecordTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * Benchmarks decoding through a resolving decoder whose reader schema lists
 * the same fields as the writer but in a different order
 * (f1, f3, f5, f2, f4, f6).
 */
public class ResolvingRecordTest {

  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  // Same six fields as RECORD_SCHEMA, but the reader declares them in the
  // order f1, f3, f5, f2, f4, f6 to force schema resolution work.
  private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f4\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /**
   * Decodes a batch of records through the resolving decoder, reading three
   * doubles then three ints per record.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    for (int i = 0; i < state.getBatchSize(); i++) {
      // TODO: Would expect this to be D,D,I,D,I,I to match read schema
      // NOTE(review): the read sequence below is D,D,D,I,I,I (writer order),
      // not the reader-schema order f1,f3,f5,f2,f4,f6. Whether the resolving
      // decoder requires reader order here (vs. readFieldOrder()) should be
      // confirmed before changing this; kept as-is to preserve the benchmark.
      d.readDouble();
      d.readDouble();
      d.readDouble();
      d.readInt();
      d.readInt();
      d.readInt();
    }
  }

  /** Per-thread state: pre-encoded test bytes plus a resolving decoder. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema writerSchema;
    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        final BasicRecord r = new BasicRecord(super.getRandom());
        encoder.writeDouble(r.f1);
        encoder.writeDouble(r.f2);
        encoder.writeDouble(r.f3);
        encoder.writeInt(r.f4);
        encoder.writeInt(r.f5);
        encoder.writeInt(r.f6);
      }
      this.testData = baos.toByteArray();
    }

    // Fresh resolving decoder over the shared test bytes for every invocation.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema, super.newDecoder(this.testData));
    }
  }
}
| 7,523 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/record/RecordTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * Benchmarks decoding a flat record of three doubles and three ints through a
 * validating decoder.
 */
public class RecordTest {

  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /**
   * Decodes a batch of records, accumulating the values and handing the totals
   * to the {@link Blackhole} so the JIT cannot eliminate the reads.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state, final Blackhole blackHole) throws Exception {
    final Decoder in = state.decoder;
    double doubleTotal = 0.0;
    int intTotal = 0;
    for (int rec = 0; rec < state.getBatchSize(); rec++) {
      doubleTotal += in.readDouble();
      doubleTotal += in.readDouble();
      doubleTotal += in.readDouble();
      intTotal += in.readInt();
      intTotal += in.readInt();
      intTotal += in.readInt();
    }
    blackHole.consume(doubleTotal);
    blackHole.consume(intTotal);
  }

  /** Per-thread state: pre-encoded test bytes plus a validating decoder. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      final Encoder enc = super.newEncoder(true, buffer);
      for (int rec = 0; rec < getBatchSize(); rec++) {
        final BasicRecord record = new BasicRecord(super.getRandom());
        enc.writeDouble(record.f1);
        enc.writeDouble(record.f2);
        enc.writeDouble(record.f3);
        enc.writeInt(record.f4);
        enc.writeInt(record.f5);
        enc.writeInt(record.f6);
      }
      this.testData = buffer.toByteArray();
    }

    /** Builds a fresh validating decoder over the shared bytes per invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(readerSchema, super.newDecoder(this.testData));
    }
  }
}
| 7,524 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/record/ValidatingRecordTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
public class ValidatingRecordTest {

  /** Schema under test: three double fields followed by three int fields. */
  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /** Measures writing a batch of records field-by-field through an encoder. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder out = state.encoder;
    for (final BasicRecord record : state.testData) {
      out.writeDouble(record.f1);
      out.writeDouble(record.f2);
      out.writeDouble(record.f3);
      out.writeInt(record.f4);
      out.writeInt(record.f5);
      out.writeInt(record.f6);
    }
  }

  /** Measures reading a batch of records field-by-field through a validating decoder. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state) throws Exception {
    final Decoder in = state.decoder;
    for (int record = 0; record < state.getBatchSize(); record++) {
      in.readDouble();
      in.readDouble();
      in.readDouble();
      in.readInt();
      in.readInt();
      in.readInt();
    }
  }

  /** Per-thread state for the encode benchmark. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private BasicRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      final BasicRecord[] records = new BasicRecord[getBatchSize()];
      for (int idx = 0; idx < records.length; idx++) {
        records[idx] = new BasicRecord(super.getRandom());
      }
      this.testData = records;
    }
  }

  /** Per-thread state for the decode benchmark. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      for (int idx = 0; idx < getBatchSize(); idx++) {
        final BasicRecord record = new BasicRecord(super.getRandom());
        out.writeDouble(record.f1);
        out.writeDouble(record.f2);
        out.writeDouble(record.f3);
        out.writeInt(record.f4);
        out.writeInt(record.f5);
        out.writeInt(record.f6);
      }
      this.testData = sink.toByteArray();
    }

    // Re-created before every invocation (Level.Invocation) so each measured
    // pass gets its own decoder over the pre-serialized bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(this.schema, super.newDecoder(this.testData));
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.record;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.ResolvingDecoder;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
public class RecordWithOutOfOrderTest {

  // Writer view: f1..f3 are doubles, f4..f6 are ints, in declaration order.
  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  // Reader view: same six fields, declared out of order (f1, f3, f5, f2, f4, f6),
  // which forces the resolving decoder to reconcile the two field orders.
  private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f4\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /**
   * Measures decoding through a {@link ResolvingDecoder} whose reader schema
   * lists the fields in a different order than the writer schema that produced
   * the test bytes.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    ResolvingDecoder r = (ResolvingDecoder) d;
    // Reader-schema fields, returned in the order they must be read from the stream.
    Field[] ff = r.readFieldOrder();
    for (int i = 0; i < state.getBatchSize(); i++) {
      for (int j = 0; j < ff.length; j++) {
        Field f = ff[j];
        // pos() is the field's index in the READER schema (f1, f3, f5, f2, f4, f6):
        // positions 0, 1, 3 are the doubles (f1, f3, f2); 2, 4, 5 are the ints (f5, f4, f6).
        switch (f.pos()) {
        case 0:
        case 1:
        case 3:
          r.readDouble();
          break;
        case 2:
        case 4:
        case 5:
          r.readInt();
          break;
        }
      }
    }
  }

  /** Per-thread state holding the serialized test data and a per-invocation decoder. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema writerSchema;
    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      // Records are written in WRITER-schema order: three doubles, then three ints.
      for (int i = 0; i < getBatchSize(); i++) {
        final BasicRecord r = new BasicRecord(super.getRandom());
        encoder.writeDouble(r.f1);
        encoder.writeDouble(r.f2);
        encoder.writeDouble(r.f3);
        encoder.writeInt(r.f4);
        encoder.writeInt(r.f5);
        encoder.writeInt(r.f6);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh resolving decoder before every invocation so each measured pass
    // reads the serialized batch from the start.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema, super.newDecoder(this.testData));
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class ReflectFloatArrayTest {

  /** Number of elements in each test array. */
  private static final int ARRAY_SIZE = 10;

  /** Serializes a batch of primitive float arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final float[] array : state.testData) {
      state.datumWriter.write(array, state.encoder);
    }
  }

  /** Deserializes a batch of primitive float arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final ReflectDatumReader<float[]> reader = new ReflectDatumReader<>(state.schema);
    final Decoder in = state.decoder;
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(reader.read(null, in));
    }
  }

  /** Per-thread state for {@link #encode}. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {

    private final Schema schema;
    private float[][] testData;
    private Encoder encoder;
    private ReflectDatumWriter<float[]> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      // Derive the Avro schema for float[] reflectively, then re-parse its JSON form.
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(float[].class).toString());
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      final float[][] arrays = new float[getBatchSize()][];
      for (int idx = 0; idx < arrays.length; idx++) {
        arrays[idx] = populateFloatArray(getRandom(), getArraySize());
      }
      this.testData = arrays;
    }
  }

  /** Per-thread state for {@link #decode}. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(float[].class).toString());
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      final ReflectDatumWriter<float[]> writer = new ReflectDatumWriter<>(schema);
      for (int idx = 0; idx < getBatchSize(); idx++) {
        writer.write(populateFloatArray(getRandom(), getArraySize()), out);
      }
      this.testData = sink.toByteArray();
    }

    // A fresh decoder per invocation so each measured pass reads from the start of the bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a {@code size}-element array of random floats drawn from {@code r}. */
  static float[] populateFloatArray(final Random r, final int size) {
    final float[] values = new float[size];
    for (int idx = 0; idx < size; idx++) {
      values[idx] = r.nextFloat();
    }
    return values;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class ReflectLongArrayTest {

  /** Number of elements in each test array. */
  private static final int ARRAY_SIZE = 10;

  /** Serializes a batch of primitive long arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final long[] r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /** Deserializes a batch of primitive long arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<long[]> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread state for {@link #encode}. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {

    private final Schema schema;
    private long[][] testData;
    private Encoder encoder;
    private ReflectDatumWriter<long[]> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(long[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new long[getBatchSize()][];
      for (int i = 0; i < testData.length; i++) {
        this.testData[i] = populateLongArray(getRandom(), getArraySize());
      }
    }
  }

  /** Per-thread state for {@link #decode}. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(long[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<long[]> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final long[] r = populateLongArray(getRandom(), getArraySize());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each measured pass reads from the start of the bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a {@code size}-element array of random longs drawn from {@code r}. */
  static long[] populateLongArray(final Random r, final int size) {
    long[] result = new long[size];
    for (int i = 0; i < result.length; i++) {
      result[i] = r.nextLong();
    }
    return result;
  }

  /**
   * @deprecated copy-paste misnomer (it fills a {@code long[]}, not doubles); kept as a
   *             delegating alias for any same-package callers. Use
   *             {@link #populateLongArray(Random, int)} instead.
   */
  @Deprecated
  static long[] populateDoubleArray(final Random r, final int size) {
    return populateLongArray(r, size);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class ReflectIntArrayTest {

  /** Number of elements in each test array. */
  private static final int ARRAY_SIZE = 10;

  /** Serializes a batch of primitive int arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final int[] r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /** Deserializes a batch of primitive int arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<int[]> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread state for {@link #encode}. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {

    private final Schema schema;
    private int[][] testData;
    private Encoder encoder;
    private ReflectDatumWriter<int[]> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(int[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new int[getBatchSize()][];
      for (int i = 0; i < testData.length; i++) {
        this.testData[i] = populateIntArray(getRandom(), getArraySize());
      }
    }
  }

  /** Per-thread state for {@link #decode}. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(int[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<int[]> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final int[] r = populateIntArray(getRandom(), getArraySize());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each measured pass reads from the start of the bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a {@code size}-element array of random ints drawn from {@code r}. */
  static int[] populateIntArray(final Random r, final int size) {
    int[] result = new int[size];
    for (int i = 0; i < result.length; i++) {
      result[i] = r.nextInt();
    }
    return result;
  }

  /**
   * @deprecated copy-paste misnomer (it fills an {@code int[]}, not doubles); kept as a
   *             delegating alias for any same-package callers. Use
   *             {@link #populateIntArray(Random, int)} instead.
   */
  @Deprecated
  static int[] populateDoubleArray(final Random r, final int size) {
    return populateIntArray(r, size);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
* Uses a larger array size than {@link ReflectFloatArrayTest}.
*/
public class ReflectLargeFloatArrayBlockedTest {

  // Ten times larger than ReflectFloatArrayTest's payload, to make blocked
  // array encoding worthwhile.
  private static final int ARRAY_SIZE = 100;

  /** Serializes a batch of large primitive float arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final float[] r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /** Deserializes a batch of large primitive float arrays via reflection. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<float[]> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread state for {@link #encode}. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {

    private final Schema schema;
    private float[][] testData;
    private Encoder encoder;
    private ReflectDatumWriter<float[]> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(float[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      // NOTE(review): unlike the boolean overload used elsewhere in these tests,
      // the int overload presumably selects a blocking binary encoder with a
      // 254-byte block size -- confirm against BasicState.newEncoder.
      this.encoder = super.newEncoder(254, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new float[getBatchSize()][];
      for (int i = 0; i < testData.length; i++) {
        this.testData[i] = populateFloatArray(getRandom(), getArraySize());
      }
    }
  }

  /** Per-thread state for {@link #decode}. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      final String jsonText = ReflectData.get().getSchema(float[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      // NOTE(review): the decode fixture is written with the boolean-overload
      // encoder (non-blocked path), not the 254-byte blocked one used by encode.
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<float[]> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final float[] r = populateFloatArray(getRandom(), getArraySize());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each measured pass reads from the start of the bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a {@code size}-element array of random floats drawn from {@code r}. */
  static float[] populateFloatArray(final Random r, final int size) {
    float[] result = new float[size];
    for (int i = 0; i < result.length; i++) {
      result[i] = r.nextFloat();
    }
    return result;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicState;
import org.apache.avro.perf.test.BigRecord;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class ReflectBigRecordTest {

  /** Reflect-based serialization of a large record type. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final BigRecord record : state.testData) {
      state.datumWriter.write(record, state.encoder);
    }
  }

  /** Reflect-based deserialization of a large record type. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final ReflectDatumReader<BigRecord> reader = new ReflectDatumReader<>(state.schema);
    final Decoder in = state.decoder;
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(reader.read(null, in));
    }
  }

  /** Per-thread state for {@link #encode}. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private final Schema schema;
    private BigRecord[] testData;
    private Encoder encoder;
    private ReflectDatumWriter<BigRecord> datumWriter;

    public TestStateEncode() {
      super();
      // Derive the Avro schema for BigRecord reflectively, then re-parse its JSON form.
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(BigRecord.class).toString());
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      final BigRecord[] records = new BigRecord[getBatchSize()];
      for (int idx = 0; idx < records.length; idx++) {
        records[idx] = new BigRecord(super.getRandom());
      }
      this.testData = records;
    }
  }

  /** Per-thread state for {@link #decode}. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(BigRecord.class).toString());
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      final ReflectDatumWriter<BigRecord> writer = new ReflectDatumWriter<>(schema);
      for (int idx = 0; idx < getBatchSize(); idx++) {
        writer.write(new BigRecord(super.getRandom()), out);
      }
      this.testData = sink.toByteArray();
    }

    // A fresh decoder per invocation so each measured pass reads from the start of the bytes.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark measuring Avro reflect-based serialization of a float[] that
 * is nested inside a wrapper record ({@link NativeArrayWrapper}).
 *
 * NOTE(review): the encode benchmark serializes NativeArrayWrapper instances,
 * while the decode benchmark writes and reads bare float[] values -- confirm
 * this asymmetry is intentional (the sibling ReflectNestedObjectArrayTest
 * follows the same pattern).
 */
public class ReflectNestedFloatArrayTest {

  // Number of elements in each generated float array.
  private static final int ARRAY_SIZE = 10;

  /** Serializes a batch of wrapper objects through a pre-built reflect writer. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final NativeArrayWrapper r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /**
   * Deserializes a batch of float arrays; each result is fed to the Blackhole
   * so the JIT cannot elide the work. Reader construction happens inside the
   * measured method, as in the sibling benchmarks.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<float[]> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread encode state: wrapper test data, null-sink encoder, and writer. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {
    private final Schema schema;
    private NativeArrayWrapper[] testData;
    private Encoder encoder;
    private ReflectDatumWriter<NativeArrayWrapper> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      // Round-trip the reflect-derived schema through JSON text so the
      // benchmark uses a freshly parsed Schema instance.
      final String jsonText = ReflectData.get().getSchema(NativeArrayWrapper.class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      // Encode to a null sink so only serialization cost is measured.
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new NativeArrayWrapper[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        NativeArrayWrapper wrapper = new NativeArrayWrapper();
        wrapper.value = populateFloatArray(getRandom(), getArraySize());
        this.testData[i] = wrapper;
      }
    }
  }

  /** Per-thread decode state: bytes pre-encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {
    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      // Decode side uses the schema of the bare float[], not the wrapper.
      final String jsonText = ReflectData.get().getSchema(float[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<float[]> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final float[] r = populateFloatArray(getRandom(), getArraySize());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    /** A decoder is consumed by each run, so rebuild it before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Fills a new float[size] with random values. */
  static float[] populateFloatArray(final Random r, final int size) {
    float[] result = new float[size];
    for (int i = 0; i < result.length; i++) {
      result[i] = r.nextFloat();
    }
    return result;
  }

  /** Holder so the float[] is serialized as a nested field of a record. */
  static class NativeArrayWrapper {
    float[] value;
  }
}
| 7,532 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectLargeFloatArrayTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * Uses a larger array size than {@link ReflectFloatArrayTest}.
 */
public class ReflectLargeFloatArrayTest {

  // Each generated array holds this many floats.
  private static final int ARRAY_SIZE = 100;

  /** Serializes every pre-generated float array through the shared reflect writer. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (int idx = 0; idx < state.testData.length; idx++) {
      state.datumWriter.write(state.testData[idx], state.encoder);
    }
  }

  /** Deserializes the pre-encoded arrays, feeding each result to the Blackhole. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final ReflectDatumReader<float[]> reader = new ReflectDatumReader<>(state.schema);
    final Decoder in = state.decoder;
    for (int idx = 0; idx < state.getBatchSize(); idx++) {
      blackhole.consume(reader.read(null, in));
    }
  }

  /** Per-thread encode state: random arrays, a null-sink encoder, and a writer. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {
    private final Schema schema;
    private float[][] testData;
    private Encoder encoder;
    private ReflectDatumWriter<float[]> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      // Parse the reflect-derived schema from its JSON form.
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(float[].class).toString());
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new float[getBatchSize()][];
      for (int idx = 0; idx < this.testData.length; idx++) {
        this.testData[idx] = populateFloatArray(getRandom(), getArraySize());
      }
    }
  }

  /** Per-thread decode state: bytes encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {
    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      this.schema = new Schema.Parser().parse(ReflectData.get().getSchema(float[].class).toString());
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      final ReflectDatumWriter<float[]> writer = new ReflectDatumWriter<>(schema);
      for (int idx = 0; idx < getBatchSize(); idx++) {
        writer.write(populateFloatArray(getRandom(), getArraySize()), out);
      }
      this.testData = sink.toByteArray();
    }

    /** Decoders are single-use; rebuild before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a float[size] filled with random values. */
  static float[] populateFloatArray(final Random rnd, final int size) {
    final float[] values = new float[size];
    for (int idx = 0; idx < size; idx++) {
      values[idx] = rnd.nextFloat();
    }
    return values;
  }
}
| 7,533 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectRecordTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark of Avro reflect-based serialization and deserialization of a
 * flat {@link BasicRecord}.
 */
public class ReflectRecordTest {

  /** Writes a batch of records through a pre-built reflect writer. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final BasicRecord r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /**
   * Reads a batch of records; each result goes to the Blackhole so the JIT
   * cannot optimize the work away. Reader construction is inside the measured
   * method, matching the sibling benchmarks.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<BasicRecord> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread encode state: random records, a writer, and a null-sink encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {
    private final Schema schema;
    private BasicRecord[] testData;
    private Encoder encoder;
    private ReflectDatumWriter<BasicRecord> datumWriter;

    public TestStateEncode() {
      super();
      // Round-trip the reflect-derived schema through JSON text so the
      // benchmark uses a freshly parsed Schema instance.
      final String jsonText = ReflectData.get().getSchema(BasicRecord.class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new BasicRecord[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        this.testData[i] = new BasicRecord(getRandom());
      }
    }
  }

  /** Per-thread decode state: records encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {
    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      final String jsonText = ReflectData.get().getSchema(BasicRecord.class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<BasicRecord> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final BasicRecord r = new BasicRecord(getRandom());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    /** A decoder is consumed by each run, so rebuild it before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }
}
| 7,534 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectDoubleArrayTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class ReflectDoubleArrayTest {
private static final int ARRAY_SIZE = 10;
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void encode(final TestStateEncode state) throws Exception {
for (final double[] r : state.testData) {
state.datumWriter.write(r, state.encoder);
}
}
@Benchmark
@OperationsPerInvocation(BasicState.BATCH_SIZE)
public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
final Decoder d = state.decoder;
final ReflectDatumReader<double[]> datumReader = new ReflectDatumReader<>(state.schema);
for (int i = 0; i < state.getBatchSize(); i++) {
blackhole.consume(datumReader.read(null, d));
}
}
@State(Scope.Thread)
public static class TestStateEncode extends BasicArrayState {
private final Schema schema;
private double[][] testData;
private Encoder encoder;
private ReflectDatumWriter<double[]> datumWriter;
public TestStateEncode() {
super(ARRAY_SIZE);
final String jsonText = ReflectData.get().getSchema(double[].class).toString();
this.schema = new Schema.Parser().parse(jsonText);
}
/**
* Setup the trial data.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws Exception {
this.encoder = super.newEncoder(false, getNullOutputStream());
this.datumWriter = new ReflectDatumWriter<>(schema);
this.testData = new double[getBatchSize()][];
for (int i = 0; i < testData.length; i++) {
this.testData[i] = populateDoubleArray(getRandom(), getArraySize());
}
}
}
@State(Scope.Thread)
public static class TestStateDecode extends BasicArrayState {
private final Schema schema;
private byte[] testData;
private Decoder decoder;
public TestStateDecode() {
super(ARRAY_SIZE);
final String jsonText = ReflectData.get().getSchema(double[].class).toString();
this.schema = new Schema.Parser().parse(jsonText);
}
/**
* Generate test data.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Encoder encoder = super.newEncoder(true, baos);
ReflectDatumWriter<double[]> writer = new ReflectDatumWriter<>(schema);
for (int i = 0; i < this.getBatchSize(); i++) {
final double[] r = populateDoubleArray(getRandom(), getArraySize());
writer.write(r, encoder);
}
this.testData = baos.toByteArray();
}
@Setup(Level.Invocation)
public void doSetupInvocation() throws Exception {
this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
}
}
static double[] populateDoubleArray(final Random r, final int size) {
double[] result = new double[size];
for (int i = 0; i < result.length; i++) {
result[i] = r.nextDouble();
}
return result;
}
}
| 7,535 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/reflect/ReflectNestedObjectArrayTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.perf.test.BasicArrayState;
import org.apache.avro.perf.test.BasicRecord;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark measuring Avro reflect-based serialization of an array of
 * {@link BasicRecord} nested inside a wrapper record ({@link ObjectArrayWrapper}).
 *
 * NOTE(review): the encode benchmark serializes ObjectArrayWrapper instances,
 * while the decode benchmark writes and reads bare BasicRecord[] values --
 * confirm this asymmetry is intentional (ReflectNestedFloatArrayTest follows
 * the same pattern).
 */
public class ReflectNestedObjectArrayTest {

  // Number of records in each generated array.
  private static final int ARRAY_SIZE = 10;

  /** Serializes a batch of wrapper objects through a pre-built reflect writer. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    for (final ObjectArrayWrapper r : state.testData) {
      state.datumWriter.write(r, state.encoder);
    }
  }

  /** Deserializes a batch of record arrays; results go to the Blackhole so the JIT cannot elide the work. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final ReflectDatumReader<BasicRecord[]> datumReader = new ReflectDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(datumReader.read(null, d));
    }
  }

  /** Per-thread encode state: wrapper test data, null-sink encoder, and writer. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicArrayState {
    private final Schema schema;
    private ObjectArrayWrapper[] testData;
    private Encoder encoder;
    private ReflectDatumWriter<ObjectArrayWrapper> datumWriter;

    public TestStateEncode() {
      super(ARRAY_SIZE);
      // Round-trip the reflect-derived schema through JSON text so the
      // benchmark uses a freshly parsed Schema instance.
      final String jsonText = ReflectData.get().getSchema(ObjectArrayWrapper.class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Setup the trial data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      // Encode to a null sink so only serialization cost is measured.
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.datumWriter = new ReflectDatumWriter<>(schema);
      this.testData = new ObjectArrayWrapper[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        ObjectArrayWrapper wrapper = new ObjectArrayWrapper();
        wrapper.value = populateRecordArray(getRandom(), getArraySize());
        this.testData[i] = wrapper;
      }
    }
  }

  /** Per-thread decode state: bytes encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicArrayState {
    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super(ARRAY_SIZE);
      // Decode side uses the schema of the bare BasicRecord[], not the wrapper.
      final String jsonText = ReflectData.get().getSchema(BasicRecord[].class).toString();
      this.schema = new Schema.Parser().parse(jsonText);
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      ReflectDatumWriter<BasicRecord[]> writer = new ReflectDatumWriter<>(schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final BasicRecord[] r = populateRecordArray(getRandom(), getArraySize());
        writer.write(r, encoder);
      }
      this.testData = baos.toByteArray();
    }

    /** A decoder is consumed by each run, so rebuild it before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Fills a new BasicRecord[size] with randomly populated records. */
  static BasicRecord[] populateRecordArray(final Random r, final int size) {
    BasicRecord[] result = new BasicRecord[size];
    for (int i = 0; i < result.length; i++) {
      result[i] = new BasicRecord(r);
    }
    return result;
  }

  /** Holder so the record array is serialized as a nested field of a record. */
  static class ObjectArrayWrapper {
    BasicRecord[] value;
  }
}
| 7,536 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/ExtendedEnumTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark of Avro generic encoding/decoding of a single-field record
 * whose field "f" is an enum.
 *
 * NOTE(review): the encode state declares 2 enum symbols while the decode
 * state declares 5 (the written data only ever uses the first two) --
 * presumably this exercises resolution against an extended symbol set, hence
 * the test name. Confirm before changing either schema.
 */
public class ExtendedEnumTest {

  /**
   * Wraps the given sub-schema JSON as the single field "f" of a record "R".
   * Hoisted here because both state classes previously carried identical
   * private copies of this helper.
   */
  private static String mkSchema(String subschema) {
    return ("{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n" + "{ \"name\": \"f\", \"type\": " + subschema
        + "}\n" + "] }");
  }

  /** Serializes the pre-generated records, four writes per loop iteration. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    // Writer construction is inside the measured method, matching decode.
    GenericDatumWriter<Object> writer = new GenericDatumWriter<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      writer.write(state.testData[i], e);
      writer.write(state.testData[i + 1], e);
      writer.write(state.testData[i + 2], e);
      writer.write(state.testData[i + 3], e);
    }
  }

  /** Deserializes the pre-encoded records, feeding each to the Blackhole. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackHole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final GenericDatumReader<Object> reader = new GenericDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      final Object o = reader.read(null, d);
      blackHole.consume(o);
    }
  }

  /** Per-thread encode state: records with a two-symbol enum field. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {
    private static final String ENUM_SCHEMA = "{ \"type\": \"enum\", \"name\":\"E\", \"symbols\": [\"A\", \"B\"] }";
    private final Schema schema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(mkSchema(ENUM_SCHEMA));
    }

    /**
     * Setup each trial
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      // Encode to a null sink so only serialization cost is measured.
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      final Schema enumSchema = this.schema.getField("f").schema();
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord rec = new GenericData.Record(this.schema);
        // Only the first two symbols are ever written.
        final int tag = super.getRandom().nextInt(2);
        rec.put("f", GenericData.get().createEnum(enumSchema.getEnumSymbols().get(tag), enumSchema));
        this.testData[i] = rec;
      }
    }
  }

  /** Per-thread decode state: schema declares five symbols, data uses only two. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {
    private static final String ENUM_SCHEMA = "{ \"type\": \"enum\", \"name\":\"E\", \"symbols\": [\"A\",\"B\",\"C\",\"D\",\"E\"] }";
    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(mkSchema(ENUM_SCHEMA));
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(this.schema);
      final Schema enumSchema = this.schema.getField("f").schema();
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord rec = new GenericData.Record(this.schema);
        final int tag = super.getRandom().nextInt(2);
        rec.put("f", GenericData.get().createEnum(enumSchema.getEnumSymbols().get(tag), enumSchema));
        writer.write(rec, encoder);
      }
      this.testData = baos.toByteArray();
    }

    /** A decoder is consumed by each run, so rebuild it before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,537 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/IntTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark of raw int encode/decode through an Avro Encoder/Decoder,
 * using values spanning every zig-zag varint length.
 */
public class IntTest {

  /** Writes the pre-generated ints; the 4-way unroll mirrors the data layout. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder out = state.encoder;
    final int[] data = state.testData;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      out.writeInt(data[i]);
      out.writeInt(data[i + 1]);
      out.writeInt(data[i + 2]);
      out.writeInt(data[i + 3]);
    }
  }

  /** Reads the batch back, accumulating into a sum so the reads cannot be elided. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public int decode(final TestStateDecode state) throws Exception {
    final Decoder in = state.decoder;
    int sum = 0;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      sum += in.readInt();
      sum += in.readInt();
      sum += in.readInt();
      sum += in.readInt();
    }
    return sum;
  }

  /** Per-thread encode state: random ints sized to cover 1- through 5-byte varints. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {
    // Upper bounds chosen so consecutive values encode to 1, 2, 3, and 4-5
    // zig-zag bytes respectively.
    private static final int[] BOUNDS = { 50, 5000, 500000, 150000000 };
    private int[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure there
     * are some numeric of each possible size.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new int[getBatchSize()];
      for (int i = 0; i < testData.length; i += 4) {
        for (int j = 0; j < BOUNDS.length; j++) {
          testData[i + j] = super.getRandom().nextInt(BOUNDS[j]);
        }
      }
    }
  }

  /** Per-thread decode state: bytes encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {
    // Same bounds as the encode state: 1 through 4-5 byte varints.
    private static final int[] BOUNDS = { 50, 5000, 500000, 150000000 };
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure there
     * are some numeric values of each possible size.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      for (int i = 0; i < getBatchSize(); i += 4) {
        for (int j = 0; j < BOUNDS.length; j++) {
          out.writeInt(super.getRandom().nextInt(BOUNDS[j]));
        }
      }
      this.testData = sink.toByteArray();
    }

    /** Decoders are single-use; rebuild before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,538 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/SmallLongTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark of long encode/decode through an Avro Encoder/Decoder for
 * values in the int range ("small" longs: 1- to 5-byte zig-zag varints).
 */
public class SmallLongTest {

  /** Writes the pre-generated values (int[] widened to long); 4-way unrolled. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeLong(state.testData[i + 0]);
      e.writeLong(state.testData[i + 1]);
      e.writeLong(state.testData[i + 2]);
      e.writeLong(state.testData[i + 3]);
    }
  }

  /** Reads the batch back, accumulating into a sum so the reads cannot be elided. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public int decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    int total = 0;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      total += d.readLong();
      total += d.readLong();
      total += d.readLong();
      total += d.readLong();
    }
    return total;
  }

  /** Per-thread encode state: random ints sized to cover each varint length. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {
    // int[] on purpose: the values are int-sized, and writeLong widens them.
    private int[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure there
     * are some numeric values of each possible size.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new int[getBatchSize()];
      for (int i = 0; i < testData.length; i += 4) {
        // fits in 1 byte
        testData[i + 0] = super.getRandom().nextInt(50);
        // fits in 2 bytes
        testData[i + 1] = super.getRandom().nextInt(5000);
        // fits in 3 bytes
        testData[i + 2] = super.getRandom().nextInt(500000);
        // most in 4 bytes, some in 5 bytes
        testData[i + 3] = super.getRandom().nextInt(150000000);
      }
    }
  }

  /** Per-thread decode state: bytes encoded once per trial, decoder rebuilt per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure there
     * are some numeric values of each possible size.
     *
     * Data is written with writeInt but read back with readLong: Avro's
     * zig-zag varint encoding produces the same bytes for int and long values
     * of this magnitude, so the two are wire-compatible (see the Avro
     * binary-encoding specification).
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i += 4) {
        // fits in 1 byte
        encoder.writeInt(super.getRandom().nextInt(50));
        // fits in 2 bytes
        encoder.writeInt(super.getRandom().nextInt(5000));
        // fits in 3 bytes
        encoder.writeInt(super.getRandom().nextInt(500000));
        // most in 4 bytes, some in 5 bytes
        encoder.writeInt(super.getRandom().nextInt(150000000));
      }
      this.testData = baos.toByteArray();
    }

    /** Decoders are single-use; rebuild before every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,539 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/BytesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring raw Avro {@link Encoder}/{@link Decoder} throughput
 * for variable-length byte strings (0-69 bytes each). Loops are manually
 * unrolled 4-at-a-time; the unroll shape is part of the measured code.
 */
public class BytesTest {

  /** Encodes {@link BasicState#BATCH_SIZE} byte arrays into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeBytes(state.testData[i + 0]);
      e.writeBytes(state.testData[i + 1]);
      e.writeBytes(state.testData[i + 2]);
      e.writeBytes(state.testData[i + 3]);
    }
  }

  /**
   * Decodes {@link BasicState#BATCH_SIZE} byte strings, reusing a single
   * ByteBuffer. Returning the buffer prevents JMH dead-code elimination.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public ByteBuffer decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      d.readBytes(state.bb);
      d.readBytes(state.bb);
      d.readBytes(state.bb);
      d.readBytes(state.bb);
    }
    return state.bb;
  }

  /** Per-thread encode state: pre-generated random byte arrays and a reusable encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private byte[][] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new byte[getBatchSize()][];
      for (int i = 0; i < testData.length; i++) {
        // Each entry is 0-69 random bytes.
        final byte[] data = new byte[super.getRandom().nextInt(70)];
        super.getRandom().nextBytes(data);
        testData[i] = data;
      }
    }
  }

  /** Per-thread decode state: pre-encoded bytes, a fresh decoder per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;
    // Capacity 70 covers the longest generated value (nextInt(70) gives max 69
    // bytes), so readBytes can reuse this buffer — presumably Decoder.readBytes
    // reuses a passed buffer with sufficient capacity; confirm against the
    // Decoder contract if this is changed.
    private ByteBuffer bb = ByteBuffer.allocate(70);

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        final byte[] data = new byte[super.getRandom().nextInt(70)];
        super.getRandom().nextBytes(data);
        encoder.writeBytes(data);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,540 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/LongTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring raw Avro {@link Encoder}/{@link Decoder} throughput
 * for full-range long values. Test data is generated so each group of 4 covers
 * a different Zig-Zag varint encoded size. Loops are manually unrolled
 * 4-at-a-time; the unroll shape is part of the measured code.
 */
public class LongTest {

  /** Encodes {@link BasicState#BATCH_SIZE} longs into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeLong(state.testData[i + 0]);
      e.writeLong(state.testData[i + 1]);
      e.writeLong(state.testData[i + 2]);
      e.writeLong(state.testData[i + 3]);
    }
  }

  /**
   * Decodes {@link BasicState#BATCH_SIZE} longs and returns the running sum
   * to prevent JMH dead-code elimination.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public long decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    long total = 0;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      total += d.readLong();
      total += d.readLong();
      total += d.readLong();
      total += d.readLong();
    }
    return total;
  }

  /** Per-thread encode state: pre-generated longs of varied encoded width. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private long[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure
     * there are some longs of each possible size. Note {@code nextLong() % mask}
     * can be negative; Zig-Zag handles negatives symmetrically so the size
     * comments below still hold by magnitude.
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new long[getBatchSize()];
      for (int i = 0; i < testData.length; i += 4) {
        // half fit in 1, half in 2
        testData[i + 0] = super.getRandom().nextLong() % 0x7FL;
        // half fit in <=3, half in 4
        testData[i + 1] = super.getRandom().nextLong() % 0x1FFFFFL;
        // half in <=5, half in 6
        testData[i + 2] = super.getRandom().nextLong() % 0x3FFFFFFFFL;
        // half in <=8, half in 9
        testData[i + 3] = super.getRandom().nextLong() % 0x1FFFFFFFFFFFFL;
      }
    }
  }

  /** Per-thread decode state: pre-encoded bytes, a fresh decoder per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Avro uses Zig-Zag variable length encoding for numeric values. Ensure
     * there are some numeric values of each possible size.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i += 4) {
        // half fit in 1, half in 2
        encoder.writeLong(super.getRandom().nextLong() % 0x7FL);
        // half fit in <=3, half in 4
        encoder.writeLong(super.getRandom().nextLong() % 0x1FFFFFL);
        // half in <=5, half in 6
        encoder.writeLong(super.getRandom().nextLong() % 0x3FFFFFFFFL);
        // half in <=8, half in 9
        encoder.writeLong(super.getRandom().nextLong() % 0x1FFFFFFFFFFFFL);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,541 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/ArrayTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring Avro array encode/decode overhead. The wire shape
 * is an outer array containing a single item, which is itself an inner array
 * of {@code batchSize / 4} items, each item consisting of 4 floats written
 * back-to-back. Encode and decode mirror this structure exactly.
 */
public class ArrayTest {

  /** Encodes the nested array structure into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    final int items = state.getBatchSize() / 4;
    // Outer array: exactly one item (the inner array).
    e.writeArrayStart();
    e.setItemCount(1);
    e.startItem();
    // Inner array: `items` items of 4 floats each.
    e.writeArrayStart();
    e.setItemCount(items);
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.startItem();
      e.writeFloat(state.testData[i + 0]);
      e.writeFloat(state.testData[i + 1]);
      e.writeFloat(state.testData[i + 2]);
      e.writeFloat(state.testData[i + 3]);
    }
    e.writeArrayEnd();
    e.writeArrayEnd();
  }

  /**
   * Decodes the nested array structure, summing all floats. The first
   * readArrayStart consumes the outer array header; the loop then iterates
   * the inner array's blocks, reading 4 floats per item. The trailing
   * arrayNext() consumes the outer array's terminator. Returning the sum
   * prevents JMH dead-code elimination.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public float decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    float total = 0.0f;
    d.readArrayStart();
    for (long i = d.readArrayStart(); i != 0; i = d.arrayNext()) {
      for (long j = 0; j < i; j++) {
        total += d.readFloat();
        total += d.readFloat();
        total += d.readFloat();
        total += d.readFloat();
      }
    }
    d.arrayNext();
    return total;
  }

  /** Per-thread encode state: pre-generated floats and a reusable encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private float[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new float[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = super.getRandom().nextFloat();
      }
    }
  }

  /** Per-thread decode state: the nested array pre-encoded to bytes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data — the same nested array shape the encode benchmark
     * produces, so decode reads a structurally identical stream.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final int items = getBatchSize() / 4;
      encoder.writeArrayStart();
      encoder.setItemCount(1);
      encoder.startItem();
      encoder.writeArrayStart();
      encoder.setItemCount(items);
      for (int i = 0; i < getBatchSize(); i += 4) {
        encoder.startItem();
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
      }
      encoder.writeArrayEnd();
      encoder.writeArrayEnd();
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,542 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/BooleanTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring raw Avro {@link Encoder}/{@link Decoder} throughput
 * for booleans. Loops are manually unrolled 4-at-a-time; the unroll shape is
 * part of the measured code.
 */
public class BooleanTest {

  /** Encodes {@link BasicState#BATCH_SIZE} booleans into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeBoolean(state.testData[i + 0]);
      e.writeBoolean(state.testData[i + 1]);
      e.writeBoolean(state.testData[i + 2]);
      e.writeBoolean(state.testData[i + 3]);
    }
  }

  /**
   * Decodes {@link BasicState#BATCH_SIZE} booleans, XOR-folding them into a
   * single result that is returned to prevent JMH dead-code elimination.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public boolean decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    boolean total = true;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      total ^= d.readBoolean();
      total ^= d.readBoolean();
      total ^= d.readBoolean();
      total ^= d.readBoolean();
    }
    return total;
  }

  /** Per-thread encode state: pre-generated booleans and a reusable encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private boolean[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new boolean[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = super.getRandom().nextBoolean();
      }
    }
  }

  /** Per-thread decode state: pre-encoded bytes, a fresh decoder per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeBoolean(super.getRandom().nextBoolean());
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,543 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/FloatTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring raw Avro {@link Encoder}/{@link Decoder} throughput
 * for floats. Loops are manually unrolled 4-at-a-time; the unroll shape is
 * part of the measured code.
 */
public class FloatTest {

  /** Encodes {@link BasicState#BATCH_SIZE} floats into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeFloat(state.testData[i + 0]);
      e.writeFloat(state.testData[i + 1]);
      e.writeFloat(state.testData[i + 2]);
      e.writeFloat(state.testData[i + 3]);
    }
  }

  /**
   * Decodes {@link BasicState#BATCH_SIZE} floats and returns the running sum
   * to prevent JMH dead-code elimination.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public float decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    float total = 0.0f;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      total += d.readFloat();
      total += d.readFloat();
      total += d.readFloat();
      total += d.readFloat();
    }
    return total;
  }

  /** Per-thread encode state: pre-generated floats and a reusable encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private float[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new float[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = super.getRandom().nextFloat();
      }
    }
  }

  /** Per-thread decode state: pre-encoded bytes, a fresh decoder per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeFloat(super.getRandom().nextFloat());
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,544 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/UnchangedUnionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark measuring generic-datum read/write throughput for a record
 * whose single field is a {@code ["null", "int"]} union ("unchanged" union:
 * the reader and writer schemas are identical). Roughly 75% of values are
 * non-null ints, 25% are nulls.
 */
public class UnchangedUnionTest {

  // Shared by both state classes below; previously duplicated in each.
  private static final String UNCHANGED_UNION = "[ \"null\", \"int\" ]";

  /** Builds the record schema JSON with the given subschema as field "f"'s type. */
  private static String mkSchema(String subschema) {
    return ("{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n" + "{ \"name\": \"f\", \"type\": " + subschema
        + "}\n" + "] }");
  }

  /**
   * Writes {@link BasicState#BATCH_SIZE} pre-built records into a null sink.
   * The writer is deliberately constructed inside the measured method, so its
   * construction cost is part of what this benchmark reports.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    GenericDatumWriter<Object> writer = new GenericDatumWriter<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      writer.write(state.testData[i + 0], e);
      writer.write(state.testData[i + 1], e);
      writer.write(state.testData[i + 2], e);
      writer.write(state.testData[i + 3], e);
    }
  }

  /**
   * Reads {@link BasicState#BATCH_SIZE} records, feeding each to the Blackhole
   * to prevent JMH dead-code elimination. Reader construction is also inside
   * the measured method, mirroring {@link #encode}.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackHole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final GenericDatumReader<Object> reader = new GenericDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      final Object o = reader.read(null, d);
      blackHole.consume(o);
    }
  }

  /** Per-thread encode state: pre-built generic records (~75% int, ~25% null field). */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private GenericRecord[] testData;
    private Encoder encoder;
    private final Schema schema;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(mkSchema(UNCHANGED_UNION));
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord rec = new GenericData.Record(this.schema);
        final int val = super.getRandom().nextInt(1000000);
        // 75% of values resolve to the int branch, 25% to the null branch.
        final Integer v = (val < 750000 ? Integer.valueOf(val) : null);
        rec.put("f", v);
        this.testData[i] = rec;
      }
    }
  }

  /** Per-thread decode state: the same record distribution pre-encoded to bytes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(mkSchema(UNCHANGED_UNION));
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(this.schema);
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord rec = new GenericData.Record(this.schema);
        final int val = super.getRandom().nextInt(1000000);
        final Integer v = (val < 750000 ? Integer.valueOf(val) : null);
        rec.put("f", v);
        writer.write(rec, encoder);
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,545 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/MapTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.apache.avro.util.Utf8;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring Avro map encode/decode overhead. The wire shape is
 * a single map of {@code batchSize / 4} entries; each entry is a constant
 * string key followed by 4 floats, so {@code batchSize} floats total flow
 * through per invocation.
 */
public class MapTest {

  /** Encodes the map (key + 4 floats per entry) into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    final int items = state.getBatchSize() / 4;
    e.writeMapStart();
    e.setItemCount(items);
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.startItem();
      e.writeString(state.utf);
      e.writeFloat(state.testData[i + 0]);
      e.writeFloat(state.testData[i + 1]);
      e.writeFloat(state.testData[i + 2]);
      e.writeFloat(state.testData[i + 3]);
    }
    e.writeMapEnd();
  }

  /**
   * Decodes the map, summing all floats; the sum is returned to prevent JMH
   * dead-code elimination. Fix: the {@code @OperationsPerInvocation}
   * annotation belongs here (one invocation processes BATCH_SIZE floats,
   * matching {@link #encode}); it was previously misplaced on the
   * TestStateEncode state class, where it has no effect, so decode scores
   * were normalized per invocation instead of per element.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public float decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    float result = 0.0f;
    for (long i = d.readMapStart(); i != 0; i = d.mapNext()) {
      for (long j = 0; j < i; j++) {
        state.utf = d.readString(state.utf);
        result += d.readFloat();
        result += d.readFloat();
        result += d.readFloat();
        result += d.readFloat();
      }
    }
    return result;
  }

  /** Per-thread encode state: pre-generated floats, a reusable encoder and key. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private float[] testData;
    private Encoder encoder;
    // Constant key written for every entry; pre-built Utf8 avoids per-write
    // String->Utf8 conversion in the measured loop.
    private Utf8 utf = new Utf8("This is a map key");

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new float[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = super.getRandom().nextFloat();
      }
    }
  }

  /** Per-thread decode state: the map pre-encoded to bytes, plus a reusable Utf8. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;
    // Reused as the target of readString to avoid allocating per key.
    private Utf8 utf = new Utf8();

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final int items = getBatchSize() / 4;
      encoder.writeMapStart();
      encoder.setItemCount(items);
      for (int i = 0; i < getBatchSize(); i += 4) {
        encoder.startItem();
        encoder.writeString("This is a map key");
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
        encoder.writeFloat(super.getRandom().nextFloat());
      }
      encoder.writeMapEnd();
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,546 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/StringTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.apache.avro.util.Utf8;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
/**
 * JMH benchmark measuring raw Avro {@link Encoder}/{@link Decoder} throughput
 * for random lowercase ASCII strings of length 0-69. Loops are manually
 * unrolled 4-at-a-time; the unroll shape is part of the measured code.
 */
public class StringTest {

  /**
   * Builds a random string of 0-69 lowercase letters ('a'..'y'; nextInt('z'-'a')
   * excludes 'z' — preserved as-is from the original generator so test data is
   * unchanged). Hoisted here from two identical private copies in the state
   * classes below. Fully-qualified Random avoids touching the import block.
   */
  private static String randomString(final java.util.Random random) {
    final char[] data = new char[random.nextInt(70)];
    for (int j = 0; j < data.length; j++) {
      data[j] = (char) ('a' + random.nextInt('z' - 'a'));
    }
    return new String(data);
  }

  /** Encodes {@link BasicState#BATCH_SIZE} strings into a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeString(state.testData[i + 0]);
      e.writeString(state.testData[i + 1]);
      e.writeString(state.testData[i + 2]);
      e.writeString(state.testData[i + 3]);
    }
  }

  /**
   * Decodes {@link BasicState#BATCH_SIZE} strings (into a reused Utf8, then
   * converted to String), summing their lengths so JMH cannot dead-code
   * eliminate the work.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public int decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    int result = 0;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      result += d.readString(state.utf).toString().length();
      result += d.readString(state.utf).toString().length();
      result += d.readString(state.utf).toString().length();
      result += d.readString(state.utf).toString().length();
    }
    return result;
  }

  /** Per-thread encode state: pre-generated random strings and a reusable encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private String[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new String[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = randomString(getRandom());
      }
    }
  }

  /** Per-thread decode state: pre-encoded bytes, a fresh decoder per invocation. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;
    // Reused as the target of readString to avoid allocating per read.
    private Utf8 utf = new Utf8();

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeString(randomString(getRandom()));
      }
      this.testData = baos.toByteArray();
    }

    // A fresh decoder per invocation so each decode run starts at offset 0.
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,547 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/DoubleTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.basic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
public class DoubleTest {

  /**
   * Serializes the pre-generated doubles, manually unrolled to four writes per
   * iteration. Assumes the batch size is a multiple of 4 (BasicState.BATCH_SIZE is).
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      e.writeDouble(state.testData[i + 0]);
      e.writeDouble(state.testData[i + 1]);
      e.writeDouble(state.testData[i + 2]);
      e.writeDouble(state.testData[i + 3]);
    }
  }

  /**
   * Deserializes the pre-encoded doubles. The running total is returned so the
   * JIT cannot treat the reads as dead code.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public double decode(final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    double total = 0;
    for (int i = 0; i < state.getBatchSize(); i += 4) {
      total += d.readDouble();
      total += d.readDouble();
      total += d.readDouble();
      total += d.readDouble();
    }
    return total;
  }

  /** Per-thread state holding the doubles to encode and the target encoder. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private double[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
    }

    /**
     * Setup each trial: create the encoder and one random double per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new double[getBatchSize()];
      for (int i = 0; i < testData.length; i++) {
        testData[i] = super.getRandom().nextDouble();
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes that each invocation decodes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
    }

    /**
     * Generate test data: encode one random double per batch slot into a byte
     * array that every invocation re-decodes.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeDouble(super.getRandom().nextDouble());
      }
      // Flush before snapshotting so no bytes remain buffered inside the
      // encoder (harmless for direct encoders, required for buffered ones).
      encoder.flush();
      this.testData = baos.toByteArray();
    }

    /** Fresh decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,548 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericNestedFakeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericNestedFakeTest {

  // Record R with three nested single-double records (f1-f3 of type D) and
  // three int fields (f4-f6).
  private static final String NESTED_RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \n" + "{ \"type\": \"record\", \"name\": \"D\", \"fields\": [\n"
      + "{\"name\": \"dbl\", \"type\": \"double\" }]\n" + "} },\n" + "{ \"name\": \"f2\", \"type\": \"D\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"D\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /** Serializes the pre-built nested records through a GenericDatumWriter. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(state.schema);
    for (final GenericRecord rec : state.testData) {
      writer.write(rec, e);
    }
  }

  /** Deserializes the pre-encoded records; results go to the blackhole. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final GenericDatumReader<Object> reader = new GenericDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(reader.read(null, d));
    }
  }

  /** Per-thread state holding the nested records to encode. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private final Schema schema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(NESTED_RECORD_SCHEMA);
    }

    /**
     * Setup the trial data: one fully-populated nested record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      final Random r = super.getRandom();
      // Schema of the nested record D, shared by fields f1-f3.
      Schema doubleSchema = schema.getFields().get(0).schema();
      for (int i = 0; i < testData.length; i++) {
        GenericRecord rec = new GenericData.Record(schema);
        GenericRecord inner;
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(0, inner);
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(1, inner);
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(2, inner);
        rec.put(3, r.nextInt());
        rec.put(4, r.nextInt());
        rec.put(5, r.nextInt());
        testData[i] = rec;
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes that each invocation decodes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(NESTED_RECORD_SCHEMA);
    }

    /**
     * Generate test data: write the raw field values (the "fake" flat layout
     * matching R's binary encoding) for one record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final Random r = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
      }
      // Flush before snapshotting so no bytes remain buffered inside the
      // encoder (harmless for direct encoders, required for buffered ones).
      encoder.flush();
      this.testData = baos.toByteArray();
    }

    /** Fresh validating decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }
}
| 7,549 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericWithPromotionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericWithPromotionTest {

  // Writer schema: three doubles followed by three ints.
  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  // Reader schema: identical layout, but the int fields are promoted to long.
  private static final String RECORD_SCHEMA_WITH_PROMOTION = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"long\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"long\" },\n" + "{ \"name\": \"f6\", \"type\": \"long\" }\n" + "] }";

  /**
   * Reads records written with int fields back through a reader schema that
   * promotes those fields to long; decoded records go to the blackhole.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final GenericDatumReader<Object> datumReader = new GenericDatumReader<>(state.writerSchema, state.readerSchema);
    final Decoder in = state.decoder;
    int remaining = state.getBatchSize();
    while (remaining-- > 0) {
      blackhole.consume(datumReader.read(null, in));
    }
  }

  /** Per-thread state: records serialized with the writer schema. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema readerSchema;
    private final Schema writerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Generate test data: serialize one random record per batch slot with the
     * (un-promoted) writer schema.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      final GenericDatumWriter<Object> datumWriter = new GenericDatumWriter<>(this.writerSchema);
      final Random random = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord record = new GenericData.Record(writerSchema);
        record.put(0, random.nextDouble());
        record.put(1, random.nextDouble());
        record.put(2, random.nextDouble());
        record.put(3, random.nextInt());
        record.put(4, random.nextInt());
        record.put(5, random.nextInt());
        datumWriter.write(record, out);
      }
      out.flush();
      this.testData = sink.toByteArray();
    }

    /** Fresh decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,550 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericNestedTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericNestedTest {

  // Record R with three nested single-double records (f1-f3 of type D) and
  // three int fields (f4-f6).
  private static final String NESTED_RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \n" + "{ \"type\": \"record\", \"name\": \"D\", \"fields\": [\n"
      + "{\"name\": \"dbl\", \"type\": \"double\" }]\n" + "} },\n" + "{ \"name\": \"f2\", \"type\": \"D\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"D\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /**
   * Serializes the pre-built nested records field-by-field, bypassing the
   * datum-writer machinery.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    for (final GenericRecord rec : state.testData) {
      GenericRecord inner;
      inner = (GenericRecord) rec.get(0);
      e.writeDouble((Double) inner.get(0));
      inner = (GenericRecord) rec.get(1);
      e.writeDouble((Double) inner.get(0));
      inner = (GenericRecord) rec.get(2);
      e.writeDouble((Double) inner.get(0));
      e.writeInt((Integer) rec.get(3));
      e.writeInt((Integer) rec.get(4));
      e.writeInt((Integer) rec.get(5));
    }
  }

  /**
   * Decodes field-by-field and assembles the nested record by hand, bypassing
   * the datum-reader machinery.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    // Schema of the nested record D, shared by fields f1-f3.
    Schema doubleSchema = state.schema.getFields().get(0).schema();
    for (int i = 0; i < state.getBatchSize(); i++) {
      GenericRecord rec = new GenericData.Record(state.schema);
      GenericRecord inner;
      inner = new GenericData.Record(doubleSchema);
      inner.put(0, d.readDouble());
      rec.put(0, inner);
      inner = new GenericData.Record(doubleSchema);
      inner.put(0, d.readDouble());
      rec.put(1, inner);
      inner = new GenericData.Record(doubleSchema);
      inner.put(0, d.readDouble());
      rec.put(2, inner);
      rec.put(3, d.readInt());
      rec.put(4, d.readInt());
      rec.put(5, d.readInt());
      // Hand the assembled record to the blackhole so the JIT cannot
      // eliminate the record construction as dead code.
      blackhole.consume(rec);
    }
  }

  /** Per-thread state holding the nested records to encode. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private final Schema schema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(NESTED_RECORD_SCHEMA);
    }

    /**
     * Setup the trial data: one fully-populated nested record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      final Random r = super.getRandom();
      Schema doubleSchema = schema.getFields().get(0).schema();
      for (int i = 0; i < testData.length; i++) {
        GenericRecord rec = new GenericData.Record(schema);
        GenericRecord inner;
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(0, inner);
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(1, inner);
        inner = new GenericData.Record(doubleSchema);
        inner.put(0, r.nextDouble());
        rec.put(2, inner);
        rec.put(3, r.nextInt());
        rec.put(4, r.nextInt());
        rec.put(5, r.nextInt());
        testData[i] = rec;
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes that each invocation decodes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(NESTED_RECORD_SCHEMA);
    }

    /**
     * Generate test data: write the raw field values matching R's binary
     * encoding for one record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final Random r = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
      }
      // Flush before snapshotting so no bytes remain buffered inside the
      // encoder (harmless for direct encoders, required for buffered ones).
      encoder.flush();
      this.testData = baos.toByteArray();
    }

    /** Fresh validating decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }
}
| 7,551 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericWithDefaultTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericWithDefaultTest {

  // Record R: three doubles, three ints, and two string fields carrying
  // default values.
  private static final String RECORD_SCHEMA_WITH_DEFAULT = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f7\", \"type\": \"string\", " + "\"default\": \"undefined\" },\n"
      + "{ \"name\": \"f8\", \"type\": \"string\"," + "\"default\": \"undefined\" }\n" + "] }";

  /** Serializes the pre-built records through a GenericDatumWriter. */
  @Benchmark
  // Report per-record scores, consistent with the sibling Generic*Test benchmarks;
  // without this JMH would report per-batch figures.
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(state.schema);
    for (final GenericRecord rec : state.testData) {
      writer.write(rec, e);
    }
  }

  /** Deserializes the pre-encoded records; results go to the blackhole. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final GenericDatumReader<Object> reader = new GenericDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(reader.read(null, d));
    }
  }

  /** Per-thread state holding the records to encode. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private final Schema schema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
    }

    /**
     * Setup the trial data: one fully-populated record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      final Random r = super.getRandom();
      for (int i = 0; i < testData.length; i++) {
        final GenericRecord rec = new GenericData.Record(schema);
        rec.put(0, r.nextDouble());
        rec.put(1, r.nextDouble());
        rec.put(2, r.nextDouble());
        rec.put(3, r.nextInt());
        rec.put(4, r.nextInt());
        rec.put(5, r.nextInt());
        rec.put(6, randomString(r));
        rec.put(7, randomString(r));
        testData[i] = rec;
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes that each invocation decodes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
    }

    /**
     * Generate test data: serialize one random record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(this.schema);
      final Random r = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord rec = new GenericData.Record(schema);
        rec.put(0, r.nextDouble());
        rec.put(1, r.nextDouble());
        rec.put(2, r.nextDouble());
        rec.put(3, r.nextInt());
        rec.put(4, r.nextInt());
        rec.put(5, r.nextInt());
        rec.put(6, randomString(r));
        rec.put(7, randomString(r));
        writer.write(rec, encoder);
      }
      encoder.flush();
      this.testData = baos.toByteArray();
    }

    /** Fresh validating decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }

  /** Builds a random lowercase-ASCII string of length 0-69. */
  private static String randomString(Random r) {
    char[] data = new char[r.nextInt(70)];
    for (int j = 0; j < data.length; j++) {
      data[j] = (char) ('a' + r.nextInt('z' - 'a'));
    }
    return new String(data);
  }
}
| 7,552 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericTest {

  // Flat record R: three doubles followed by three ints.
  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /** Serializes the pre-built records through a GenericDatumWriter. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final Encoder e = state.encoder;
    final GenericDatumWriter<Object> writer = new GenericDatumWriter<>(state.schema);
    for (final GenericRecord rec : state.testData) {
      writer.write(rec, e);
    }
  }

  /** Deserializes the pre-encoded records; results go to the blackhole. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final Decoder d = state.decoder;
    final GenericDatumReader<Object> reader = new GenericDatumReader<>(state.schema);
    for (int i = 0; i < state.getBatchSize(); i++) {
      blackhole.consume(reader.read(null, d));
    }
  }

  /** Per-thread state holding the records to encode. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {

    private final Schema schema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.schema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Setup the trial data: one fully-populated record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      final Random r = super.getRandom();
      for (int i = 0; i < testData.length; i++) {
        final GenericRecord rec = new GenericData.Record(schema);
        rec.put(0, r.nextDouble());
        rec.put(1, r.nextDouble());
        rec.put(2, r.nextDouble());
        rec.put(3, r.nextInt());
        rec.put(4, r.nextInt());
        rec.put(5, r.nextInt());
        testData[i] = rec;
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes that each invocation decodes. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema schema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.schema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Generate test data: write the raw field values matching R's binary
     * encoding for one record per batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      Encoder encoder = super.newEncoder(true, baos);
      final Random r = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeDouble(r.nextDouble());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
        encoder.writeInt(r.nextInt());
      }
      // Flush before snapshotting so no bytes remain buffered inside the
      // encoder (harmless for direct encoders, required for buffered ones).
      encoder.flush();
      this.testData = baos.toByteArray();
    }

    /** Fresh validating decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(schema, super.newDecoder(this.testData));
    }
  }
}
| 7,553 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericWithOutOfOrderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
public class GenericWithOutOfOrderTest {

  // Writer schema: fields f1-f6 in declaration order.
  private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  // Reader schema: same fields, declared in a different order, forcing the
  // resolver to reorder on read.
  private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
      + "{ \"name\": \"f4\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

  /**
   * Reads records back through a reader schema whose fields are declared in a
   * different order than the writer's; decoded records go to the blackhole.
   */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final GenericDatumReader<Object> datumReader = new GenericDatumReader<>(state.writerSchema, state.readerSchema);
    final Decoder in = state.decoder;
    int remaining = state.getBatchSize();
    while (remaining-- > 0) {
      blackhole.consume(datumReader.read(null, in));
    }
  }

  /** Per-thread state: records serialized with the in-order writer schema. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {

    private final Schema readerSchema;
    private final Schema writerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
      this.writerSchema = new Schema.Parser().parse(RECORD_SCHEMA);
    }

    /**
     * Generate test data: serialize one random record per batch slot with the
     * in-order writer schema.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      final GenericDatumWriter<Object> datumWriter = new GenericDatumWriter<>(this.writerSchema);
      final Random random = super.getRandom();
      for (int i = 0; i < getBatchSize(); i++) {
        final GenericRecord record = new GenericData.Record(writerSchema);
        record.put(0, random.nextDouble());
        record.put(1, random.nextDouble());
        record.put(2, random.nextDouble());
        record.put(3, random.nextInt());
        record.put(4, random.nextInt());
        record.put(5, random.nextInt());
        datumWriter.write(record, out);
      }
      out.flush();
      this.testData = sink.toByteArray();
    }

    /** Fresh decoder over the same bytes for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = super.newDecoder(this.testData);
    }
  }
}
| 7,554 |
0 | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test | Create_ds/avro/lang/java/perf/src/main/java/org/apache/avro/perf/test/generic/GenericStringTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.perf.test.generic;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.perf.test.BasicState;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Level;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;
/**
 * JMH benchmark measuring generic encode/decode of records made up of three
 * random lowercase string fields.
 */
public class GenericStringTest {

  /** Record schema: three string fields f1..f3. */
  private static final String GENERIC_STRINGS = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
      + "{ \"name\": \"f1\", \"type\": \"string\" },\n" + "{ \"name\": \"f2\", \"type\": \"string\" },\n"
      + "{ \"name\": \"f3\", \"type\": \"string\" }\n" + "] }";

  /** Encodes every pre-built record in the batch to a null sink. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void encode(final TestStateEncode state) throws Exception {
    final GenericDatumWriter<Object> datumWriter = new GenericDatumWriter<>(state.readerSchema);
    final Encoder sink = state.encoder;
    for (int i = 0; i < state.testData.length; i++) {
      datumWriter.write(state.testData[i], sink);
    }
  }

  /** Decodes a full batch of pre-encoded records through a validating decoder. */
  @Benchmark
  @OperationsPerInvocation(BasicState.BATCH_SIZE)
  public void decode(final Blackhole blackhole, final TestStateDecode state) throws Exception {
    final GenericDatumReader<Object> datumReader = new GenericDatumReader<>(state.readerSchema);
    final Decoder input = state.decoder;
    int remaining = state.getBatchSize();
    while (remaining-- > 0) {
      blackhole.consume(datumReader.read(null, input));
    }
  }

  /** Per-thread state holding the records to encode. */
  @State(Scope.Thread)
  public static class TestStateEncode extends BasicState {
    private final Schema readerSchema;
    private GenericRecord[] testData;
    private Encoder encoder;

    public TestStateEncode() {
      super();
      this.readerSchema = new Schema.Parser().parse(GENERIC_STRINGS);
    }

    /**
     * Setup the trial data: one record per batch slot, each carrying three
     * random string fields.
     *
     * @throws Exception Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws Exception {
      this.encoder = super.newEncoder(false, getNullOutputStream());
      this.testData = new GenericRecord[getBatchSize()];
      for (int slot = 0; slot < this.testData.length; slot++) {
        final GenericRecord record = new GenericData.Record(readerSchema);
        record.put(0, randomString(super.getRandom()));
        record.put(1, randomString(super.getRandom()));
        record.put(2, randomString(super.getRandom()));
        this.testData[slot] = record;
      }
    }
  }

  /** Per-thread state holding the pre-encoded bytes to decode. */
  @State(Scope.Thread)
  public static class TestStateDecode extends BasicState {
    private final Schema readerSchema;
    private byte[] testData;
    private Decoder decoder;

    public TestStateDecode() {
      super();
      this.readerSchema = new Schema.Parser().parse(GENERIC_STRINGS);
    }

    /**
     * Generate test data: three random strings per record, one record per
     * batch slot.
     *
     * @throws IOException Could not setup test data
     */
    @Setup(Level.Trial)
    public void doSetupTrial() throws IOException {
      final ByteArrayOutputStream sink = new ByteArrayOutputStream();
      final Encoder out = super.newEncoder(true, sink);
      int remaining = getBatchSize();
      while (remaining-- > 0) {
        out.writeString(randomString(super.getRandom()));
        out.writeString(randomString(super.getRandom()));
        out.writeString(randomString(super.getRandom()));
      }
      this.testData = sink.toByteArray();
    }

    /** Wrap a fresh decoder in a validating decoder for every invocation. */
    @Setup(Level.Invocation)
    public void doSetupInvocation() throws Exception {
      this.decoder = DecoderFactory.get().validatingDecoder(readerSchema, super.newDecoder(this.testData));
    }
  }

  /** A random string of 0-69 lowercase letters drawn from 'a'..'y'. */
  private static String randomString(Random r) {
    final int length = r.nextInt(70);
    final StringBuilder sb = new StringBuilder(length);
    for (int i = 0; i < length; i++) {
      sb.append((char) ('a' + r.nextInt('z' - 'a')));
    }
    return sb.toString();
  }
}
| 7,555 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/specific/TestGeneratedCode.java | /*
* Copyright 2017 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.test.FullRecordV1;
import org.apache.avro.specific.test.FullRecordV2;
import org.apache.avro.specific.test.RecordWithErrorField;
import org.apache.avro.specific.test.TestError;
import org.apache.avro.util.Utf8;

import org.junit.Assert;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Round-trip tests for generated classes when custom (hand-rolled) coders are
 * enabled on the {@link SpecificData} model.
 */
public class TestGeneratedCode {
  /** Shared model; custom coders are (re-)enabled before every test. */
  private final static SpecificData MODEL = new SpecificData();
  private final static Schema V1S = FullRecordV1.getClassSchema();
  private final static Schema V2S = FullRecordV2.getClassSchema();

  @BeforeEach
  public void setUp() {
    MODEL.setCustomCoders(true);
  }

  /** Writes and reads a V1 record with identical writer/reader schemas. */
  @Test
  void withoutSchemaMigration() throws IOException {
    FullRecordV1 src = new FullRecordV1(true, 87231, 731L, 54.2832F, 38.321, "Hi there", null);
    assertTrue(((SpecificRecordBase) src).hasCustomCoders(), "Test schema must allow for custom coders.");
    ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
    Encoder e = EncoderFactory.get().directBinaryEncoder(out, null);
    DatumWriter<FullRecordV1> w = (DatumWriter<FullRecordV1>) MODEL.createDatumWriter(V1S);
    w.write(src, e);
    e.flush();
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    Decoder d = DecoderFactory.get().directBinaryDecoder(in, null);
    DatumReader<FullRecordV1> r = (DatumReader<FullRecordV1>) MODEL.createDatumReader(V1S);
    FullRecordV1 dst = r.read(null, d);
    // Was JUnit 4 Assert.assertEquals; this class otherwise uses JUnit 5.
    assertEquals(src, dst);
  }

  /** Writes a V2 record and reads it back as V1, exercising schema resolution. */
  @Test
  void withSchemaMigration() throws IOException {
    Map<CharSequence, CharSequence> map = new HashMap<>();
    FullRecordV2 src = new FullRecordV2(true, 731, 87231, 38L, 54.2832F, "Hi there",
        ByteBuffer.wrap(Utf8.getBytesFor("Hello, world!")), map);
    assertTrue(((SpecificRecordBase) src).hasCustomCoders(), "Test schema must allow for custom coders.");
    ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
    Encoder e = EncoderFactory.get().directBinaryEncoder(out, null);
    DatumWriter<FullRecordV2> w = (DatumWriter<FullRecordV2>) MODEL.createDatumWriter(V2S);
    w.write(src, e);
    e.flush();
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    Decoder d = DecoderFactory.get().directBinaryDecoder(in, null);
    DatumReader<FullRecordV1> r = (DatumReader<FullRecordV1>) MODEL.createDatumReader(V2S, V1S);
    FullRecordV1 dst = r.read(null, d);
    FullRecordV1 expected = new FullRecordV1(true, 87231, 731L, 54.2832F, 38.0, null, "Hello, world!");
    assertEquals(expected, dst);
  }

  /** Error-typed fields disable custom coders; the generic path must still round-trip. */
  @Test
  void withErrorField() throws IOException {
    TestError srcError = TestError.newBuilder().setMessage$("Oops").build();
    RecordWithErrorField src = new RecordWithErrorField("Hi there", srcError);
    // JUnit 5 argument order: condition first, message last (the JUnit 4 call
    // previously used here had them reversed).
    assertFalse(((SpecificRecordBase) src).hasCustomCoders(),
        "Test schema with error field cannot allow for custom coders.");
    Schema schema = RecordWithErrorField.getClassSchema();
    ByteArrayOutputStream out = new ByteArrayOutputStream(1024);
    Encoder e = EncoderFactory.get().directBinaryEncoder(out, null);
    DatumWriter<RecordWithErrorField> w = (DatumWriter<RecordWithErrorField>) MODEL.createDatumWriter(schema);
    w.write(src, e);
    e.flush();
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    Decoder d = DecoderFactory.get().directBinaryDecoder(in, null);
    DatumReader<RecordWithErrorField> r = (DatumReader<RecordWithErrorField>) MODEL.createDatumReader(schema);
    RecordWithErrorField dst = r.read(null, d);
    TestError expectedError = TestError.newBuilder().setMessage$("Oops").build();
    RecordWithErrorField expected = new RecordWithErrorField("Hi there", expectedError);
    assertEquals(expected, dst);
  }
}
| 7,556 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/specific/TestSpecificData.java | /*
* Copyright 2021 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ElementVisitor;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.TypeParameterElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import com.sun.source.util.JavacTask;
import org.apache.avro.Schema;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for {@link SpecificData} and compiler-generated classes: checks that
 * generated code annotates overriding methods with {@code @Override} and that
 * specific records resolve through the reading thread's context classloader.
 */
public class TestSpecificData {

  @Test
  void separateThreadContextClassLoader() throws Exception {
    // Generate target/foo/Bar.java from the test schema.
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/foo.Bar.avsc"));
    SpecificCompiler compiler = new SpecificCompiler(schema);
    compiler.setStringType(GenericData.StringType.String);
    compiler.compileToDestination(null, new File("target"));

    // Encode one record with the generic writer.
    GenericRecord bar = new GenericData.Record(schema);
    bar.put("title", "hello");
    bar.put("created_at", 1630126246000L);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(bar, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();

    // Analyze the generated source: every method that overrides an inherited
    // method must carry an @Override annotation.
    JavaCompiler javac = ToolProvider.getSystemJavaCompiler();
    StandardJavaFileManager fileManager = javac.getStandardFileManager(null, null, null);
    Iterable<? extends JavaFileObject> units = fileManager.getJavaFileObjects("target/foo/Bar.java");
    JavaCompiler.CompilationTask task1 = javac.getTask(null, fileManager, null, null, null, units);
    JavacTask jcTask = (JavacTask) task1;
    Iterable<? extends Element> analyze = jcTask.analyze();
    GeneratedCodeController ctrl = new GeneratedCodeController();
    for (Element el : analyze) {
      if (el.getKind() == ElementKind.CLASS) {
        List<String> accept = el.accept(ctrl, 0);
        assertTrue(accept.isEmpty(), accept.stream().collect(Collectors.joining("\n\t")));
      }
    }
    // Compile for real so the class can be loaded from target/ below.
    javac.getTask(null, fileManager, null, null, null, units).call();
    fileManager.close();

    // Decode on a thread whose context classloader can see the compiled class.
    AtomicReference<Exception> ref = new AtomicReference<>();
    ClassLoader cl = new URLClassLoader(new URL[] { new File("target/").toURI().toURL() });
    Thread t = new Thread() {
      @Override
      public void run() {
        SpecificDatumReader<Object> reader = new SpecificDatumReader<>(schema);
        try {
          Object o = reader.read(null, DecoderFactory.get().binaryDecoder(data, null));
          System.out.println(o.getClass() + ": " + o);
        } catch (Exception ex) {
          ref.set(ex);
        }
      }
    };
    t.setContextClassLoader(cl);
    t.start();
    t.join();
    Exception ex = ref.get();
    if (ex != null) {
      ex.printStackTrace();
      fail(ex.getMessage());
    }
  }

  /**
   * Element visitor that returns, per visited class, messages for methods that
   * override an inherited method without an {@code @Override} annotation. An
   * empty result list means the class passed the check.
   */
  static class GeneratedCodeController implements ElementVisitor<List<String>, Integer> {

    @Override
    public List<String> visit(Element e, Integer integer) {
      return null;
    }

    @Override
    public List<String> visit(Element e) {
      return this.visit(e, 1);
    }

    @Override
    public List<String> visitPackage(PackageElement e, Integer integer) {
      // Aggregate the findings of all types declared in the package.
      return e.getEnclosedElements().stream().map((Element sub) -> sub.accept(this, 1)).filter(Objects::nonNull)
          .flatMap(List::stream).collect(Collectors.toList());
    }

    @Override
    public List<String> visitType(TypeElement e, Integer integer) {
      // Load every supertype that is visible on the classpath and collect the
      // overridable public instance methods they declare.
      List<TypeMirror> interfaces = this.allInterfaces(e);
      List<Method> methods = interfaces.stream().filter((TypeMirror tm) -> tm.getKind() == TypeKind.DECLARED)
          .map(TypeMirror::toString).map((String typeName) -> {
            try {
              return Thread.currentThread().getContextClassLoader().loadClass(typeName);
            } catch (ClassNotFoundException ex) {
              return null; // supertype not loadable: skip it
            }
          }).filter(Objects::nonNull).map(Class::getMethods).flatMap(Arrays::stream)
          .filter((Method m) -> Modifier.isPublic(m.getModifiers()) && !Modifier.isStatic(m.getModifiers())
              && !Modifier.isFinal(m.getModifiers()) && m.getDeclaringClass() != Object.class)
          .collect(Collectors.toList());
      // Flag declared methods that match an inherited method but carry no
      // @Override annotation.
      Stream<String> errors = e.getEnclosedElements().stream()
          .filter((Element el) -> el.getKind() == ElementKind.METHOD).map(ExecutableElement.class::cast)
          .filter((ExecutableElement declM) -> GeneratedCodeController.findFirst(declM, methods) != null)
          .filter((ExecutableElement declM) -> declM.getAnnotation(Override.class) == null)
          .map((ExecutableElement declM) -> "'" + declM.getReturnType().toString() + " " + declM.getSimpleName()
              + "(...)' method doesn't have @Override annotation");
      // Recurse into nested types.
      Stream<String> subError = e.getEnclosedElements().stream().map((Element sub) -> sub.accept(this, 1))
          .filter(Objects::nonNull).flatMap(List::stream);
      return Stream.concat(errors, subError).collect(Collectors.toList());
    }

    /**
     * Returns the interfaces implemented by {@code e}, plus its superclass
     * (excluding {@link Object}) and, transitively, the supertypes of that
     * superclass.
     */
    private List<TypeMirror> allInterfaces(TypeElement e) {
      List<TypeMirror> allInterfaces = new ArrayList<>(e.getInterfaces());
      TypeMirror superclass = e.getSuperclass();
      if (superclass != null && !Objects.equals(superclass.toString(), "java.lang.Object")) {
        allInterfaces.add(superclass);
        if (superclass.getKind() == TypeKind.DECLARED) {
          final Element element = ((DeclaredType) superclass).asElement();
          if (element instanceof TypeElement) {
            // Bug fix: the recursive result was previously discarded, so
            // supertypes further up the chain were never considered.
            allInterfaces.addAll(allInterfaces((TypeElement) element));
          }
        }
      }
      return allInterfaces;
    }

    /** First reflective method matching the declared method, or null. */
    private static Method findFirst(ExecutableElement ref, List<Method> methods) {
      return methods.stream().filter((Method m) -> GeneratedCodeController.areMethodSame(ref, m)).findFirst()
          .orElse(null);
    }

    /**
     * Whether the source-level method declaration and the reflective method
     * agree on name, (assignable) return type, and parameter list.
     */
    private static boolean areMethodSame(ExecutableElement declaredMethod, Method interfaceMethod) {
      boolean res = Objects.equals(declaredMethod.getSimpleName().toString(), interfaceMethod.getName());
      if (!res) {
        return false;
      }
      TypeMirror type = declaredMethod.getReturnType();
      if (!type.toString().equals(interfaceMethod.getReturnType().getName())) {
        try {
          Class<?> declaredReturnedType = Thread.currentThread().getContextClassLoader().loadClass(type.toString());
          res &= interfaceMethod.getReturnType().isAssignableFrom(declaredReturnedType);
        } catch (ClassNotFoundException ex) {
          return false;
        }
      }
      List<? extends VariableElement> parameters = declaredMethod.getParameters();
      Class<?>[] parameterTypes = interfaceMethod.getParameterTypes();
      if (parameters.size() != parameterTypes.length) {
        return false;
      }
      for (int i = 0; i < parameterTypes.length; i++) {
        res &= areEquivalent(parameters.get(i), parameterTypes[i]);
      }
      return res;
    }

    private static boolean areEquivalent(VariableElement sourceParam, Class<?> typeParam) {
      // NOTE(review): this compares the parameter's *name* against the
      // parameter's *type name*, and javax.lang.model.element.Name does not
      // equal a String by content; the comparison looks like it cannot match
      // as intended. Left as-is pending confirmation of the intended check.
      return Objects.equals(sourceParam.getSimpleName(), typeParam.getName());
    }

    @Override
    public List<String> visitVariable(VariableElement e, Integer integer) {
      return Collections.emptyList();
    }

    @Override
    public List<String> visitExecutable(ExecutableElement e, Integer integer) {
      return null;
    }

    @Override
    public List<String> visitTypeParameter(TypeParameterElement e, Integer integer) {
      return Collections.emptyList();
    }

    @Override
    public List<String> visitUnknown(Element e, Integer integer) {
      return Collections.emptyList();
    }
  }
}
| 7,557 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestLogicalTypes.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
 * Verifies that logical-type annotations in an IDL file are carried through
 * to the parsed schema as Avro {@link LogicalType} instances.
 */
public class TestLogicalTypes {

  /** Schema of org.apache.avro.test.LogicalTypeFields, parsed from the IDL. */
  private Schema logicalTypeFields;

  @BeforeEach
  public void setup() throws ParseException {
    // NOTE(review): the Idl parser is not closed here; confirm whether that
    // matters for a stream-backed parser.
    final ClassLoader loader = Thread.currentThread().getContextClassLoader();
    final Idl idl = new Idl(loader.getResourceAsStream("logicalTypes.avdl"), "UTF-8");
    final Protocol protocol = idl.CompilationUnit();
    this.logicalTypeFields = protocol.getType("org.apache.avro.test.LogicalTypeFields");
  }

  @Test
  void dateBecomesLogicalType() {
    assertEquals(LogicalTypes.date(), fieldLogicalType("aDate"));
  }

  @Test
  void timeMsBecomesLogicalType() {
    assertEquals(LogicalTypes.timeMillis(), fieldLogicalType("aTime"));
  }

  @Test
  void timestampMsBecomesLogicalType() {
    assertEquals(LogicalTypes.timestampMillis(), fieldLogicalType("aTimestamp"));
  }

  @Test
  void localTimestampMsBecomesLogicalType() {
    assertEquals(LogicalTypes.localTimestampMillis(), fieldLogicalType("aLocalTimestamp"));
  }

  @Test
  void decimalBecomesLogicalType() {
    assertEquals(LogicalTypes.decimal(6, 2), fieldLogicalType("pocketMoney"));
  }

  @Test
  void uuidBecomesLogicalType() {
    assertEquals(LogicalTypes.uuid(), fieldLogicalType("identifier"));
  }

  @Test
  void annotatedLongBecomesLogicalType() {
    assertEquals(LogicalTypes.timestampMicros(), fieldLogicalType("anotherTimestamp"));
  }

  @Test
  void annotatedBytesFieldBecomesLogicalType() {
    assertEquals(LogicalTypes.decimal(6, 2), fieldLogicalType("allowance"));
  }

  @Test
  void incorrectlyAnnotatedBytesFieldHasNoLogicalType() {
    final Schema byteArraySchema = this.logicalTypeFields.getField("byteArray").schema();
    // The annotation survives as plain properties, but no logical type is set.
    assertNull(byteArraySchema.getLogicalType());
    assertEquals("decimal", byteArraySchema.getObjectProp("logicalType"));
    assertEquals(3000000000L, byteArraySchema.getObjectProp("precision")); // Not an int, so not a valid precision
    assertEquals(0, byteArraySchema.getObjectProp("scale"));
  }

  /** Convenience: the logical type attached to the schema of the named field. */
  private LogicalType fieldLogicalType(final String fieldName) {
    return this.logicalTypeFields.getField(fieldName).schema().getLogicalType();
  }
}
| 7,558 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static java.util.Objects.requireNonNull;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
* Simple test harness for Idl. This relies on an input/ and output/ directory.
* Inside the input/ directory are .avdl files. Each file should have a
* corresponding .avpr file in output/. When you run the test, it generates and
* stringifies each .avdl file and compares it to the expected output, failing
* if the two differ.
*
* To make it simpler to write these tests, you can run ant -Dtestcase=TestIdl
* -Dtest.idl.mode=write, which will *replace* all expected output.
*/
public class TestIdl {
  private static final File TEST_DIR = new File(System.getProperty("test.idl.dir", "src/test/idl"));
  private static final File TEST_INPUT_DIR = new File(TEST_DIR, "input");
  private static final File TEST_OUTPUT_DIR = new File(TEST_DIR, "output");
  /** "run" compares generated output to expected files; "write" regenerates them. */
  private static final String TEST_MODE = System.getProperty("test.idl.mode", "run");
  /** Comparison tests discovered from the input/ directory. */
  private List<GenTest> tests;

  /** Builds one {@link GenTest} per non-hidden .avdl file in the input directory. */
  @BeforeEach
  public void loadTests() {
    assertTrue(TEST_DIR.exists());
    assertTrue(TEST_INPUT_DIR.exists());
    assertTrue(TEST_OUTPUT_DIR.exists());
    tests = new ArrayList<>();
    for (File inF : requireNonNull(TEST_INPUT_DIR.listFiles())) {
      if (!inF.getName().endsWith(".avdl"))
        continue;
      if (inF.getName().startsWith("."))
        continue;
      File outF = new File(TEST_OUTPUT_DIR, inF.getName().replaceFirst("\\.avdl$", ".avpr"));
      tests.add(new GenTest(inF, outF));
    }
    // AVRO-3706 : test folder with space in name.
    File inputWorkSpace = new File(TEST_DIR, "work space");
    File root = new File(inputWorkSpace, "root.avdl");
    File rootResult = new File(inputWorkSpace, "root.avpr");
    tests.add(new GenTest(root, rootResult));
  }

  /** Runs every comparison test, failing at the end if any of them failed. */
  @Test
  void runTests() throws Exception {
    if (!"run".equals(TEST_MODE))
      return;
    int failed = 0;
    for (GenTest t : tests) {
      try {
        t.run();
      } catch (Exception e) {
        failed++;
        System.err.println("Failed: " + t.testName());
        e.printStackTrace(System.err);
      }
    }
    if (failed > 0) {
      fail(failed + " tests failed");
    }
  }

  /** In "write" mode, regenerates all expected output files. */
  @Test
  void writeTests() throws Exception {
    if (!"write".equals(TEST_MODE))
      return;
    for (GenTest t : tests) {
      t.write();
    }
  }

  /** Checks doc-comment propagation and the warnings for misplaced doc comments. */
  @Test
  void docCommentsAndWarnings() throws Exception {
    try (Idl parser = new Idl(new File(TEST_INPUT_DIR, "comments.avdl"))) {
      final Protocol protocol = parser.CompilationUnit();
      final List<String> warnings = parser.getWarningsAfterParsing();
      assertEquals("Documented Enum", protocol.getType("testing.DocumentedEnum").getDoc());
      assertEquals("Documented Fixed Type", protocol.getType("testing.DocumentedFixed").getDoc());
      final Schema documentedError = protocol.getType("testing.DocumentedError");
      assertEquals("Documented Error", documentedError.getDoc());
      assertEquals("Documented Reason Field", documentedError.getField("reason").doc());
      assertEquals("Default Doc Explanation Field", documentedError.getField("explanation").doc());
      final Map<String, Protocol.Message> messages = protocol.getMessages();
      final Protocol.Message documentedMethod = messages.get("documentedMethod");
      assertEquals("Documented Method", documentedMethod.getDoc());
      assertEquals("Documented Parameter", documentedMethod.getRequest().getField("message").doc());
      assertEquals("Default Documented Parameter", documentedMethod.getRequest().getField("defMsg").doc());
      assertNull(protocol.getType("testing.UndocumentedEnum").getDoc());
      assertNull(protocol.getType("testing.UndocumentedFixed").getDoc());
      assertNull(protocol.getType("testing.UndocumentedRecord").getDoc());
      assertNull(messages.get("undocumentedMethod").getDoc());
      final String pattern1 = "Found documentation comment at line %d, column %d. Ignoring previous one at line %d, column %d: \"%s\""
          + "\nDid you mean to use a multiline comment ( /* ... */ ) instead?";
      final String pattern2 = "Ignoring out-of-place documentation comment at line %d, column %d: \"%s\""
          + "\nDid you mean to use a multiline comment ( /* ... */ ) instead?";
      assertEquals(Arrays.asList(String.format(pattern1, 21, 47, 21, 10, "Dangling Enum1"),
          String.format(pattern2, 21, 47, "Dangling Enum2"), String.format(pattern1, 23, 9, 22, 9, "Dangling Enum3"),
          String.format(pattern1, 24, 9, 23, 9, "Dangling Enum4"),
          String.format(pattern1, 25, 5, 24, 9, "Dangling Enum5"), String.format(pattern2, 25, 5, "Dangling Enum6"),
          String.format(pattern1, 27, 5, 26, 5, "Dangling Enum7"),
          String.format(pattern1, 28, 5, 27, 5, "Dangling Enum8"), String.format(pattern2, 28, 5, "Dangling Enum9"),
          String.format(pattern1, 34, 5, 33, 5, "Dangling Fixed1"),
          String.format(pattern1, 35, 5, 34, 5, "Dangling Fixed2"),
          String.format(pattern1, 36, 5, 35, 5, "Dangling Fixed3"),
          String.format(pattern1, 37, 5, 36, 5, "Dangling Fixed4"), String.format(pattern2, 37, 5, "Dangling Fixed5"),
          String.format(pattern1, 43, 5, 42, 5, "Dangling Error1"), String.format(pattern2, 43, 5, "Dangling Field1"),
          String.format(pattern2, 46, 5, "Dangling Field2"), String.format(pattern2, 47, 5, "Dangling Error2"),
          String.format(pattern1, 55, 5, 54, 5, "Dangling Param1"), String.format(pattern2, 55, 5, "Dangling Param2"),
          String.format(pattern2, 58, 5, "Dangling Param3"), String.format(pattern1, 60, 5, 59, 5, "Dangling Method1"),
          String.format(pattern1, 61, 5, 60, 5, "Dangling Method2"),
          String.format(pattern2, 61, 5, "Dangling Method3")), warnings);
    }
  }

  /**
   * An individual comparison test
   */
  private static class GenTest {
    private final File in, expectedOut;

    public GenTest(File in, File expectedOut) {
      this.in = in;
      this.expectedOut = expectedOut;
    }

    /** Parses the input IDL and returns the stringified protocol. */
    private String generate() throws Exception {
      ClassLoader cl = Thread.currentThread().getContextClassLoader();
      URL[] newPathURL = new URL[] { cl.getResource("putOnClassPath-test-resource.jar") };
      // Close the parser (and the helper classloader) even if parsing throws;
      // previously both leaked on failure.
      try (URLClassLoader ucl = new URLClassLoader(newPathURL, cl); Idl parser = new Idl(in, ucl)) {
        return parser.CompilationUnit().toString();
      }
    }

    public String testName() {
      return this.in.getName();
    }

    /** Generates the protocol and compares it to the expected JSON output. */
    public void run() throws Exception {
      String output = generate();
      String slurped = slurp(expectedOut);
      // NOTE(review): "\\r" is the two-character sequence backslash+r (a JSON
      // escape), not a carriage return; confirm this was intended over "\r".
      assertEquals(slurped.trim(), output.replace("\\r", "").trim());
    }

    public void write() throws Exception {
      writeFile(expectedOut, generate());
    }

    /** Reads the file as UTF-8 and returns its JSON re-serialized canonically. */
    private static String slurp(File f) throws IOException {
      StringBuilder builder = new StringBuilder();
      try (BufferedReader in = new BufferedReader(
          new InputStreamReader(new FileInputStream(f), StandardCharsets.UTF_8))) {
        String line;
        while ((line = in.readLine()) != null) {
          builder.append(line);
        }
      }
      ObjectMapper mapper = new ObjectMapper();
      JsonNode json = mapper.readTree(builder.toString());
      return mapper.writer().writeValueAsString(json);
    }

    private static void writeFile(File f, String s) throws IOException {
      // NOTE(review): FileWriter uses the platform default charset while
      // slurp() reads UTF-8; consider an explicit UTF-8 writer. Kept as-is to
      // avoid changing the bytes of regenerated expected-output files.
      try (FileWriter w = new FileWriter(f)) {
        w.write(s);
      }
    }
  }
}
| 7,559 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/DocCommentHelperTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import org.junit.jupiter.api.Test;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
/**
 * Tests {@link DocCommentHelper}: tracking of the current doc comment, the
 * warnings emitted for misplaced or superseded doc comments, and indent
 * stripping.
 */
public class DocCommentHelperTest {

  @Test
  public void noWarnings() {
    DocCommentHelper.getAndClearWarnings(); // Clear warnings
    DocCommentHelper.setDoc(token(1, 1, "This is a token."));
    // Bug fix: JUnit 5 is assertEquals(expected, actual); the arguments were
    // reversed, which would produce a misleading failure message.
    assertEquals("This is a token.", DocCommentHelper.getDoc());
    DocCommentHelper.clearDoc(); // Should be a no-op. If not, it adds a warning.
    assertEquals(emptyList(), DocCommentHelper.getAndClearWarnings(), "There should be no warnings");
  }

  /**
   * Create a doc comment token. Does not include the initial '/**'.
   *
   * @param line               the line where the comment starts
   * @param column             the column where the comment starts (the position
   *                           of the '/**')
   * @param tokenWithoutSuffix the comment content (without the trailing
   *                           '<span>*</span>/')
   * @return a mock token
   */
  private Token token(int line, int column, String tokenWithoutSuffix) {
    final Token token = new Token();
    token.image = tokenWithoutSuffix + "*/";
    token.beginLine = line;
    // The token content begins after the 3-character '/**' marker.
    token.beginColumn = column + 3;
    return token;
  }

  @Test
  public void warningAfterSecondDoc() {
    DocCommentHelper.getAndClearWarnings(); // Clear warnings
    DocCommentHelper.setDoc(token(3, 2, "This is the first token."));
    DocCommentHelper.setDoc(token(5, 4, "This is the second token."));
    // Bug fix: expected value first (see noWarnings above).
    assertEquals("This is the second token.", DocCommentHelper.getDoc());
    assertEquals(singletonList(
        "Found documentation comment at line 5, column 4. Ignoring previous one at line 3, column 2: \"This is the first token.\"\n"
            + "Did you mean to use a multiline comment ( /* ... */ ) instead?"),
        DocCommentHelper.getAndClearWarnings(), "There should be a warning");
  }

  @Test
  public void warningAfterUnusedDoc() {
    DocCommentHelper.getAndClearWarnings(); // Clear warnings
    DocCommentHelper.setDoc(token(3, 2, "This is a token."));
    // Clearing a pending (never-read) doc comment discards it and records a warning.
    DocCommentHelper.clearDoc();
    assertNull(DocCommentHelper.getDoc());
    assertEquals(
        singletonList("Ignoring out-of-place documentation comment at line 3, column 2: \"This is a token.\"\n"
            + "Did you mean to use a multiline comment ( /* ... */ ) instead?"),
        DocCommentHelper.getAndClearWarnings(), "There should be a warning");
  }

  @Test
  public void stripIndentsFromDocCommentWithStars() {
    // Leading whitespace-plus-star prefixes are removed from every line.
    String parsedComment = "* First line\n\t * Second Line\n\t * * Third Line\n\t *\n\t * Fifth Line";
    String schemaComment = "First line\nSecond Line\n* Third Line\n\nFifth Line";
    assertEquals(schemaComment, DocCommentHelper.stripIndents(parsedComment));
  }

  @Test
  public void stripIndentsFromDocCommentWithoutStars() {
    // Without a star prefix, only the common leading indent is removed.
    String parsedComment = "First line\n\t Second Line\n\t * Third Line\n\t \n\t Fifth Line";
    String schemaComment = "First line\nSecond Line\n * Third Line\n \n Fifth Line";
    assertEquals(schemaComment, DocCommentHelper.stripIndents(parsedComment));
  }
}
| 7,560 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestReferenceAnnotationNotAllowed.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
public class TestReferenceAnnotationNotAllowed {

  @Test
  void referenceAnnotationNotAllowed() {
    // Compiling an IDL file that annotates a type *reference* must be rejected
    // by the parser with a precise error location.
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    Idl compiler = new Idl(loader.getResourceAsStream("AnnotationOnTypeReference.avdl"), "UTF-8");
    try {
      compiler.CompilationUnit();
    } catch (ParseException expected) {
      assertEquals("Type references may not be annotated, at line 29, column 17", expected.getMessage());
      return;
    }
    fail("Compilation should fail: annotations on type references are not allowed.");
  }
}
| 7,561 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestSchemaResolver.java | /*
* Copyright 2017 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Tests for {@link SchemaResolver}: resolution of cyclic IDL type references
 * and the precondition checks on unresolved-schema markers.
 */
public class TestSchemaResolver {

  @Test
  void resolving() throws ParseException, MalformedURLException, IOException {
    // Build the path to src/test/idl/cycle.avdl relative to the working directory.
    String currentWorkPath = new File(".").getAbsolutePath();
    String testIdl = currentWorkPath + File.separator
        + String.join(File.separator, "src", "test", "idl", "cycle.avdl");
    Idl compiler = new Idl(new File(testIdl));
    Protocol protocol = compiler.CompilationUnit();
    // Removed debug System.out.println of the protocol; the assertion below is
    // the actual check: all five mutually-referencing types must resolve.
    assertEquals(5, protocol.getTypes().size());
  }

  @Test
  void isUnresolvedSchemaError1() {
    assertThrows(IllegalArgumentException.class, () -> {
      // No "org.apache.avro.compiler.idl.unresolved.name" property
      Schema s = SchemaBuilder.record("R").fields().endRecord();
      SchemaResolver.getUnresolvedSchemaName(s);
    });
  }

  @Test
  void isUnresolvedSchemaError2() {
    assertThrows(IllegalArgumentException.class, () -> {
      // No "UnresolvedSchema" property
      Schema s = SchemaBuilder.record("R").prop("org.apache.avro.compiler.idl.unresolved.name", "x").fields()
          .endRecord();
      SchemaResolver.getUnresolvedSchemaName(s);
    });
  }

  @Test
  void isUnresolvedSchemaError3() {
    assertThrows(IllegalArgumentException.class, () -> {
      // Namespace not "org.apache.avro.compiler".
      Schema s = SchemaBuilder.record("UnresolvedSchema").prop("org.apache.avro.compiler.idl.unresolved.name", "x")
          .fields().endRecord();
      SchemaResolver.getUnresolvedSchemaName(s);
    });
  }

  @Test
  void getUnresolvedSchemaNameError() {
    assertThrows(IllegalArgumentException.class, () -> {
      // Only record schemas can carry the unresolved-schema marker.
      Schema s = SchemaBuilder.fixed("a").size(10);
      SchemaResolver.getUnresolvedSchemaName(s);
    });
  }
}
| 7,562 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestCycle.java | /*
* Copyright 2015 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Verifies that an IDL protocol containing mutually recursive record types
 * compiles, and that a cyclic generic record round-trips losslessly through
 * binary serialization.
 */
public class TestCycle {

  private static final Logger LOG = LoggerFactory.getLogger(TestCycle.class);

  @Test
  void cycleGeneration() throws ParseException, IOException {
    final ClassLoader cl = Thread.currentThread().getContextClassLoader();
    Idl idl = new Idl(cl.getResourceAsStream("input/cycle.avdl"), "UTF-8");
    Protocol protocol = idl.CompilationUnit();
    // Parameterized logging instead of pre-building the JSON string.
    LOG.info("{}", protocol);

    // Generated sources must compile to the target directory without errors.
    SpecificCompiler compiler = new SpecificCompiler(protocol);
    compiler.setStringType(GenericData.StringType.String);
    File output = new File("./target");
    compiler.compileToDestination(null, output);

    // Index the protocol's types by simple name for record building below.
    Map<String, Schema> schemas = new HashMap<>();
    for (Schema schema : protocol.getTypes()) {
      schemas.put(schema.getName(), schema);
    }

    // A leaf node with no children.
    GenericRecordBuilder rb2 = new GenericRecordBuilder(schemas.get("SampleNode"));
    rb2.set("count", 10);
    rb2.set("subNodes", Collections.emptyList()); // typed empty list, not raw EMPTY_LIST
    GenericData.Record node = rb2.build();

    GenericRecordBuilder mb = new GenericRecordBuilder(schemas.get("Method"));
    mb.set("declaringClass", "Test");
    mb.set("methodName", "test");
    GenericData.Record method = mb.build();

    GenericRecordBuilder spb = new GenericRecordBuilder(schemas.get("SamplePair"));
    spb.set("method", method);
    spb.set("node", node);
    GenericData.Record sp = spb.build();

    // A node that references the pair, closing the type cycle.
    GenericRecordBuilder rb = new GenericRecordBuilder(schemas.get("SampleNode"));
    rb.set("count", 10);
    rb.set("subNodes", Collections.singletonList(sp));
    GenericData.Record record = rb.build();

    serDeserRecord(record);
  }

  /**
   * Round-trips {@code data} through direct binary encoding/decoding and
   * asserts the decoded record renders identically.
   */
  private static void serDeserRecord(GenericData.Record data) throws IOException {
    ByteArrayOutputStream bab = new ByteArrayOutputStream();
    // Parameterized generic types instead of raw GenericDatumWriter/Reader.
    GenericDatumWriter<GenericData.Record> writer = new GenericDatumWriter<>(data.getSchema());
    final BinaryEncoder directBinaryEncoder = EncoderFactory.get().directBinaryEncoder(bab, null);
    writer.write(data, directBinaryEncoder);
    directBinaryEncoder.flush();
    ByteArrayInputStream bis = new ByteArrayInputStream(bab.toByteArray(), 0, bab.size());
    GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<>(data.getSchema());
    BinaryDecoder directBinaryDecoder = DecoderFactory.get().directBinaryDecoder(bis, null);
    GenericData.Record read = reader.read(null, directBinaryDecoder);
    assertEquals(data.toString(), read.toString());
  }
}
| 7,563 |
0 | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/test/java/org/apache/avro/compiler/specific/TestSpecificCompiler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.specific;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticListener;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import org.apache.avro.AvroTypeException;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData.StringType;
import org.apache.avro.specific.SpecificData;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestSpecificCompiler {
private static final Logger LOG = LoggerFactory.getLogger(TestSpecificCompiler.class);
/**
* JDK 18+ generates a warning for each member field which does not implement
* java.io.Serializable. Since Avro is an alternative serialization format, we
* can just ignore this warning.
*/
private static final String NON_TRANSIENT_INSTANCE_FIELD_MESSAGE = "non-transient instance field of a serializable class declared with a non-serializable type";
@TempDir
public File OUTPUT_DIR;
private File outputFile;
  @BeforeEach
  public void setUp() {
    // Location of the class generated from src/test/resources/simple_record.avsc.
    this.outputFile = new File(this.OUTPUT_DIR, "SimpleRecord.java");
  }
private File src = new File("src/test/resources/simple_record.avsc");
  /**
   * Compiles the generated output files with the system javac, failing the test
   * on any compile error or lint warning. Convenience overload of
   * {@code assertCompilesWithJavaCompiler(File, Collection, boolean)} that does
   * not ignore warnings.
   */
  static void assertCompilesWithJavaCompiler(File dstDir, Collection<SpecificCompiler.OutputFile> outputs)
      throws IOException {
    assertCompilesWithJavaCompiler(dstDir, outputs, false);
  }
  /**
   * Uses the system's java compiler to actually compile the generated code.
   *
   * Writes each output file into {@code dstDir}, compiles the lot with
   * {@code -Xlint:all}, asserts the compilation succeeds, and (unless
   * {@code ignoreWarnings} is set) asserts that no warnings were produced.
   */
  static void assertCompilesWithJavaCompiler(File dstDir, Collection<SpecificCompiler.OutputFile> outputs,
      boolean ignoreWarnings) throws IOException {
    if (outputs.isEmpty()) {
      return; // Nothing to compile!
    }
    JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
    StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
    // Materialize the generated sources on disk so javac can read them.
    List<File> javaFiles = new ArrayList<>();
    for (SpecificCompiler.OutputFile o : outputs) {
      javaFiles.add(o.writeToDestination(null, dstDir));
    }
    final List<Diagnostic<?>> warnings = new ArrayList<>();
    DiagnosticListener<JavaFileObject> diagnosticListener = diagnostic -> {
      switch (diagnostic.getKind()) {
      case ERROR:
        // Do not add these to warnings because they will fail the compilation, anyway.
        LOG.error("{}", diagnostic);
        break;
      case WARNING:
      case MANDATORY_WARNING:
        String message = diagnostic.getMessage(Locale.ROOT);
        // JDK 18+ emits a serialization warning for every non-transient field of a
        // non-serializable type; irrelevant for Avro records, so filter it out.
        if (!NON_TRANSIENT_INSTANCE_FIELD_MESSAGE.equals(message)) {
          LOG.warn("{}", diagnostic);
          warnings.add(diagnostic);
        }
        break;
      case NOTE:
      case OTHER:
        LOG.debug("{}", diagnostic);
        break;
      }
    };
    JavaCompiler.CompilationTask cTask = compiler.getTask(null, fileManager, diagnosticListener,
        Collections.singletonList("-Xlint:all"), null, fileManager.getJavaFileObjects(javaFiles.toArray(new File[0])));
    boolean compilesWithoutError = cTask.call();
    assertTrue(compilesWithoutError);
    if (!ignoreWarnings) {
      assertEquals(0, warnings.size(), "Warnings produced when compiling generated code with -Xlint:all");
    }
  }
private static Schema createSampleRecordSchema(int numStringFields, int numDoubleFields) {
SchemaBuilder.FieldAssembler<Schema> sb = SchemaBuilder.record("sample.record").fields();
for (int i = 0; i < numStringFields; i++) {
sb.name("sf_" + i).type().stringType().noDefault();
}
for (int i = 0; i < numDoubleFields; i++) {
sb.name("df_" + i).type().doubleType().noDefault();
}
return sb.endRecord();
}
private SpecificCompiler createCompiler() throws IOException {
Schema.Parser parser = new Schema.Parser();
Schema schema = parser.parse(this.src);
SpecificCompiler compiler = new SpecificCompiler(schema);
String velocityTemplateDir = "src/main/velocity/org/apache/avro/compiler/specific/templates/java/classic/";
compiler.setTemplateDir(velocityTemplateDir);
compiler.setStringType(StringType.CharSequence);
return compiler;
}
@Test
void canReadTemplateFilesOnTheFilesystem() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(new File(OUTPUT_DIR, "SimpleRecord.java").exists());
}
@Test
void publicFieldVisibility() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setFieldVisibility(SpecificCompiler.FieldVisibility.PUBLIC);
assertTrue(compiler.publicFields());
assertFalse(compiler.privateFields());
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
// No line, once trimmed, should start with a deprecated field declaration
// nor a private field declaration. Since the nested builder uses private
// fields, we cannot do the second check.
line = line.trim();
assertFalse(line.startsWith("@Deprecated public int value"),
"Line started with a deprecated field declaration: " + line);
}
}
}
@Test
void createAllArgsConstructor() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
boolean foundAllArgsConstructor = false;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while (!foundAllArgsConstructor && (line = reader.readLine()) != null) {
foundAllArgsConstructor = line.contains("All-args constructor");
}
}
assertTrue(foundAllArgsConstructor);
}
@Test
void maxValidParameterCounts() throws Exception {
Schema validSchema1 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT, 0);
assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, "testMaxValidParameterCounts1"),
new SpecificCompiler(validSchema1).compile());
createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT - 2, 1);
assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, "testMaxValidParameterCounts2"),
new SpecificCompiler(validSchema1).compile());
}
@Test
void invalidParameterCounts() throws Exception {
Schema invalidSchema1 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT + 1, 0);
SpecificCompiler compiler = new SpecificCompiler(invalidSchema1);
assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, "testInvalidParameterCounts1"), compiler.compile());
Schema invalidSchema2 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT, 10);
compiler = new SpecificCompiler(invalidSchema2);
assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, "testInvalidParameterCounts2"), compiler.compile());
}
@Test
void maxParameterCounts() throws Exception {
Schema validSchema1 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT, 0);
assertTrue(new SpecificCompiler(validSchema1).compile().size() > 0);
Schema validSchema2 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT - 2, 1);
assertTrue(new SpecificCompiler(validSchema2).compile().size() > 0);
Schema validSchema3 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT - 1, 1);
assertTrue(new SpecificCompiler(validSchema3).compile().size() > 0);
Schema validSchema4 = createSampleRecordSchema(SpecificCompiler.MAX_FIELD_PARAMETER_UNIT_COUNT + 1, 0);
assertTrue(new SpecificCompiler(validSchema4).compile().size() > 0);
}
@Test
void calcAllArgConstructorParameterUnitsFailure() {
assertThrows(RuntimeException.class, () -> {
Schema nonRecordSchema = SchemaBuilder.array().items().booleanType();
new SpecificCompiler().calcAllArgConstructorParameterUnits(nonRecordSchema);
});
}
@Test
void privateFieldVisibility() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setFieldVisibility(SpecificCompiler.FieldVisibility.PRIVATE);
assertFalse(compiler.publicFields());
assertTrue(compiler.privateFields());
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line = null;
while ((line = reader.readLine()) != null) {
// No line, once trimmed, should start with a public field declaration
// or with a deprecated public field declaration
line = line.trim();
assertFalse(line.startsWith("public int value"), "Line started with a public field declaration: " + line);
assertFalse(line.startsWith("@Deprecated public int value"),
"Line started with a deprecated field declaration: " + line);
}
}
}
@Test
void settersCreatedByDefault() throws IOException {
SpecificCompiler compiler = createCompiler();
assertTrue(compiler.isCreateSetters());
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
int foundSetters = 0;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
// We should find the setter in the main class
line = line.trim();
if (line.startsWith("public void setValue(")) {
foundSetters++;
}
}
}
assertEquals(1, foundSetters, "Found the wrong number of setters");
}
@Test
void settersNotCreatedWhenOptionTurnedOff() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setCreateSetters(false);
assertFalse(compiler.isCreateSetters());
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
// No setter should be found
line = line.trim();
assertFalse(line.startsWith("public void setValue("), "No line should include the setter: " + line);
}
}
}
@Test
void settingOutputCharacterEncoding() throws Exception {
SpecificCompiler compiler = createCompiler();
// Generated file in default encoding
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
byte[] fileInDefaultEncoding = new byte[(int) this.outputFile.length()];
FileInputStream is = new FileInputStream(this.outputFile);
is.read(fileInDefaultEncoding);
is.close(); // close input stream otherwise delete might fail
if (!this.outputFile.delete()) {
throw new IllegalStateException("unable to delete " + this.outputFile); // delete otherwise compiler might not
// overwrite because src timestamp hasn't
// changed.
}
// Generate file in another encoding (make sure it has different number of bytes
// per character)
String differentEncoding = Charset.defaultCharset().equals(Charset.forName("UTF-16")) ? "UTF-32" : "UTF-16";
compiler.setOutputCharacterEncoding(differentEncoding);
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
byte[] fileInDifferentEncoding = new byte[(int) this.outputFile.length()];
is = new FileInputStream(this.outputFile);
is.read(fileInDifferentEncoding);
is.close();
// Compare as bytes
assertThat("Generated file should contain different bytes after setting non-default encoding",
fileInDefaultEncoding, not(equalTo(fileInDifferentEncoding)));
// Compare as strings
assertThat("Generated files should contain the same characters in the proper encodings",
new String(fileInDefaultEncoding), equalTo(new String(fileInDifferentEncoding, differentEncoding)));
}
@Test
void javaTypeWithDecimalLogicalTypeEnabled() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(true);
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
Schema localTimestampSchema = LogicalTypes.localTimestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
Schema decimalSchema = LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES));
Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
// Date/time types should always use upper level java classes
// Decimal type target class depends on configuration
// UUID should always be CharSequence since we haven't added its
// support in SpecificRecord
assertEquals("java.time.LocalDate", compiler.javaType(dateSchema), "Should use LocalDate for date type");
assertEquals("java.time.LocalTime", compiler.javaType(timeSchema), "Should use LocalTime for time-millis type");
assertEquals("java.time.Instant", compiler.javaType(timestampSchema),
"Should use DateTime for timestamp-millis type");
assertEquals("java.time.LocalDateTime", compiler.javaType(localTimestampSchema),
"Should use LocalDateTime for local-timestamp-millis type");
assertEquals("java.math.BigDecimal", compiler.javaType(decimalSchema), "Should use Java BigDecimal type");
assertEquals("new org.apache.avro.Conversions.UUIDConversion()", compiler.conversionInstance(uuidSchema),
"Should use org.apache.avro.Conversions.UUIDConversion() type");
}
@Test
void javaTypeWithDecimalLogicalTypeDisabled() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(false);
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
Schema decimalSchema = LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES));
Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
// Date/time types should always use upper level java classes
// Decimal type target class depends on configuration
// UUID should always be CharSequence since we haven't added its
// support in SpecificRecord
assertEquals("java.time.LocalDate", compiler.javaType(dateSchema), "Should use LocalDate for date type");
assertEquals("java.time.LocalTime", compiler.javaType(timeSchema), "Should use LocalTime for time-millis type");
assertEquals("java.time.Instant", compiler.javaType(timestampSchema),
"Should use DateTime for timestamp-millis type");
assertEquals("java.nio.ByteBuffer", compiler.javaType(decimalSchema), "Should use ByteBuffer type");
assertEquals("new org.apache.avro.Conversions.UUIDConversion()", compiler.conversionInstance(uuidSchema),
"Should use org.apache.avro.Conversions.UUIDConversion() type");
}
@Test
void javaTypeWithDateTimeTypes() throws Exception {
SpecificCompiler compiler = createCompiler();
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timeMicrosSchema = LogicalTypes.timeMicros().addToSchema(Schema.create(Schema.Type.LONG));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
Schema timestampMicrosSchema = LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG));
// Date/time types should always use upper level java classes
assertEquals("java.time.LocalDate", compiler.javaType(dateSchema), "Should use java.time.LocalDate for date type");
assertEquals("java.time.LocalTime", compiler.javaType(timeSchema),
"Should use java.time.LocalTime for time-millis type");
assertEquals("java.time.Instant", compiler.javaType(timestampSchema),
"Should use java.time.Instant for timestamp-millis type");
assertEquals("java.time.LocalTime", compiler.javaType(timeMicrosSchema),
"Should use java.time.LocalTime for time-micros type");
assertEquals("java.time.Instant", compiler.javaType(timestampMicrosSchema),
"Should use java.time.Instant for timestamp-micros type");
}
@Test
void javaUnbox() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(false);
Schema intSchema = Schema.create(Schema.Type.INT);
Schema longSchema = Schema.create(Schema.Type.LONG);
Schema floatSchema = Schema.create(Schema.Type.FLOAT);
Schema doubleSchema = Schema.create(Schema.Type.DOUBLE);
Schema boolSchema = Schema.create(Schema.Type.BOOLEAN);
assertEquals("int", compiler.javaUnbox(intSchema, false), "Should use int for Type.INT");
assertEquals("long", compiler.javaUnbox(longSchema, false), "Should use long for Type.LONG");
assertEquals("float", compiler.javaUnbox(floatSchema, false), "Should use float for Type.FLOAT");
assertEquals("double", compiler.javaUnbox(doubleSchema, false), "Should use double for Type.DOUBLE");
assertEquals("boolean", compiler.javaUnbox(boolSchema, false), "Should use boolean for Type.BOOLEAN");
// see AVRO-2569
Schema nullSchema = Schema.create(Schema.Type.NULL);
assertEquals("void", compiler.javaUnbox(nullSchema, true), "Should use void for Type.NULL");
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
// Date/time types should always use upper level java classes, even though
// their underlying representations are primitive types
assertEquals("java.time.LocalDate", compiler.javaUnbox(dateSchema, false), "Should use LocalDate for date type");
assertEquals("java.time.LocalTime", compiler.javaUnbox(timeSchema, false),
"Should use LocalTime for time-millis type");
assertEquals("java.time.Instant", compiler.javaUnbox(timestampSchema, false),
"Should use DateTime for timestamp-millis type");
}
@Test
void javaUnboxDateTime() throws Exception {
SpecificCompiler compiler = createCompiler();
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
// Date/time types should always use upper level java classes, even though
// their underlying representations are primitive types
assertEquals("java.time.LocalDate", compiler.javaUnbox(dateSchema, false),
"Should use java.time.LocalDate for date type");
assertEquals("java.time.LocalTime", compiler.javaUnbox(timeSchema, false),
"Should use java.time.LocalTime for time-millis type");
assertEquals("java.time.Instant", compiler.javaUnbox(timestampSchema, false),
"Should use java.time.Instant for timestamp-millis type");
}
@Test
void nullableLogicalTypesJavaUnboxDecimalTypesEnabled() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(true);
// Nullable types should return boxed types instead of primitive types
Schema nullableDecimalSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL),
LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES)));
Schema nullableDecimalSchema2 = Schema.createUnion(
LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES)), Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableDecimalSchema1, false), "java.math.BigDecimal", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableDecimalSchema2, false), "java.math.BigDecimal", "Should return boxed type");
}
@Test
void nullableLogicalTypesJavaUnboxDecimalTypesDisabled() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(false);
// Since logical decimal types are disabled, a ByteBuffer is expected.
Schema nullableDecimalSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL),
LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES)));
Schema nullableDecimalSchema2 = Schema.createUnion(
LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES)), Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableDecimalSchema1, false), "java.nio.ByteBuffer", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableDecimalSchema2, false), "java.nio.ByteBuffer", "Should return boxed type");
}
@Test
void nullableTypesJavaUnbox() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(false);
// Nullable types should return boxed types instead of primitive types
Schema nullableIntSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.INT));
Schema nullableIntSchema2 = Schema.createUnion(Schema.create(Schema.Type.INT), Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableIntSchema1, false), "java.lang.Integer", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableIntSchema2, false), "java.lang.Integer", "Should return boxed type");
Schema nullableLongSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.LONG));
Schema nullableLongSchema2 = Schema.createUnion(Schema.create(Schema.Type.LONG), Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableLongSchema1, false), "java.lang.Long", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableLongSchema2, false), "java.lang.Long", "Should return boxed type");
Schema nullableFloatSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.FLOAT));
Schema nullableFloatSchema2 = Schema.createUnion(Schema.create(Schema.Type.FLOAT), Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableFloatSchema1, false), "java.lang.Float", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableFloatSchema2, false), "java.lang.Float", "Should return boxed type");
Schema nullableDoubleSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL),
Schema.create(Schema.Type.DOUBLE));
Schema nullableDoubleSchema2 = Schema.createUnion(Schema.create(Schema.Type.DOUBLE),
Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableDoubleSchema1, false), "java.lang.Double", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableDoubleSchema2, false), "java.lang.Double", "Should return boxed type");
Schema nullableBooleanSchema1 = Schema.createUnion(Schema.create(Schema.Type.NULL),
Schema.create(Schema.Type.BOOLEAN));
Schema nullableBooleanSchema2 = Schema.createUnion(Schema.create(Schema.Type.BOOLEAN),
Schema.create(Schema.Type.NULL));
assertEquals(compiler.javaUnbox(nullableBooleanSchema1, false), "java.lang.Boolean", "Should return boxed type");
assertEquals(compiler.javaUnbox(nullableBooleanSchema2, false), "java.lang.Boolean", "Should return boxed type");
}
  @Test
  void getUsedCustomLogicalTypeFactories() throws Exception {
    // Registers a custom logical type and verifies the compiler reports exactly its
    // factory class; built-in logical types (the nested "date") must be excluded.
    LogicalTypes.register("string-custom", new StringCustomLogicalTypeFactory());
    SpecificCompiler compiler = createCompiler();
    compiler.setEnableDecimalLogicalType(true);
    final Schema schema = new Schema.Parser().parse("{\"type\":\"record\"," + "\"name\":\"NestedLogicalTypesRecord\","
        + "\"namespace\":\"org.apache.avro.codegentest.testdata\","
        + "\"doc\":\"Test nested types with logical types in generated Java classes\"," + "\"fields\":["
        + "{\"name\":\"nestedRecord\",\"type\":" + "{\"type\":\"record\",\"name\":\"NestedRecord\",\"fields\":"
        + "[{\"name\":\"nullableDateField\"," + "\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}},"
        + "{\"name\":\"myLogical\",\"type\":{\"type\":\"string\",\"logicalType\":\"string-custom\"}}]}");
    final Map<String, String> usedCustomLogicalTypeFactories = compiler.getUsedCustomLogicalTypeFactories(schema);
    assertEquals(1, usedCustomLogicalTypeFactories.size());
    final Map.Entry<String, String> entry = usedCustomLogicalTypeFactories.entrySet().iterator().next();
    assertEquals("string-custom", entry.getKey());
    assertEquals("org.apache.avro.compiler.specific.TestSpecificCompiler.StringCustomLogicalTypeFactory",
        entry.getValue());
  }
  @Test
  void emptyGetUsedCustomLogicalTypeFactories() throws Exception {
    // A schema that only uses built-in logical types (date) must report no custom
    // factories, even though a custom factory is registered globally.
    LogicalTypes.register("string-custom", new StringCustomLogicalTypeFactory());
    SpecificCompiler compiler = createCompiler();
    compiler.setEnableDecimalLogicalType(true);
    final Schema schema = new Schema.Parser().parse("{\"type\":\"record\"," + "\"name\":\"NestedLogicalTypesRecord\","
        + "\"namespace\":\"org.apache.avro.codegentest.testdata\","
        + "\"doc\":\"Test nested types with logical types in generated Java classes\"," + "\"fields\":["
        + "{\"name\":\"nestedRecord\"," + "\"type\":{\"type\":\"record\",\"name\":\"NestedRecord\",\"fields\":"
        + "[{\"name\":\"nullableDateField\","
        + "\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}}]}");
    final Map<String, String> usedCustomLogicalTypeFactories = compiler.getUsedCustomLogicalTypeFactories(schema);
    assertEquals(0, usedCustomLogicalTypeFactories.size());
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypes() throws Exception {
    // A record with a nullable decimal field should require exactly one conversion
    // class: DecimalConversion.
    SpecificCompiler compiler = createCompiler();
    compiler.setEnableDecimalLogicalType(true);
    Schema nullableDecimal1 = Schema.createUnion(Schema.create(Schema.Type.NULL),
        LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES)));
    Schema schemaWithNullableDecimal1 = Schema.createRecord("WithNullableDecimal", "", "", false,
        Collections.singletonList(new Schema.Field("decimal", nullableDecimal1, "", null)));
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schemaWithNullableDecimal1);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.Conversions.DecimalConversion", usedConversionClasses.iterator().next());
  }
  @Test
  void getUsedConversionClassesForNullableTimestamps() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // timestamp-millis and timestamp-micros used to cause collisions when both were
    // present or added as converters (AVRO-2481).
    final Schema tsMillis = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
    final Schema tsMicros = LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG));
    // Each logical type appears both as a required field and inside a nullable union;
    // the conversion set must still be de-duplicated to one entry per logical type.
    final Collection<String> conversions = compiler.getUsedConversionClasses(SchemaBuilder.record("WithTimestamps")
        .fields().name("tsMillis").type(tsMillis).noDefault().name("tsMillisOpt").type().unionOf().nullType().and()
        .type(tsMillis).endUnion().noDefault().name("tsMicros").type(tsMicros).noDefault().name("tsMicrosOpt").type()
        .unionOf().nullType().and().type(tsMicros).endUnion().noDefault().endRecord());
    assertEquals(2, conversions.size());
    assertThat(conversions, hasItem("org.apache.avro.data.TimeConversions.TimestampMillisConversion"));
    assertThat(conversions, hasItem("org.apache.avro.data.TimeConversions.TimestampMicrosConversion"));
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypesInNestedRecord() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // A nullable date buried one record deep must still surface the DateConversion.
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NestedLogicalTypesRecord\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"nestedRecord\",\"type\":{\"type\":\"record\",\"name\":\"NestedRecord\",\"fields\":[{\"name\":\"nullableDateField\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypesInArray() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // A nullable date used as an array item type must surface the DateConversion.
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NullableLogicalTypesArray\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"arrayOfLogicalType\",\"type\":{\"type\":\"array\",\"items\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypesInArrayOfRecords() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // A nullable date inside a record that is itself an array item must surface the DateConversion.
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NestedLogicalTypesArray\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"arrayOfRecords\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"RecordInArray\",\"fields\":[{\"name\":\"nullableDateField\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}}}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypesInUnionOfRecords() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // A nullable date inside a record that is itself a union branch must surface the DateConversion.
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NestedLogicalTypesUnion\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"unionOfRecords\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"RecordInUnion\",\"fields\":[{\"name\":\"nullableDateField\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]}]}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
  }
  @Test
  void getUsedConversionClassesForNullableLogicalTypesInMapOfRecords() throws Exception {
    SpecificCompiler compiler = createCompiler();
    // A nullable date inside a record used as a map value type must surface the DateConversion.
    final Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"NestedLogicalTypesMap\",\"namespace\":\"org.apache.avro.codegentest.testdata\",\"doc\":\"Test nested types with logical types in generated Java classes\",\"fields\":[{\"name\":\"mapOfRecords\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"record\",\"name\":\"RecordInMap\",\"fields\":[{\"name\":\"nullableDateField\",\"type\":[\"null\",{\"type\":\"int\",\"logicalType\":\"date\"}]}]},\"avro.java.string\":\"String\"}}]}");
    final Collection<String> usedConversionClasses = compiler.getUsedConversionClasses(schema);
    assertEquals(1, usedConversionClasses.size());
    assertEquals("org.apache.avro.data.TimeConversions.DateConversion", usedConversionClasses.iterator().next());
  }
/**
* Checks that identifiers that may cause problems in Java code will compile
* correctly when used in a generated specific record.
*
* @param schema A schema with an identifier __test__
* that will be replaced.
* @param throwsTypeExceptionOnPrimitive If true, using a reserved word that is
* also an Avro primitive type name must
* throw an exception instead of
* generating code.
* @param dstDirPrefix Where to generate the java code before
* compiling.
*/
public void testManglingReservedIdentifiers(String schema, boolean throwsTypeExceptionOnPrimitive,
String dstDirPrefix) throws IOException {
Set<String> reservedIdentifiers = new HashSet<>();
reservedIdentifiers.addAll(SpecificData.RESERVED_WORDS);
reservedIdentifiers.addAll(SpecificCompiler.TYPE_IDENTIFIER_RESERVED_WORDS);
reservedIdentifiers.addAll(SpecificCompiler.ACCESSOR_MUTATOR_RESERVED_WORDS);
reservedIdentifiers.addAll(SpecificCompiler.ERROR_RESERVED_WORDS);
for (String reserved : reservedIdentifiers) {
try {
Schema s = new Schema.Parser().parse(schema.replace("__test__", reserved));
assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, dstDirPrefix + "_" + reserved),
new SpecificCompiler(s).compile());
} catch (AvroTypeException e) {
if (!(throwsTypeExceptionOnPrimitive && e.getMessage().contains("Schemas may not be named after primitives")))
throw e;
}
}
}
  @Test
  void mangleRecordName() throws Exception {
    // Record names that are reserved words (including primitive names) must be handled.
    testManglingReservedIdentifiers(
        SchemaBuilder.record("__test__").fields().requiredInt("field").endRecord().toString(), true,
        "testMangleRecordName");
  }
  @Test
  void mangleRecordNamespace() throws Exception {
    // Reserved words in a namespace are mangled; primitives are allowed here.
    testManglingReservedIdentifiers(
        SchemaBuilder.record("__test__.Record").fields().requiredInt("field").endRecord().toString(), false,
        "testMangleRecordNamespace");
  }
  @Test
  void mangleField() throws Exception {
    // Reserved words used as field names are mangled; primitives are allowed here.
    testManglingReservedIdentifiers(
        SchemaBuilder.record("Record").fields().requiredInt("__test__").endRecord().toString(), false,
        "testMangleField");
  }
  @Test
  void mangleEnumName() throws Exception {
    // Enum names that are reserved words (including primitive names) must be handled.
    testManglingReservedIdentifiers(SchemaBuilder.enumeration("__test__").symbols("reserved").toString(), true,
        "testMangleEnumName");
  }
  @Test
  void mangleEnumSymbol() throws Exception {
    // Reserved words used as enum symbols are mangled; primitives are allowed here.
    testManglingReservedIdentifiers(SchemaBuilder.enumeration("Enum").symbols("__test__").toString(), false,
        "testMangleEnumSymbol");
  }
  @Test
  void mangleFixedName() throws Exception {
    // Fixed-type names that are reserved words (including primitive names) must be handled.
    testManglingReservedIdentifiers(SchemaBuilder.fixed("__test__").size(2).toString(), true, "testMangleFixedName");
  }
  @Test
  void logicalTypesWithMultipleFields() throws Exception {
    // Generated code for a record with several logical-typed fields must compile.
    Schema logicalTypesWithMultipleFields = new Schema.Parser()
        .parse(new File("src/test/resources/logical_types_with_multiple_fields.avsc"));
    assertCompilesWithJavaCompiler(new File(OUTPUT_DIR, "testLogicalTypesWithMultipleFields"),
        new SpecificCompiler(logicalTypesWithMultipleFields).compile(), true);
  }
  @Test
  void unionAndFixedFields() throws Exception {
    // Generated code for a record mixing union and fixed fields must compile.
    Schema unionTypesWithMultipleFields = new Schema.Parser()
        .parse(new File("src/test/resources/union_and_fixed_fields.avsc"));
    assertCompilesWithJavaCompiler(new File(this.outputFile, "testUnionAndFixedFields"),
        new SpecificCompiler(unionTypesWithMultipleFields).compile());
  }
  @Test
  void logicalTypesWithMultipleFieldsDateTime() throws Exception {
    // Same schema as logicalTypesWithMultipleFields, compiled with default settings.
    Schema logicalTypesWithMultipleFields = new Schema.Parser()
        .parse(new File("src/test/resources/logical_types_with_multiple_fields.avsc"));
    assertCompilesWithJavaCompiler(new File(this.outputFile, "testLogicalTypesWithMultipleFieldsDateTime"),
        new SpecificCompiler(logicalTypesWithMultipleFields).compile());
  }
  @Test
  void conversionInstanceWithDecimalLogicalTypeDisabled() throws Exception {
    // With decimal logical type support disabled, every logical type still gets its
    // conversion instance except decimal, which must yield the literal "null".
    final SpecificCompiler compiler = createCompiler();
    compiler.setEnableDecimalLogicalType(false);
    final Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
    final Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
    final Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
    final Schema decimalSchema = LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES));
    final Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
    assertEquals("new org.apache.avro.data.TimeConversions.DateConversion()", compiler.conversionInstance(dateSchema),
        "Should use date conversion for date type");
    assertEquals("new org.apache.avro.data.TimeConversions.TimeMillisConversion()",
        compiler.conversionInstance(timeSchema), "Should use time conversion for time type");
    assertEquals("new org.apache.avro.data.TimeConversions.TimestampMillisConversion()",
        compiler.conversionInstance(timestampSchema), "Should use timestamp conversion for date type");
    assertEquals("null", compiler.conversionInstance(decimalSchema), "Should use null for decimal if the flag is off");
    assertEquals("new org.apache.avro.Conversions.UUIDConversion()", compiler.conversionInstance(uuidSchema),
        "Should use org.apache.avro.Conversions.UUIDConversion() for uuid if the flag is off");
  }
@Test
void conversionInstanceWithDecimalLogicalTypeEnabled() throws Exception {
SpecificCompiler compiler = createCompiler();
compiler.setEnableDecimalLogicalType(true);
Schema dateSchema = LogicalTypes.date().addToSchema(Schema.create(Schema.Type.INT));
Schema timeSchema = LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
Schema timestampSchema = LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
Schema decimalSchema = LogicalTypes.decimal(9, 2).addToSchema(Schema.create(Schema.Type.BYTES));
Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
assertEquals("new org.apache.avro.data.TimeConversions.DateConversion()", compiler.conversionInstance(dateSchema),
"Should use date conversion for date type");
assertEquals("new org.apache.avro.data.TimeConversions.TimeMillisConversion()",
compiler.conversionInstance(timeSchema), "Should use time conversion for time type");
assertEquals("new org.apache.avro.data.TimeConversions.TimestampMillisConversion()",
compiler.conversionInstance(timestampSchema), "Should use timestamp conversion for date type");
assertEquals("new org.apache.avro.Conversions.DecimalConversion()", compiler.conversionInstance(decimalSchema),
"Should use null for decimal if the flag is off");
assertEquals("new org.apache.avro.Conversions.UUIDConversion()", compiler.conversionInstance(uuidSchema),
"Should use org.apache.avro.Conversions.UUIDConversion() for uuid if the flag is off");
}
@Test
void pojoWithOptionalTurnedOffByDefault() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
assertFalse(line.contains("Optional"));
}
}
}
@Test
void pojoWithOptionalCreatedWhenOptionTurnedOn() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setGettersReturnOptional(true);
// compiler.setCreateOptionalGetters(true);
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
int optionalFound = 0;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.contains("Optional")) {
optionalFound++;
}
}
}
assertEquals(9, optionalFound);
}
@Test
void pojoWithOptionalCreateForNullableFieldsWhenOptionTurnedOn() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setGettersReturnOptional(true);
compiler.setOptionalGettersForNullableFieldsOnly(true);
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
int optionalFound = 0;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.contains("Optional")) {
optionalFound++;
}
}
}
assertEquals(5, optionalFound);
}
@Test
void pojoWithOptionalCreatedWhenOptionalForEverythingTurnedOn() throws IOException {
SpecificCompiler compiler = createCompiler();
// compiler.setGettersReturnOptional(true);
compiler.setCreateOptionalGetters(true);
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
int optionalFound = 0;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.contains("Optional")) {
optionalFound++;
}
}
}
assertEquals(17, optionalFound);
}
@Test
void pojoWithOptionalOnlyWhenNullableCreatedTurnedOnAndGettersReturnOptionalTurnedOff() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setOptionalGettersForNullableFieldsOnly(true);
compiler.compileToDestination(this.src, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
// no optionals since gettersReturnOptionalOnlyForNullable is false
assertFalse(line.contains("Optional"));
}
}
}
@Test
void additionalToolsAreInjectedIntoTemplate() throws Exception {
SpecificCompiler compiler = createCompiler();
List<Object> customTools = new ArrayList<>();
customTools.add(new String());
compiler.setAdditionalVelocityTools(customTools);
compiler.setTemplateDir("src/test/resources/templates_with_custom_tools/");
compiler.compileToDestination(this.src, this.OUTPUT_DIR);
assertTrue(this.outputFile.exists());
int itWorksFound = 0;
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.contains("It works!")) {
itWorksFound++;
}
}
}
assertEquals(1, itWorksFound);
}
@Test
void pojoWithUUID() throws IOException {
SpecificCompiler compiler = createCompiler();
compiler.setOptionalGettersForNullableFieldsOnly(true);
File avsc = new File("src/main/resources/logical-uuid.avsc");
compiler.compileToDestination(avsc, OUTPUT_DIR);
assertTrue(this.outputFile.exists());
try (BufferedReader reader = new BufferedReader(new FileReader(this.outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
line = line.trim();
if (line.contains("guid")) {
assertTrue(line.contains("java.util.UUID"));
}
}
}
}
  /**
   * Logical type factory used by the custom-logical-type tests; produces a
   * {@code LogicalType} named "string-custom" for any schema.
   */
  public static class StringCustomLogicalTypeFactory implements LogicalTypes.LogicalTypeFactory {
    @Override
    public LogicalType fromSchema(Schema schema) {
      return new LogicalType("string-custom");
    }
  }
@Test
void fieldWithUnderscore_avro3826() {
String jsonSchema = "{\n" + " \"name\": \"Value\",\n" + " \"type\": \"record\",\n" + " \"fields\": [\n"
+ " { \"name\": \"__deleted\", \"type\": \"string\"\n" + " }\n" + " ]\n" + "}";
Collection<SpecificCompiler.OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(jsonSchema))
.compile();
assertEquals(1, outputs.size());
SpecificCompiler.OutputFile outputFile = outputs.iterator().next();
assertTrue(outputFile.contents.contains("getDeleted()"));
assertFalse(outputFile.contents.contains("$0"));
assertFalse(outputFile.contents.contains("$1"));
String jsonSchema2 = "{\n" + " \"name\": \"Value\", \"type\": \"record\",\n" + " \"fields\": [\n"
+ " { \"name\": \"__deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"_deleted\", \"type\": \"string\"}\n" + " ]\n" + "}";
Collection<SpecificCompiler.OutputFile> outputs2 = new SpecificCompiler(new Schema.Parser().parse(jsonSchema2))
.compile();
assertEquals(1, outputs2.size());
SpecificCompiler.OutputFile outputFile2 = outputs2.iterator().next();
assertTrue(outputFile2.contents.contains("getDeleted()"));
assertTrue(outputFile2.contents.contains("getDeleted$0()"));
assertFalse(outputFile.contents.contains("$1"));
String jsonSchema3 = "{\n" + " \"name\": \"Value\", \"type\": \"record\",\n" + " \"fields\": [\n"
+ " { \"name\": \"__deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"_deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"deleted\", \"type\": \"string\"}\n" + " ]\n" + "}";
Collection<SpecificCompiler.OutputFile> outputs3 = new SpecificCompiler(new Schema.Parser().parse(jsonSchema3))
.compile();
assertEquals(1, outputs3.size());
SpecificCompiler.OutputFile outputFile3 = outputs3.iterator().next();
assertTrue(outputFile3.contents.contains("getDeleted()"));
assertTrue(outputFile3.contents.contains("getDeleted$0()"));
assertTrue(outputFile3.contents.contains("getDeleted$1()"));
assertFalse(outputFile3.contents.contains("$2"));
String jsonSchema4 = "{\n" + " \"name\": \"Value\", \"type\": \"record\",\n" + " \"fields\": [\n"
+ " { \"name\": \"__deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"_deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"deleted\", \"type\": \"string\"},\n"
+ " { \"name\": \"Deleted\", \"type\": \"string\"}\n" + " ]\n" + "}";
Collection<SpecificCompiler.OutputFile> outputs4 = new SpecificCompiler(new Schema.Parser().parse(jsonSchema4))
.compile();
assertEquals(1, outputs4.size());
SpecificCompiler.OutputFile outputFile4 = outputs4.iterator().next();
assertTrue(outputFile4.contents.contains("getDeleted()"));
assertTrue(outputFile4.contents.contains("getDeleted$0()"));
assertTrue(outputFile4.contents.contains("getDeleted$1()"));
assertTrue(outputFile4.contents.contains("getDeleted$2()"));
assertFalse(outputFile4.contents.contains("$3"));
}
}
/*
* Copyright 2017 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.schema;
import org.apache.avro.Schema;
import org.apache.avro.SchemaCompatibility;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestSchemas {
  // Recursive test schema: SampleNode references itself through SamplePair.node.
  // It deliberately mixes docs, enums, fixed, unions, maps, arrays and nested
  // records so the cloning/visiting tests cover every schema shape.
  private static final String SCHEMA = "{\"type\":\"record\",\"name\":\"SampleNode\",\"doc\":\"caca\","
      + "\"namespace\":\"org.spf4j.ssdump2.avro\",\n" + " \"fields\":[\n"
      + "    {\"name\":\"count\",\"type\":\"int\",\"default\":0,\"doc\":\"caca\"},\n"
      + "    {\"name\":\"kind1\",\"type\":{\"type\":\"enum\", \"name\": \"Kind1\", \"symbols\": [\"A1\", \"B1\"]}},\n"
      + "    {\"name\":\"kind2\",\"type\":{\"type\":\"enum\", \"name\": \"Kind2\", \"symbols\": [\"A2\", \"B2\"], \"doc\": \"doc\"}},\n"
      + "    {\"name\":\"pat\",\"type\":{\"type\":\"fixed\", \"name\": \"FixedPattern\", \"size\": 10}},\n"
      + "    {\"name\":\"uni\",\"type\":[\"int\", \"double\"]},\n"
      + "    {\"name\":\"mp\",\"type\":{\"type\":\"map\", \"values\": \"int\"}},\n"
      + "    {\"name\":\"subNodes\",\"type\":\n" + "       {\"type\":\"array\",\"items\":{\n"
      + "        \"type\":\"record\",\"name\":\"SamplePair\",\n" + "        \"fields\":[\n"
      + "         {\"name\":\"method\",\"type\":\n"
      + "          {\"type\":\"record\",\"name\":\"Method\",\n" + "           \"fields\":[\n"
      + "              {\"name\":\"declaringClass\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},\n"
      + "              {\"name\":\"methodName\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}\n"
      + "            ]}},\n" + "         {\"name\":\"node\",\"type\":\"SampleNode\"}]}}}" + "]}";
@Test
void textCloning() {
Schema recSchema = new Schema.Parser().parse(SCHEMA);
CloningVisitor cv = new CloningVisitor(recSchema);
Schema trimmed = Schemas.visit(recSchema, cv);
assertNull(trimmed.getDoc());
assertNotNull(recSchema.getDoc());
SchemaCompatibility.SchemaCompatibilityType compat = SchemaCompatibility
.checkReaderWriterCompatibility(trimmed, recSchema).getType();
assertEquals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE, compat);
compat = SchemaCompatibility.checkReaderWriterCompatibility(recSchema, trimmed).getType();
assertEquals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE, compat);
assertNotNull(cv.toString());
}
  @Test
  void textCloningCopyDocs() {
    Schema recSchema = new Schema.Parser().parse(SCHEMA);
    // Clone with copyDocs=true and a PropertyCopier carrying over logical types and aliases.
    Schema trimmed = Schemas.visit(recSchema, new CloningVisitor(new CloningVisitor.PropertyCopier() {
      @Override
      public void copy(final Schema first, final Schema second) {
        Schemas.copyLogicalTypes(first, second);
        Schemas.copyAliases(first, second);
      }

      @Override
      public void copy(final Schema.Field first, final Schema.Field second) {
        Schemas.copyAliases(first, second);
      }
    }, true, recSchema));
    // Unlike the default CloningVisitor, docs are preserved on the clone.
    assertEquals("caca", trimmed.getDoc());
    assertNotNull(recSchema.getDoc());
    // The clone must remain read/write compatible with the source in both directions.
    SchemaCompatibility.SchemaCompatibilityType compat = SchemaCompatibility
        .checkReaderWriterCompatibility(trimmed, recSchema).getType();
    assertEquals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE, compat);
    compat = SchemaCompatibility.checkReaderWriterCompatibility(recSchema, trimmed).getType();
    assertEquals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE, compat);
  }
@Test
void cloningError1() {
assertThrows(IllegalStateException.class, () -> {
// Visit Terminal with union
Schema recordSchema = new Schema.Parser().parse(
"{\"type\": \"record\", \"name\": \"R\", \"fields\":[{\"name\": \"f1\", \"type\": [\"int\", \"long\"]}]}");
new CloningVisitor(recordSchema).visitTerminal(recordSchema.getField("f1").schema());
});
}
@Test
void cloningError2() {
assertThrows(IllegalStateException.class, () -> {
// After visit Non-terminal with int
Schema recordSchema = new Schema.Parser()
.parse("{\"type\": \"record\", \"name\": \"R\", \"fields\":[{\"name\": \"f1\", \"type\": \"int\"}]}");
new CloningVisitor(recordSchema).afterVisitNonTerminal(recordSchema.getField("f1").schema());
});
}
@Test
void hasGeneratedJavaClass() {
assertTrue(Schemas
.hasGeneratedJavaClass(new Schema.Parser().parse("{\"type\": \"fixed\", \"name\": \"N\", \"size\": 10}")));
assertFalse(Schemas.hasGeneratedJavaClass(new Schema.Parser().parse("{\"type\": \"int\"}")));
}
@Test
void getJavaClassName() {
assertEquals("N",
Schemas.getJavaClassName(new Schema.Parser().parse("{\"type\": \"fixed\", \"name\": \"N\", \"size\": 10}")));
assertEquals("N", Schemas.getJavaClassName(
new Schema.Parser().parse("{\"type\": \"fixed\", \"name\": \"N\", \"size\": 10, \"namespace\": \"\"}")));
assertEquals("com.example.N", Schemas.getJavaClassName(new Schema.Parser()
.parse("{\"type\": \"fixed\", \"name\": \"N\", \"size\": 10, \"namespace\": \"com.example\"}")));
}
/**
 * Records the traversal as a string: terminals are appended verbatim,
 * non-terminal names are appended with a trailing '.', and '!' marks the
 * after-visit callback. The visitor's action is chosen from the schema-name
 * prefix, which lets each test drive TERMINATE / SKIP_SIBLINGS / SKIP_SUBTREE.
 */
private static class TestVisitor implements SchemaVisitor<String> {
  StringBuilder sb = new StringBuilder();

  @Override
  public SchemaVisitorAction visitTerminal(Schema terminal) {
    sb.append(terminal);
    return SchemaVisitorAction.CONTINUE;
  }

  @Override
  public SchemaVisitorAction visitNonTerminal(Schema nonTerminal) {
    final String name = nonTerminal.getName();
    sb.append(name).append('.');
    return actionFor(name, "t", "ss", "st");
  }

  @Override
  public SchemaVisitorAction afterVisitNonTerminal(Schema nonTerminal) {
    sb.append('!');
    return actionFor(nonTerminal.getName(), "ct", "css", "cst");
  }

  // Maps a schema-name prefix to the action the tests expect. The three
  // prefixes per callback are mutually non-overlapping, so check order
  // does not matter.
  private static SchemaVisitorAction actionFor(String name, String terminatePrefix, String skipSiblingsPrefix,
      String skipSubtreePrefix) {
    if (name.startsWith(terminatePrefix)) {
      return SchemaVisitorAction.TERMINATE;
    }
    if (name.startsWith(skipSiblingsPrefix)) {
      return SchemaVisitorAction.SKIP_SIBLINGS;
    }
    if (name.startsWith(skipSubtreePrefix)) {
      return SchemaVisitorAction.SKIP_SUBTREE;
    }
    return SchemaVisitorAction.CONTINUE;
  }

  @Override
  public String get() {
    return sb.toString();
  }
}
@Test
void visit1() {
  // "t1" triggers TERMINATE in visitNonTerminal: only the name is recorded.
  final String json = "{\"type\": \"record\", \"name\": \"t1\", \"fields\": [" + "{\"name\": \"f1\", \"type\": \"int\"}"
      + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("t1.", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit2() {
  // Full traversal: record name, its int field, then the after-visit marker.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": [" + "{\"name\": \"f1\", \"type\": \"int\"}"
      + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("c1.\"int\"!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit3() {
  // "ss1" triggers SKIP_SIBLINGS at the root: traversal records the name only.
  final String json = "{\"type\": \"record\", \"name\": \"ss1\", \"fields\": [" + "{\"name\": \"f1\", \"type\": \"int\"}"
      + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("ss1.", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit4() {
  // "st1" triggers SKIP_SUBTREE: fields are skipped but after-visit still runs.
  final String json = "{\"type\": \"record\", \"name\": \"st1\", \"fields\": [" + "{\"name\": \"f1\", \"type\": \"int\"}"
      + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("st1.!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit5() {
  // Nested records, no special prefixes: a complete depth-first trace.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"c2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("c1.c2.\"int\"!\"long\"!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit6() {
  // Inner "ss2" skips its siblings, so the outer long field is never visited.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ss2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("c1.ss2.!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit7() {
  // "css2" skips siblings from its after-visit callback.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"css2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("c1.css2.\"int\"!!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit8() {
  // SKIP_SUBTREE from an after-visit callback ("cst2") is not a legal action.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"cst2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"int\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertThrows(UnsupportedOperationException.class, () -> Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit9() {
  // "ct2" terminates from its after-visit callback: the outer long is not reached.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  assertEquals("c1.ct2.\"int\"!", Schemas.visit(schema, new TestVisitor()));
}
@Test
void visit10() {
  // SKIP_SUBTREE from visitTerminal is not a legal action.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"int\"}" + "]}";
  final Schema schema = new Schema.Parser().parse(json);
  final TestVisitor skipOnTerminal = new TestVisitor() {
    @Override
    public SchemaVisitorAction visitTerminal(Schema terminal) {
      return SchemaVisitorAction.SKIP_SUBTREE;
    }
  };
  assertThrows(UnsupportedOperationException.class, () -> Schemas.visit(schema, skipOnTerminal));
}
@Test
void visit11() {
  // SKIP_SIBLINGS from a terminal: each record contributes only its first field.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"c2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"},{\"name\": \"f12\", \"type\": \"double\"}" + "]}},"
      + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final TestVisitor skipSiblingsOnTerminal = new TestVisitor() {
    @Override
    public SchemaVisitorAction visitTerminal(Schema terminal) {
      sb.append(terminal).append('.');
      return SchemaVisitorAction.SKIP_SIBLINGS;
    }
  };
  assertEquals("c1.c2.\"int\".!\"long\".!", Schemas.visit(new Schema.Parser().parse(json), skipSiblingsOnTerminal));
}
@Test
void visit12() {
  // TERMINATE from the first terminal: nothing after "int" is recorded.
  final String json = "{\"type\": \"record\", \"name\": \"c1\", \"fields\": ["
      + "{\"name\": \"f1\", \"type\": {\"type\": \"record\", \"name\": \"ct2\", \"fields\": "
      + "[{\"name\": \"f11\", \"type\": \"int\"}]}}," + "{\"name\": \"f2\", \"type\": \"long\"}" + "]}";
  final TestVisitor terminateOnTerminal = new TestVisitor() {
    @Override
    public SchemaVisitorAction visitTerminal(Schema terminal) {
      sb.append(terminal).append('.');
      return SchemaVisitorAction.TERMINATE;
    }
  };
  assertEquals("c1.ct2.\"int\".", Schemas.visit(new Schema.Parser().parse(json), terminateOnTerminal));
}
@Test
void visit13() {
  // A bare primitive root: SKIP_SIBLINGS on the only node is a no-op.
  final String json = "{\"type\": \"int\"}";
  final TestVisitor skipSiblingsOnTerminal = new TestVisitor() {
    @Override
    public SchemaVisitorAction visitTerminal(Schema terminal) {
      sb.append(terminal).append('.');
      return SchemaVisitorAction.SKIP_SIBLINGS;
    }
  };
  assertEquals("\"int\".", Schemas.visit(new Schema.Parser().parse(json), skipSiblingsOnTerminal));
}
}
| 7,565 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/ResolvingVisitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.function.Function;
import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.compiler.schema.SchemaVisitor;
import org.apache.avro.compiler.schema.SchemaVisitorAction;
import org.apache.avro.compiler.schema.Schemas;
/**
* this visitor will create a clone of the original Schema and will also resolve
* all unresolved schemas
*
* by default. what attributes are copied is customizable.
*/
public final class ResolvingVisitor implements SchemaVisitor<Schema> {

  // Identity map from each original schema node to its resolved clone. It is
  // shared with recursive ResolvingVisitor runs so that a schema referenced
  // from several places is cloned only once.
  private final IdentityHashMap<Schema, Schema> replace;
  // Resolves a schema full name to its definition; returns null when unknown.
  private final Function<String, Schema> symbolTable;
  // The schema this traversal started from; get() returns its clone.
  private final Schema root;

  /**
   * Creates a visitor that clones {@code root}, resolving unresolved
   * placeholder records through {@code symbolTable}.
   *
   * @param root        the schema to clone
   * @param replace     identity map of already-cloned schemas; mutated during
   *                    the visit and may be shared across visits
   * @param symbolTable lookup of named schema definitions by full name
   */
  public ResolvingVisitor(final Schema root, final IdentityHashMap<Schema, Schema> replace,
      final Function<String, Schema> symbolTable) {
    this.replace = replace;
    this.symbolTable = symbolTable;
    this.root = root;
  }

  /**
   * Clones leaf schemas (primitives, enums, fixed) into {@link #replace}.
   * Container types reaching this callback are back-references created by
   * recursion, so their clone must already exist in the map.
   */
  @Override
  public SchemaVisitorAction visitTerminal(final Schema terminal) {
    Schema.Type type = terminal.getType();
    Schema newSchema;
    switch (type) {
    case RECORD: // recursion.
    case ARRAY:
    case MAP:
    case UNION:
      // Only legal as a back-reference: the first encounter goes through
      // visitNonTerminal/afterVisitNonTerminal instead.
      if (!replace.containsKey(terminal)) {
        throw new IllegalStateException("Schema " + terminal + " must be already processed");
      }
      return SchemaVisitorAction.CONTINUE;
    case BOOLEAN:
    case BYTES:
    case DOUBLE:
    case FLOAT:
    case INT:
    case LONG:
    case NULL:
    case STRING:
      newSchema = Schema.create(type);
      break;
    case ENUM:
      newSchema = Schema.createEnum(terminal.getName(), terminal.getDoc(), terminal.getNamespace(),
          terminal.getEnumSymbols(), terminal.getEnumDefault());
      break;
    case FIXED:
      newSchema = Schema.createFixed(terminal.getName(), terminal.getDoc(), terminal.getNamespace(),
          terminal.getFixedSize());
      break;
    default:
      throw new IllegalStateException("Unsupported schema " + terminal);
    }
    copyAllProperties(terminal, newSchema);
    replace.put(terminal, newSchema);
    return SchemaVisitorAction.CONTINUE;
  }

  /** Copies logical type, aliases, and extra properties between schemas. */
  public static void copyAllProperties(final Schema first, final Schema second) {
    Schemas.copyLogicalTypes(first, second);
    Schemas.copyAliases(first, second);
    Schemas.copyProperties(first, second);
  }

  /** Copies aliases and extra properties between record fields. */
  public static void copyAllProperties(final Field first, final Field second) {
    Schemas.copyAliases(first, second);
    Schemas.copyProperties(first, second);
  }

  /**
   * Pre-registers record clones in {@link #replace} before their fields are
   * visited, so recursive field references can find them. Unresolved
   * placeholder records are swapped for (a clone of) their real definition.
   */
  @Override
  public SchemaVisitorAction visitNonTerminal(final Schema nt) {
    Schema.Type type = nt.getType();
    if (type == Schema.Type.RECORD) {
      if (SchemaResolver.isUnresolvedSchema(nt)) {
        // unresolved schema will get a replacement that we already encountered,
        // or we will attempt to resolve.
        final String unresolvedSchemaName = SchemaResolver.getUnresolvedSchemaName(nt);
        Schema resSchema = symbolTable.apply(unresolvedSchemaName);
        if (resSchema == null) {
          throw new AvroTypeException("Unable to resolve " + unresolvedSchemaName);
        }
        Schema replacement = replace.get(resSchema);
        if (replacement == null) {
          // First time we see the real definition: clone it with a nested
          // visit that shares this visitor's symbol table.
          replace.put(nt,
              Schemas.visit(resSchema, new ResolvingVisitor(resSchema, new IdentityHashMap<>(), symbolTable)));
        } else {
          replace.put(nt, replacement);
        }
      } else {
        // create a fieldless clone. Fields will be added in afterVisitNonTerminal.
        Schema newSchema = Schema.createRecord(nt.getName(), nt.getDoc(), nt.getNamespace(), nt.isError());
        copyAllProperties(nt, newSchema);
        replace.put(nt, newSchema);
      }
    }
    return SchemaVisitorAction.CONTINUE;
  }

  /**
   * Completes container clones once all children have been cloned: fills in
   * record fields, and builds the union/array/map clone from the children's
   * entries in {@link #replace}.
   */
  @Override
  public SchemaVisitorAction afterVisitNonTerminal(final Schema nt) {
    Schema.Type type = nt.getType();
    Schema newSchema;
    switch (type) {
    case RECORD:
      // Unresolved records were already replaced wholesale in visitNonTerminal.
      if (!SchemaResolver.isUnresolvedSchema(nt)) {
        newSchema = replace.get(nt);
        List<Schema.Field> fields = nt.getFields();
        List<Schema.Field> newFields = new ArrayList<>(fields.size());
        for (Schema.Field field : fields) {
          newFields.add(new Field(field, replace.get(field.schema())));
        }
        newSchema.setFields(newFields);
      }
      return SchemaVisitorAction.CONTINUE;
    case UNION:
      List<Schema> types = nt.getTypes();
      List<Schema> newTypes = new ArrayList<>(types.size());
      for (Schema sch : types) {
        newTypes.add(replace.get(sch));
      }
      newSchema = Schema.createUnion(newTypes);
      break;
    case ARRAY:
      newSchema = Schema.createArray(replace.get(nt.getElementType()));
      break;
    case MAP:
      newSchema = Schema.createMap(replace.get(nt.getValueType()));
      break;
    default:
      throw new IllegalStateException("Illegal type " + type + ", schema " + nt);
    }
    copyAllProperties(nt, newSchema);
    replace.put(nt, newSchema);
    return SchemaVisitorAction.CONTINUE;
  }

  /** Returns the resolved clone of the root schema. */
  @Override
  public Schema get() {
    return replace.get(root);
  }

  @Override
  public String toString() {
    return "ResolvingVisitor{" + "replace=" + replace + ", symbolTable=" + symbolTable + ", root=" + root + '}';
  }
}
| 7,566 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/DocCommentHelper.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Utility class with {@code ThreadLocal} fields that allow the generated
* classes {@link Idl} and {@link IdlTokenManager} to exchange documentation
* comments without forcing explicit parsing of documentation comments.
*
* The reason this works is that all calls to this class happen within a call to
* the method {@link Idl#CompilationUnit()} (either directly or indirectly).
*/
public class DocCommentHelper {
  /**
   * Pattern to match the common whitespace indents in a multi-line String.
   * Doesn't match a single-line String, fully matches any multi-line String.
   *
   * To use: match on a {@link String#trim() trimmed} String, and then replace all
   * newlines followed by the group "indent" with a newline.
   */
  private static final Pattern WS_INDENT = Pattern.compile("(?U).*\\R(?<indent>\\h*).*(?:\\R\\k<indent>.*)*");
  /**
   * Pattern to match the whitespace indents plus common stars (1 or 2) in a
   * multi-line String. If a String fully matches, replace all occurrences of a
   * newline followed by whitespace and then the group "stars" with a newline.
   *
   * Note: partial matches are invalid.
   */
  private static final Pattern STAR_INDENT = Pattern.compile("(?U)(?<stars>\\*{1,2}).*(?:\\R\\h*\\k<stars>.*)*");

  // Pending doc comment for the current thread; set by the token manager,
  // consumed (and cleared) by the parser via getDoc()/clearDoc().
  private static final ThreadLocal<DocComment> DOC = new ThreadLocal<>();
  // Warnings accumulated while parsing on the current thread.
  private static final ThreadLocal<List<String>> WARNINGS = ThreadLocal.withInitial(ArrayList::new);

  /**
   * Return all warnings that were encountered while parsing, once. Subsequent
   * calls before parsing again will return an empty list.
   */
  static List<String> getAndClearWarnings() {
    List<String> warnings = WARNINGS.get();
    WARNINGS.remove();
    return warnings;
  }

  /**
   * Record {@code token} as the pending doc comment. If a previous doc comment
   * was never consumed, it is dropped and a warning is queued.
   */
  static void setDoc(Token token) {
    DocComment newDocComment = new DocComment(token);
    DocComment oldDocComment = DOC.get();
    if (oldDocComment != null) {
      WARNINGS.get()
          .add(String.format(
              "Found documentation comment at line %d, column %d. Ignoring previous one at line %d, column %d: \"%s\"\n"
                  + "Did you mean to use a multiline comment ( /* ... */ ) instead?",
              newDocComment.line, newDocComment.column, oldDocComment.line, oldDocComment.column, oldDocComment.text));
    }
    DOC.set(newDocComment);
  }

  /**
   * Clear any documentation (and generate a warning if there was).
   *
   * This method should NOT be used after an optional component in a grammar
   * (i.e., after a @code{[…]} or @code{…*} construct), because the optional
   * grammar part may have already caused parsing a doc comment special token
   * placed after the code block.
   */
  static void clearDoc() {
    DocComment oldDocComment = DOC.get();
    if (oldDocComment != null) {
      WARNINGS.get()
          .add(String.format(
              "Ignoring out-of-place documentation comment at line %d, column %d: \"%s\"\n"
                  + "Did you mean to use a multiline comment ( /* ... */ ) instead?",
              oldDocComment.line, oldDocComment.column, oldDocComment.text));
    }
    DOC.remove();
  }

  /**
   * Consume and return the pending doc comment text, or {@code null} if none
   * was recorded since the last call.
   */
  static String getDoc() {
    DocComment docComment = DOC.get();
    DOC.remove();
    return docComment == null ? null : docComment.text;
  }

  /* Package private to facilitate testing */
  /**
   * Strips the common indentation from a multi-line doc comment body: first a
   * shared "*" / "**" gutter if every line has one, otherwise the shared
   * leading whitespace. Single-line input is returned unchanged.
   */
  static String stripIndents(String doc) {
    Matcher starMatcher = STAR_INDENT.matcher(doc);
    if (starMatcher.matches()) {
      return doc.replaceAll("(?U)(?:^|(\\R)\\h*)\\Q" + starMatcher.group("stars") + "\\E\\h?", "$1");
    }
    Matcher whitespaceMatcher = WS_INDENT.matcher(doc);
    if (whitespaceMatcher.matches()) {
      return doc.replaceAll("(?U)(\\R)" + whitespaceMatcher.group("indent"), "$1");
    }
    return doc;
  }

  /** Doc-comment text plus the source position where the comment started. */
  private static class DocComment {
    private final String text;
    private final int line;
    private final int column;

    DocComment(Token token) {
      // The token is everything after the initial '/**', including all
      // whitespace and the ending '*/'
      int tokenLength = token.image.length();
      this.text = stripIndents(token.image.substring(0, tokenLength - 2).trim());
      this.line = token.beginLine;
      // The preceding token was "/**", and the current token includes
      // everything since (also all whitespace). Thus, we can safely subtract 3
      // from the token column to get the start of the doc comment.
      this.column = token.beginColumn - 3;
    }
  }
}
| 7,567 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/IsResolvedSchemaVisitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import org.apache.avro.Schema;
import org.apache.avro.compiler.schema.SchemaVisitor;
import org.apache.avro.compiler.schema.SchemaVisitorAction;
/**
* This visitor checks if the current schema is fully resolved.
*/
public final class IsResolvedSchemaVisitor implements SchemaVisitor<Boolean> {
boolean hasUnresolvedParts;
IsResolvedSchemaVisitor() {
hasUnresolvedParts = false;
}
@Override
public SchemaVisitorAction visitTerminal(Schema terminal) {
hasUnresolvedParts = SchemaResolver.isUnresolvedSchema(terminal);
return hasUnresolvedParts ? SchemaVisitorAction.TERMINATE : SchemaVisitorAction.CONTINUE;
}
@Override
public SchemaVisitorAction visitNonTerminal(Schema nonTerminal) {
hasUnresolvedParts = SchemaResolver.isUnresolvedSchema(nonTerminal);
if (hasUnresolvedParts) {
return SchemaVisitorAction.TERMINATE;
}
if (nonTerminal.getType() == Schema.Type.RECORD && !nonTerminal.hasFields()) {
// We're still initializing the type...
return SchemaVisitorAction.SKIP_SUBTREE;
}
return SchemaVisitorAction.CONTINUE;
}
@Override
public SchemaVisitorAction afterVisitNonTerminal(Schema nonTerminal) {
return SchemaVisitorAction.CONTINUE;
}
@Override
public Boolean get() {
return !hasUnresolvedParts;
}
}
| 7,568 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/SchemaResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.idl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.compiler.schema.Schemas;
/**
* Utility class to resolve schemas that are unavailable at the time they are
* referenced in the IDL.
*/
final class SchemaResolver {

  private SchemaResolver() {
  }

  // Property carrying the referenced name on a placeholder record.
  private static final String UR_SCHEMA_ATTR = "org.apache.avro.compiler.idl.unresolved.name";
  private static final String UR_SCHEMA_NAME = "UnresolvedSchema";
  private static final String UR_SCHEMA_NS = "org.apache.avro.compiler";
  // Makes each placeholder record name unique within the JVM.
  private static final AtomicInteger COUNTER = new AtomicInteger();

  /**
   * Create a schema to represent an "unresolved" schema: a fieldless record
   * standing in for a named schema whose definition is not known at the time
   * it is referenced.
   *
   * @param name the full name of the schema being referenced
   * @return a placeholder record carrying {@code name} as a property
   */
  static Schema unresolvedSchema(final String name) {
    // emptyList() instead of the raw-typed EMPTY_LIST constant.
    Schema schema = Schema.createRecord(UR_SCHEMA_NAME + '_' + COUNTER.getAndIncrement(), "unresolved schema",
        UR_SCHEMA_NS, false, Collections.emptyList());
    schema.addProp(UR_SCHEMA_ATTR, name);
    return schema;
  }

  /**
   * Is this schema an unresolved placeholder created by
   * {@link #unresolvedSchema(String)}?
   */
  static boolean isUnresolvedSchema(final Schema schema) {
    return (schema.getType() == Schema.Type.RECORD && schema.getProp(UR_SCHEMA_ATTR) != null && schema.getName() != null
        && schema.getName().startsWith(UR_SCHEMA_NAME) && UR_SCHEMA_NS.equals(schema.getNamespace()));
  }

  /**
   * Get the referenced name stored on an unresolved placeholder.
   *
   * @throws IllegalArgumentException if {@code schema} is not a placeholder
   */
  static String getUnresolvedSchemaName(final Schema schema) {
    if (!isUnresolvedSchema(schema)) {
      throw new IllegalArgumentException("Not a unresolved schema: " + schema);
    }
    return schema.getProp(UR_SCHEMA_ATTR);
  }

  /**
   * Is this schema fully resolved, i.e. free of unresolved placeholders at any
   * depth?
   */
  static boolean isFullyResolvedSchema(final Schema schema) {
    if (isUnresolvedSchema(schema)) {
      return false;
    } else {
      return Schemas.visit(schema, new IsResolvedSchemaVisitor());
    }
  }

  /**
   * Will clone the provided protocol while resolving all unresolved schemas.
   *
   * @param protocol the protocol to clone; not modified
   * @return a new protocol with every type and message schema resolved
   */
  static Protocol resolve(final Protocol protocol) {
    Protocol result = new Protocol(protocol.getName(), protocol.getDoc(), protocol.getNamespace());
    final Collection<Schema> types = protocol.getTypes();
    // replace unresolved schemas. The replacements map is shared across types
    // so each schema is resolved at most once.
    List<Schema> newSchemas = new ArrayList<>(types.size());
    IdentityHashMap<Schema, Schema> replacements = new IdentityHashMap<>();
    for (Schema schema : types) {
      newSchemas.add(Schemas.visit(schema, new ResolvingVisitor(schema, replacements, new SymbolTable(protocol))));
    }
    result.setTypes(newSchemas); // replace types with resolved ones

    // Resolve all schemas referenced by protocol Messages.
    for (Map.Entry<String, Protocol.Message> entry : protocol.getMessages().entrySet()) {
      Protocol.Message value = entry.getValue();
      Protocol.Message nvalue;
      if (value.isOneWay()) {
        Schema replacement = resolve(replacements, value.getRequest(), protocol);
        nvalue = result.createMessage(value.getName(), value.getDoc(), value, replacement);
      } else {
        Schema request = resolve(replacements, value.getRequest(), protocol);
        Schema response = resolve(replacements, value.getResponse(), protocol);
        Schema errors = resolve(replacements, value.getErrors(), protocol);
        nvalue = result.createMessage(value.getName(), value.getDoc(), value, request, response, errors);
      }
      result.getMessages().put(entry.getKey(), nvalue);
    }
    Schemas.copyProperties(protocol, result);
    return result;
  }

  /** Resolves one schema, reusing a replacement computed for the types above. */
  private static Schema resolve(final IdentityHashMap<Schema, Schema> replacements, final Schema request,
      final Protocol protocol) {
    Schema replacement = replacements.get(request);
    if (replacement == null) {
      replacement = Schemas.visit(request, new ResolvingVisitor(request, replacements, new SymbolTable(protocol)));
    }
    return replacement;
  }

  /** Adapts a protocol's named-type lookup to the Function the visitor expects. */
  private static class SymbolTable implements Function<String, Schema> {

    private final Protocol symbolTable;

    public SymbolTable(Protocol symbolTable) {
      this.symbolTable = symbolTable;
    }

    @Override
    public Schema apply(final String f) {
      return symbolTable.getType(f);
    }
  }
}
| 7,569 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SchemaTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.specific;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
/** Ant task to generate Java interface and classes for a protocol. */
/** Ant task to generate Java interface and classes for a protocol. */
public class SchemaTask extends ProtocolTask {

  /** Parses {@code src} as an Avro schema and generates Java sources into {@code dest}. */
  @Override
  protected void doCompile(File src, File dest) throws IOException {
    final Schema schema = new Schema.Parser().parse(src);
    final SpecificCompiler compiler = new SpecificCompiler(schema);
    compiler.setStringType(getStringType());
    compiler.compileToDestination(src, dest);
  }

  /** Command-line entry: compiles each schema file into the last-argument folder. */
  public static void main(String[] args) throws IOException {
    if (args.length < 2) {
      System.err.println("Usage: SchemaTask <schema.avsc>... <output-folder>");
      System.exit(1);
    }
    final File dest = new File(args[args.length - 1]);
    final SchemaTask task = new SchemaTask();
    for (int i = 0; i < args.length - 1; i++) {
      task.doCompile(new File(args[i]), dest);
    }
  }
}
| 7,570 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/ProtocolTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.specific;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.generic.GenericData.StringType;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.FileSet;
/** Ant task to generate Java interface and classes for a protocol. */
/** Ant task to generate Java interface and classes for a protocol. */
public class ProtocolTask extends Task {
  private File src;
  private File dest = new File(".");
  private StringType stringType = StringType.CharSequence;
  private final ArrayList<FileSet> filesets = new ArrayList<>();

  /** Set the schema file. */
  public void setFile(File file) {
    this.src = file;
  }

  /** Set the output directory */
  public void setDestdir(File dir) {
    this.dest = dir;
  }

  /** Set the string type. */
  public void setStringType(StringType type) {
    this.stringType = type;
  }

  /** Get the string type. */
  public StringType getStringType() {
    return this.stringType;
  }

  /** Add a fileset. */
  public void addFileset(FileSet set) {
    filesets.add(set);
  }

  /** Run the compiler. */
  @Override
  public void execute() {
    if (src == null && filesets.isEmpty())
      throw new BuildException("No file or fileset specified.");
    if (src != null)
      compile(src);
    Project myProject = getProject();
    for (FileSet fs : filesets) {
      DirectoryScanner ds = fs.getDirectoryScanner(myProject);
      File dir = fs.getDir(myProject);
      String[] srcs = ds.getIncludedFiles();
      for (String src1 : srcs) {
        compile(new File(dir, src1));
      }
    }
  }

  /**
   * Parses {@code src} as an Avro protocol and generates Java sources into
   * {@code dir}. Subclasses override this to compile other input kinds.
   *
   * @param src the protocol file to compile
   * @param dir the output directory
   */
  protected void doCompile(File src, File dir) throws IOException {
    Protocol protocol = Protocol.parse(src);
    SpecificCompiler compiler = new SpecificCompiler(protocol);
    compiler.setStringType(getStringType());
    // Bug fix: honor the 'dir' parameter instead of silently using the 'dest'
    // field. The internal caller passes 'dest', so its behavior is unchanged,
    // but callers supplying a different directory now get what they asked for.
    compiler.compileToDestination(src, dir);
  }

  /** Compiles one file into the configured destination, wrapping failures for Ant. */
  private void compile(File file) {
    try {
      doCompile(file, dest);
    } catch (AvroRuntimeException | IOException e) {
      throw new BuildException(e);
    }
  }
}
| 7,571 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/specific/SpecificCompiler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.specific;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.avro.Conversion;
import org.apache.avro.Conversions;
import org.apache.avro.JsonProperties;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.SchemaNormalization;
import org.apache.avro.data.TimeConversions;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.StringType;
import org.apache.avro.specific.SpecificData;
import org.apache.commons.lang3.StringUtils;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.avro.specific.SpecificData.RESERVED_WORDS;
import static org.apache.avro.specific.SpecificData.RESERVED_WORD_ESCAPE_CHAR;
/**
* Generate specific Java interfaces and classes for protocols and schemas.
* <p>
* Java reserved keywords are mangled to preserve compilation.
*/
public class SpecificCompiler {
/*
* From Section 4.10 of the Java VM Specification: A method descriptor is valid
* only if it represents method parameters with a total length of 255 or less,
* where that length includes the contribution for this in the case of instance
* or interface method invocations. The total length is calculated by summing
* the contributions of the individual parameters, where a parameter of type
* long or double contributes two units to the length and a parameter of any
* other type contributes one unit.
*
* Arguments of type Double/Float contribute 2 "parameter units" to this limit,
* all other types contribute 1 "parameter unit". All instance methods for a
* class are passed a reference to the instance (`this), and hence, they are
* permitted at most `JVM_METHOD_ARG_LIMIT-1` "parameter units" for their
* arguments.
*
* @see <a href=
* "https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.10">
* JVM Spec: Section 4.10</a>
*/
private static final int JVM_METHOD_ARG_LIMIT = 255;

/*
 * One unit of the JVM limit is consumed by the implicit `this` reference, so
 * an all-args constructor may take at most this many field parameter units.
 *
 * Note: This is protected instead of private only so it's visible for testing.
 */
protected static final int MAX_FIELD_PARAMETER_UNIT_COUNT = JVM_METHOD_ARG_LIMIT - 1;

/** Visibility of the fields in generated record classes. */
public enum FieldVisibility {
  PUBLIC, PRIVATE
}
/**
 * Registers the built-in date/time and UUID logical-type conversions on the
 * given {@link SpecificData} instance.
 */
void addLogicalTypeConversions(SpecificData specificData) {
  for (Conversion<?> conversion : new Conversion<?>[] { new TimeConversions.DateConversion(),
      new TimeConversions.TimeMillisConversion(), new TimeConversions.TimeMicrosConversion(),
      new TimeConversions.TimestampMillisConversion(), new TimeConversions.TimestampMicrosConversion(),
      new TimeConversions.LocalTimestampMicrosConversion(), new TimeConversions.LocalTimestampMillisConversion(),
      new Conversions.UUIDConversion() }) {
    specificData.addLogicalTypeConversion(conversion);
  }
}
// Model used to look up logical-type conversions while generating code.
private final SpecificData specificData = new SpecificData();
// Schemas that need (or already have had) a class generated; see enqueue().
private final Set<Schema> queue = new HashSet<>();
// Protocol to generate an interface for, or null when compiling bare schemas.
private Protocol protocol;
private VelocityEngine velocityEngine;
// Directory (filesystem or classpath) containing the Velocity templates.
private String templateDir;
private FieldVisibility fieldVisibility = FieldVisibility.PRIVATE;
private boolean createOptionalGetters = false;
private boolean gettersReturnOptional = false;
private boolean optionalGettersForNullableFieldsOnly = false;
private boolean createSetters = true;
private boolean createNullSafeAnnotations = false;
// Cleared per-record by validateRecordForCompilation() when a record has too
// many fields for a JVM-legal all-args constructor.
private boolean createAllArgsConstructor = true;
// Charset name for generated files; when null, UTF-8 is used.
private String outputCharacterEncoding;
private boolean enableDecimalLogicalType = false;
// File extension appended to generated source file names.
private String suffix = ".java";
// Extra POJOs exposed to the Velocity templates (keyed by simple class name).
private List<Object> additionalVelocityTools = Collections.emptyList();
private String recordSpecificClass = "org.apache.avro.specific.SpecificRecordBase";
private String errorSpecificClass = "org.apache.avro.specific.SpecificExceptionBase";

/*
 * Used in the record.vm template.
 */
public boolean isCreateAllArgsConstructor() {
  return createAllArgsConstructor;
}
/* Reserved words for accessor/mutator methods */
protected static final Set<String> ACCESSOR_MUTATOR_RESERVED_WORDS = new HashSet<>(
    Arrays.asList("class", "schema", "classSchema"));
static {
  // Add reserved words to accessor/mutator reserved words
  ACCESSOR_MUTATOR_RESERVED_WORDS.addAll(RESERVED_WORDS);
}

/* Reserved words for type identifiers; "var", "yield" and "record" are
 * contextual keywords in newer Java versions and must be mangled too. */
protected static final Set<String> TYPE_IDENTIFIER_RESERVED_WORDS = new HashSet<>(
    Arrays.asList("var", "yield", "record"));
static {
  // Add reserved words to type identifier reserved words
  TYPE_IDENTIFIER_RESERVED_WORDS.addAll(RESERVED_WORDS);
}

/* Reserved words for error types: these collide with Throwable accessors. */
protected static final Set<String> ERROR_RESERVED_WORDS = new HashSet<>(Arrays.asList("message", "cause"));
static {
  // Add accessor/mutator reserved words to error reserved words
  ERROR_RESERVED_WORDS.addAll(ACCESSOR_MUTATOR_RESERVED_WORDS);
}

// Header prepended to every generated file.
private static final String FILE_HEADER = "/**\n" + " * Autogenerated by Avro\n" + " *\n"
    + " * DO NOT EDIT DIRECTLY\n" + " */\n";
/**
 * Creates a compiler for all named types of a protocol, plus the protocol
 * interface itself.
 */
public SpecificCompiler(Protocol protocol) {
  this();
  // enqueue all types
  for (Schema s : protocol.getTypes()) {
    enqueue(s);
  }
  this.protocol = protocol;
}

/** Creates a compiler for a single schema (and its nested named types). */
public SpecificCompiler(Schema schema) {
  this(Collections.singleton(schema));
}

/** Creates a compiler for a collection of schemas. */
public SpecificCompiler(Collection<Schema> schemas) {
  this();
  for (Schema schema : schemas) {
    enqueue(schema);
  }
  this.protocol = null;
}

/** Creates a compiler for an iterable of schemas. */
public SpecificCompiler(Iterable<Schema> schemas) {
  this();
  schemas.forEach(this::enqueue);
  this.protocol = null;
}

/**
 * Creates an empty compiler: resolves the template directory (overridable via
 * the "org.apache.avro.specific.templates" system property) and registers the
 * default logical-type conversions.
 */
SpecificCompiler() {
  this.templateDir = System.getProperty("org.apache.avro.specific.templates",
      "/org/apache/avro/compiler/specific/templates/java/classic/");
  initializeVelocity();
  initializeSpecificData();
}
/**
 * Set additional Velocity tools (simple POJOs) to be injected into the Velocity
 * template context.
 */
public void setAdditionalVelocityTools(List<Object> additionalVelocityTools) {
  this.additionalVelocityTools = additionalVelocityTools;
}

/**
 * Set the resource directory where templates reside. First, the compiler checks
 * the system path for the specified file, if not it is assumed that it is
 * present on the classpath.
 */
public void setTemplateDir(String templateDir) {
  this.templateDir = templateDir;
}

/**
 * Set the resource file suffix, .java or .xxx
 */
public void setSuffix(String suffix) {
  this.suffix = suffix;
}

/**
 * @return true if the record fields should be public
 */
public boolean publicFields() {
  return this.fieldVisibility == FieldVisibility.PUBLIC;
}

/**
 * @return true if the record fields should be private
 */
public boolean privateFields() {
  return this.fieldVisibility == FieldVisibility.PRIVATE;
}

/**
 * Sets the field visibility option.
 */
public void setFieldVisibility(FieldVisibility fieldVisibility) {
  this.fieldVisibility = fieldVisibility;
}

/** @return true if setter methods are generated for record fields */
public boolean isCreateSetters() {
  return this.createSetters;
}

/**
 * Set to false to not create setter methods for the fields of the record.
 */
public void setCreateSetters(boolean createSetters) {
  this.createSetters = createSetters;
}

/** @return true if JetBrains nullability annotations are emitted */
public boolean isCreateNullSafeAnnotations() {
  return this.createNullSafeAnnotations;
}

/**
 * Set to true to add jetbrains @Nullable and @NotNull annotations
 */
public void setCreateNullSafeAnnotations(boolean createNullSafeAnnotations) {
  this.createNullSafeAnnotations = createNullSafeAnnotations;
}

/** @return true if extra Optional-returning getters are generated */
public boolean isCreateOptionalGetters() {
  return this.createOptionalGetters;
}

/**
 * Set to true to create additional getters that return an Optional (default
 * false).
 */
public void setCreateOptionalGetters(boolean createOptionalGetters) {
  this.createOptionalGetters = createOptionalGetters;
}

/** @return true if the regular getters return an Optional */
public boolean isGettersReturnOptional() {
  return this.gettersReturnOptional;
}

/**
 * Set to true to make the generated getters return an Optional instead of the
 * raw value (default false).
 */
public void setGettersReturnOptional(boolean gettersReturnOptional) {
  this.gettersReturnOptional = gettersReturnOptional;
}

/** @return true if Optional getters are restricted to nullable fields */
public boolean isOptionalGettersForNullableFieldsOnly() {
  return optionalGettersForNullableFieldsOnly;
}

/**
 * Set to true to create the Optional getters only for nullable fields.
 */
public void setOptionalGettersForNullableFieldsOnly(boolean optionalGettersForNullableFieldsOnly) {
  this.optionalGettersForNullableFieldsOnly = optionalGettersForNullableFieldsOnly;
}

/**
 * Set to true to use {@link java.math.BigDecimal} instead of
 * {@link java.nio.ByteBuffer} for logical type "decimal"
 */
public void setEnableDecimalLogicalType(boolean enableDecimalLogicalType) {
  this.enableDecimalLogicalType = enableDecimalLogicalType;
}
/**
 * Registers a custom {@link Conversion}, instantiated reflectively via the
 * class's no-arg constructor.
 *
 * @throws RuntimeException if the class cannot be instantiated
 */
public void addCustomConversion(Class<?> conversionClass) {
  try {
    final Object instance = conversionClass.getDeclaredConstructor().newInstance();
    specificData.addLogicalTypeConversion((Conversion<?>) instance);
  } catch (IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e) {
    throw new RuntimeException("Failed to instantiate conversion class " + conversionClass, e);
  }
}
/**
 * Returns the canonical class names of every conversion used anywhere in the
 * given schema (recursively).
 */
public Collection<String> getUsedConversionClasses(Schema schema) {
  return getUsedConversions(schema).stream().map(conversion -> conversion.getClass().getCanonicalName())
      .collect(Collectors.toSet());
}
/**
 * Maps the name of each custom-registered logical type used by the schema to
 * the canonical class name of its factory.
 */
public Map<String, String> getUsedCustomLogicalTypeFactories(Schema schema) {
  final Set<String> logicalTypeNames = getUsedLogicalTypes(schema).stream().map(LogicalType::getName)
      .collect(Collectors.toSet());
  // Keep only the registered factories whose logical type actually occurs.
  return LogicalTypes.getCustomRegisteredTypes().entrySet().stream()
      .filter(entry -> logicalTypeNames.contains(entry.getKey()))
      .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().getClass().getCanonicalName()));
}
/**
 * Recursively walks a schema, accumulating every logical type and every
 * registered conversion encountered. Either result set may be null when the
 * caller is not interested in it; seenSchemas breaks cycles in recursive
 * schemas.
 */
private void collectUsedTypes(Schema schema, Set<Conversion<?>> conversionResults,
    Set<LogicalType> logicalTypeResults, Set<Schema> seenSchemas) {
  if (seenSchemas.contains(schema)) {
    return;
  }
  final LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
  if (logicalTypeResults != null && logicalType != null)
    logicalTypeResults.add(logicalType);
  // getConversionFor(null) returns null, so no null-check on logicalType needed.
  final Conversion<?> conversion = specificData.getConversionFor(logicalType);
  if (conversionResults != null && conversion != null)
    conversionResults.add(conversion);
  seenSchemas.add(schema);
  switch (schema.getType()) {
  case RECORD:
    for (Schema.Field field : schema.getFields()) {
      collectUsedTypes(field.schema(), conversionResults, logicalTypeResults, seenSchemas);
    }
    break;
  case MAP:
    collectUsedTypes(schema.getValueType(), conversionResults, logicalTypeResults, seenSchemas);
    break;
  case ARRAY:
    collectUsedTypes(schema.getElementType(), conversionResults, logicalTypeResults, seenSchemas);
    break;
  case UNION:
    for (Schema s : schema.getTypes())
      collectUsedTypes(s, conversionResults, logicalTypeResults, seenSchemas);
    break;
  case NULL:
  case ENUM:
  case FIXED:
  case STRING:
  case BYTES:
  case INT:
  case LONG:
  case FLOAT:
  case DOUBLE:
  case BOOLEAN:
    // Leaf types: nothing further to recurse into.
    break;
  default:
    throw new RuntimeException("Unknown type: " + schema);
  }
}

/** Returns all conversions used (recursively) by the given schema. */
private Set<Conversion<?>> getUsedConversions(Schema schema) {
  final Set<Conversion<?>> conversionResults = new HashSet<>();
  collectUsedTypes(schema, conversionResults, null, new HashSet<>());
  return conversionResults;
}

/** Returns all logical types used (recursively) by the given schema. */
private Set<LogicalType> getUsedLogicalTypes(Schema schema) {
  final Set<LogicalType> logicalTypeResults = new HashSet<>();
  collectUsedTypes(schema, null, logicalTypeResults, new HashSet<>());
  return logicalTypeResults;
}
/** Configures the Velocity engine used to render the code templates. */
private void initializeVelocity() {
  this.velocityEngine = new VelocityEngine();
  // These properties tell Velocity to use its own classpath-based
  // loader, then drop down to check the root and the current folder
  velocityEngine.addProperty("resource.loaders", "class, file");
  velocityEngine.addProperty("resource.loader.class.class",
      "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
  velocityEngine.addProperty("resource.loader.file.class",
      "org.apache.velocity.runtime.resource.loader.FileResourceLoader");
  velocityEngine.addProperty("resource.loader.file.path", "/, ., ");
  // Fail loudly on undefined template references instead of rendering them raw.
  velocityEngine.setProperty("runtime.strict_mode.enable", true);
  // Set whitespace gobbling to Backward Compatible (BC)
  // https://velocity.apache.org/engine/2.0/developer-guide.html#space-gobbling
  velocityEngine.setProperty("parser.space_gobbling", "bc");
}

/** Registers default logical-type conversions, including decimal. */
private void initializeSpecificData() {
  addLogicalTypeConversions(specificData);
  specificData.addLogicalTypeConversion(new Conversions.DecimalConversion());
}
/**
 * Captures output file path and contents.
 */
static class OutputFile {
  // Path of the generated file, relative to the destination directory.
  String path;
  // Generated source text (written after FILE_HEADER).
  String contents;
  // Charset name for the output; null means UTF-8.
  String outputCharacterEncoding;

  /**
   * Writes output to path destination directory when it is newer than src,
   * creating directories as necessary. Returns the created file.
   *
   * @param src     source file used for the freshness check; may be null to
   *                force writing
   * @param destDir root directory to write under
   * @throws IOException if directories cannot be created or the write fails
   */
  File writeToDestination(File src, File destDir) throws IOException {
    File f = new File(destDir, path);
    if (src != null && f.exists() && f.lastModified() >= src.lastModified())
      return f; // already up to date: ignore
    // Files.createDirectories reports failure via IOException, unlike
    // File.mkdirs() whose boolean result was previously ignored.
    Files.createDirectories(f.getParentFile().toPath());
    // try-with-resources closes the writer (and its underlying stream) even on
    // write failure; the old manual close chain double-closed the stream.
    try (Writer fw = outputCharacterEncoding != null
        ? new OutputStreamWriter(new FileOutputStream(f), outputCharacterEncoding)
        : Files.newBufferedWriter(f.toPath(), UTF_8)) {
      fw.write(FILE_HEADER);
      fw.write(contents);
    }
    return f;
  }
}
/**
 * Generates Java interface and classes for a protocol.
 *
 * @param src  the source Avro protocol file
 * @param dest the directory to place generated files in
 */
public static void compileProtocol(File src, File dest) throws IOException {
  compileProtocol(new File[] { src }, dest);
}

/**
 * Generates Java interface and classes for a number of protocol files.
 *
 * @param srcFiles the source Avro protocol files
 * @param dest     the directory to place generated files in
 */
public static void compileProtocol(File[] srcFiles, File dest) throws IOException {
  for (File src : srcFiles) {
    Protocol protocol = Protocol.parse(src);
    SpecificCompiler compiler = new SpecificCompiler(protocol);
    compiler.compileToDestination(src, dest);
  }
}

/**
 * Generates Java classes for a schema.
 *
 * @param src  the source Avro schema file
 * @param dest the directory to place generated files in
 */
public static void compileSchema(File src, File dest) throws IOException {
  compileSchema(new File[] { src }, dest);
}

/**
 * Generates Java classes for a number of schema files. A single parser is
 * shared so later files may reference types defined in earlier ones.
 */
public static void compileSchema(File[] srcFiles, File dest) throws IOException {
  Schema.Parser parser = new Schema.Parser();
  for (File src : srcFiles) {
    Schema schema = parser.parse(src);
    SpecificCompiler compiler = new SpecificCompiler(schema);
    compiler.compileToDestination(src, dest);
  }
}
/**
 * Recursively enqueue schemas that need a class generated. Only named types
 * (records, enums, fixed) are queued; container types are traversed and
 * primitives are ignored.
 */
private void enqueue(Schema schema) {
  if (queue.contains(schema))
    return; // already queued (also breaks recursive-schema cycles)
  switch (schema.getType()) {
  case RECORD:
    queue.add(schema);
    for (Schema.Field field : schema.getFields())
      enqueue(field.schema());
    break;
  case MAP:
    enqueue(schema.getValueType());
    break;
  case ARRAY:
    enqueue(schema.getElementType());
    break;
  case UNION:
    for (Schema s : schema.getTypes())
      enqueue(s);
    break;
  case ENUM:
  case FIXED:
    queue.add(schema);
    break;
  case STRING:
  case BYTES:
  case INT:
  case LONG:
  case FLOAT:
  case DOUBLE:
  case BOOLEAN:
  case NULL:
    // Primitive types need no generated class.
    break;
  default:
    throw new RuntimeException("Unknown type: " + schema);
  }
}
/**
 * Generates output files for all enqueued schemas, plus the protocol interface
 * when a protocol was supplied.
 */
Collection<OutputFile> compile() {
  List<OutputFile> generated = new ArrayList<>(queue.size() + 1);
  for (Schema queued : queue) {
    generated.add(compile(queued));
  }
  if (protocol == null) {
    return generated;
  }
  generated.add(compileInterface(protocol));
  return generated;
}
/**
 * Generate output under dst, unless existing file is newer than src.
 */
public void compileToDestination(File src, File dst) throws IOException {
  for (Schema schema : queue) {
    OutputFile o = compile(schema);
    o.writeToDestination(src, dst);
  }
  if (protocol != null) {
    compileInterface(protocol).writeToDestination(src, dst);
  }
}
/**
 * Loads the named Velocity template and renders it with the given context,
 * returning the rendered text. Template-lookup failures are wrapped in a
 * RuntimeException.
 */
private String renderTemplate(String templateName, VelocityContext context) {
  final Template template;
  try {
    template = this.velocityEngine.getTemplate(templateName);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  StringWriter rendered = new StringWriter();
  template.merge(context, rendered);
  return rendered.toString();
}
/** Renders the protocol interface via the protocol.vm template. */
OutputFile compileInterface(Protocol protocol) {
  protocol = addStringType(protocol); // annotate protocol as needed
  VelocityContext context = new VelocityContext();
  context.put("protocol", protocol);
  context.put("this", this);
  // Expose any extra tools to the template, keyed by lower-cased class name.
  for (Object velocityTool : additionalVelocityTools) {
    String toolName = velocityTool.getClass().getSimpleName().toLowerCase();
    context.put(toolName, velocityTool);
  }
  String out = renderTemplate(templateDir + "protocol.vm", context);
  OutputFile outputFile = new OutputFile();
  String mangledName = mangleTypeIdentifier(protocol.getName());
  outputFile.path = makePath(mangledName, mangle(protocol.getNamespace()));
  outputFile.contents = out;
  outputFile.outputCharacterEncoding = outputCharacterEncoding;
  return outputFile;
}
/**
 * Builds the relative output path for a generated class: the namespace becomes
 * a directory prefix and the configured suffix is appended.
 */
// package private for testing purposes
String makePath(String name, String space) {
  StringBuilder path = new StringBuilder();
  if (space != null && !space.isEmpty()) {
    path.append(space.replace('.', File.separatorChar)).append(File.separatorChar);
  }
  return path.append(name).append(suffix).toString();
}
/**
 * Returns the number of parameter units required by fields for the
 * AllArgsConstructor.
 *
 * @param record a Record schema
 */
protected int calcAllArgConstructorParameterUnits(Schema record) {
  if (record.getType() != Schema.Type.RECORD)
    throw new RuntimeException("This method must only be called for record schemas.");
  // All generated fields are reference types, so each costs exactly one unit.
  return record.getFields().size();
}

/**
 * Disables the all-args constructor (and warns) when the record has more
 * fields than a JVM method descriptor permits.
 */
protected void validateRecordForCompilation(Schema record) {
  this.createAllArgsConstructor = calcAllArgConstructorParameterUnits(record) <= MAX_FIELD_PARAMETER_UNIT_COUNT;
  if (!this.createAllArgsConstructor) {
    Logger logger = LoggerFactory.getLogger(SpecificCompiler.class);
    logger.warn("Record '" + record.getFullName() + "' contains more than " + MAX_FIELD_PARAMETER_UNIT_COUNT
        + " parameters which exceeds the JVM "
        + "spec for the number of permitted constructor arguments. Clients must "
        + "rely on the builder pattern to create objects instead. For more info " + "see JIRA ticket AVRO-1642.");
  }
}
/**
 * Renders a single named schema (record, enum, or fixed) into an OutputFile
 * using the corresponding Velocity template.
 */
OutputFile compile(Schema schema) {
  schema = addStringType(schema); // annotate schema as needed
  String output = "";
  VelocityContext context = new VelocityContext();
  context.put("this", this);
  context.put("schema", schema);
  // Expose any extra tools to the template, keyed by lower-cased class name.
  for (Object velocityTool : additionalVelocityTools) {
    String toolName = velocityTool.getClass().getSimpleName().toLowerCase();
    context.put(toolName, velocityTool);
  }
  switch (schema.getType()) {
  case RECORD:
    validateRecordForCompilation(schema);
    output = renderTemplate(templateDir + "record.vm", context);
    break;
  case ENUM:
    output = renderTemplate(templateDir + "enum.vm", context);
    break;
  case FIXED:
    output = renderTemplate(templateDir + "fixed.vm", context);
    break;
  case BOOLEAN:
  case NULL:
    // No class is generated for these; output stays empty.
    break;
  default:
    throw new RuntimeException("Unknown type: " + schema);
  }
  OutputFile outputFile = new OutputFile();
  String name = mangleTypeIdentifier(schema.getName());
  outputFile.path = makePath(name, mangle(schema.getNamespace()));
  outputFile.contents = output;
  outputFile.outputCharacterEncoding = outputCharacterEncoding;
  return outputFile;
}
// Java representation chosen for Avro string schemas; CharSequence by default.
private StringType stringType = StringType.CharSequence;

/**
 * Set the Java type to be emitted for string schemas.
 */
public void setStringType(StringType t) {
  this.stringType = t;
}
// annotate map and string schemas with string type
/**
 * Returns a copy of the protocol whose string/map schemas are annotated with
 * the configured string type. A no-op unless stringType is String.
 */
private Protocol addStringType(Protocol p) {
  if (stringType != StringType.String)
    return p;
  Protocol newP = new Protocol(p.getName(), p.getDoc(), p.getNamespace());
  // Shared seen-map so identical schemas map to the same annotated copy.
  Map<Schema, Schema> types = new LinkedHashMap<>();
  p.forEachProperty(newP::addProp);
  // annotate types
  Collection<Schema> namedTypes = new LinkedHashSet<>();
  for (Schema s : p.getTypes())
    namedTypes.add(addStringType(s, types));
  newP.setTypes(namedTypes);
  // annotate messages
  Map<String, Message> newM = newP.getMessages();
  for (Message m : p.getMessages().values())
    newM.put(m.getName(),
        m.isOneWay() ? newP.createMessage(m, addStringType(m.getRequest(), types))
            : newP.createMessage(m, addStringType(m.getRequest(), types), addStringType(m.getResponse(), types),
                addStringType(m.getErrors(), types)));
  return newP;
}

/**
 * Single-schema variant of the above; a no-op unless stringType is String.
 */
private Schema addStringType(Schema s) {
  if (stringType != StringType.String)
    return s;
  return addStringType(s, new HashMap<>());
}
// annotate map and string schemas with string type
/**
 * Recursively copies a schema, annotating string schemas (and map keys) with
 * the configured string type. The seen map both breaks recursion loops and
 * makes identical input schemas share one annotated copy.
 */
private Schema addStringType(Schema s, Map<Schema, Schema> seen) {
  if (seen.containsKey(s))
    return seen.get(s); // break loops
  Schema result = s;
  switch (s.getType()) {
  case STRING:
    result = Schema.create(Schema.Type.STRING);
    // Only annotate plain strings; logical types (e.g. uuid) keep their own
    // representation.
    if (s.getLogicalType() == null) {
      GenericData.setStringType(result, stringType);
    }
    break;
  case RECORD:
    result = Schema.createRecord(s.getFullName(), s.getDoc(), null, s.isError());
    for (String alias : s.getAliases())
      result.addAlias(alias, null); // copy aliases
    // Register before recursing so self-referential fields resolve to result.
    seen.put(s, result);
    List<Field> newFields = new ArrayList<>(s.getFields().size());
    for (Field f : s.getFields()) {
      Schema fSchema = addStringType(f.schema(), seen);
      Field newF = new Field(f, fSchema);
      newFields.add(newF);
    }
    result.setFields(newFields);
    break;
  case ARRAY:
    Schema e = addStringType(s.getElementType(), seen);
    result = Schema.createArray(e);
    break;
  case MAP:
    Schema v = addStringType(s.getValueType(), seen);
    result = Schema.createMap(v);
    // Map keys are always strings; annotate the key representation too.
    GenericData.setStringType(result, stringType);
    break;
  case UNION:
    List<Schema> types = new ArrayList<>(s.getTypes().size());
    for (Schema branch : s.getTypes())
      types.add(addStringType(branch, seen));
    result = Schema.createUnion(types);
    break;
  }
  // Carry over user properties and any logical type to the copy.
  result.addAllProps(s);
  if (s.getLogicalType() != null) {
    s.getLogicalType().addToSchema(result);
  }
  seen.put(s, result);
  return result;
}
/**
 * Utility for template use (and also internal use). Returns a string giving the
 * FQN of the Java type to be used for a string schema or for the key of a map
 * schema. (It's an error to call this on a schema other than a string or map.)
 */
public String getStringType(Schema s) {
  String prop;
  switch (s.getType()) {
  case MAP:
    prop = SpecificData.KEY_CLASS_PROP;
    break;
  case STRING:
    prop = SpecificData.CLASS_PROP;
    break;
  default:
    throw new IllegalArgumentException("Can't check string-type of non-string/map type: " + s);
  }
  return getStringType(s.getObjectProp(prop));
}

/**
 * Resolves the Java class name for strings: an explicit per-schema property
 * override wins; otherwise the compiler-wide stringType setting decides.
 */
private String getStringType(Object overrideClassProperty) {
  if (overrideClassProperty != null)
    return overrideClassProperty.toString();
  switch (stringType) {
  case String:
    return "java.lang.String";
  case Utf8:
    return "org.apache.avro.util.Utf8";
  case CharSequence:
    return "java.lang.CharSequence";
  default:
    throw new RuntimeException("Unknown string type: " + stringType);
  }
}
/**
 * Utility for template use. Returns true iff a STRING-schema or the key of a
 * MAP-schema is what SpecificData defines as "stringable" (which means we need
 * to call toString on it before writing it). Any type other than the three
 * built-in string representations is considered stringable.
 */
public boolean isStringable(Schema schema) {
  switch (getStringType(schema)) {
  case "java.lang.String":
  case "java.lang.CharSequence":
  case "org.apache.avro.util.Utf8":
    return false;
  default:
    return true;
  }
}
// Shared NULL schema used to detect nullable two-branch unions.
private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);

/**
 * Utility for template use. Returns the java type for a Schema.
 */
public String javaType(Schema schema) {
  return javaType(schema, true);
}
/**
 * Returns the (boxed) Java type name for a schema. When
 * checkConvertedLogicalType is true and the schema's logical type has a
 * registered conversion, the conversion's Java type is used instead.
 */
private String javaType(Schema schema, boolean checkConvertedLogicalType) {
  if (checkConvertedLogicalType) {
    String convertedLogicalType = getConvertedLogicalType(schema);
    if (convertedLogicalType != null) {
      return convertedLogicalType;
    }
  }
  switch (schema.getType()) {
  case RECORD:
  case ENUM:
  case FIXED:
    return mangleFullyQualified(schema.getFullName());
  case ARRAY:
    return "java.util.List<" + javaType(schema.getElementType()) + ">";
  case MAP:
    return "java.util.Map<" + getStringType(schema.getObjectProp(SpecificData.KEY_CLASS_PROP)) + ","
        + javaType(schema.getValueType()) + ">";
  case UNION:
    List<Schema> types = schema.getTypes(); // elide unions with null
    if ((types.size() == 2) && types.contains(NULL_SCHEMA))
      return javaType(types.get(types.get(0).equals(NULL_SCHEMA) ? 1 : 0));
    // Other unions have no common Java supertype beyond Object.
    return "java.lang.Object";
  case STRING:
    return getStringType(schema.getObjectProp(SpecificData.CLASS_PROP));
  case BYTES:
    return "java.nio.ByteBuffer";
  case INT:
    return "java.lang.Integer";
  case LONG:
    return "java.lang.Long";
  case FLOAT:
    return "java.lang.Float";
  case DOUBLE:
    return "java.lang.Double";
  case BOOLEAN:
    return "java.lang.Boolean";
  case NULL:
    return "java.lang.Void";
  default:
    throw new RuntimeException("Unknown type: " + schema);
  }
}
/**
 * Mangles a fully-qualified type name: the namespace and the simple type name
 * are mangled with their respective reserved-word sets.
 */
private String mangleFullyQualified(String fullName) {
  final int dot = fullName.lastIndexOf('.');
  if (dot < 0) {
    return mangleTypeIdentifier(fullName);
  }
  return mangle(fullName.substring(0, dot)) + "." + mangleTypeIdentifier(fullName.substring(dot + 1));
}
/**
 * Returns the schema's logical type, except that decimal is hidden unless
 * enableDecimalLogicalType is set (so decimals stay ByteBuffer by default).
 */
private LogicalType getLogicalType(Schema schema) {
  if (enableDecimalLogicalType || !(schema.getLogicalType() instanceof LogicalTypes.Decimal)) {
    return schema.getLogicalType();
  }
  return null;
}

/**
 * Returns the Java class name produced by the registered conversion for the
 * schema's (effective) logical type, or null if there is none.
 */
private String getConvertedLogicalType(Schema schema) {
  final Conversion<?> conversion = specificData.getConversionFor(getLogicalType(schema));
  if (conversion != null) {
    return conversion.getConvertedType().getName();
  }
  return null;
}
/**
 * Utility for template use. Emits the assignment statement for a setter; when
 * the field's logical type has a conversion, the conversion supplies code that
 * adjusts the value before assigning it.
 */
public String generateSetterCode(Schema schema, String name, String pname) {
  Conversion<?> conversion = specificData.getConversionFor(schema.getLogicalType());
  if (conversion != null) {
    return conversion.adjustAndSetValue("this." + name, pname);
  }
  return "this." + name + " = " + pname + ";";
}
/**
 * Utility for template use. Returns the unboxed java type for a Schema.
 *
 * @deprecated use javaUnbox(Schema, boolean), kept for backward compatibility
 *             of custom templates
 */
@Deprecated
public String javaUnbox(Schema schema) {
  return javaUnbox(schema, false);
}

/**
 * Utility for template use. Returns the unboxed java type for a Schema
 * including the void type.
 */
public String javaUnbox(Schema schema, boolean unboxNullToVoid) {
  // A converted logical type always takes precedence over the primitive form.
  String convertedLogicalType = getConvertedLogicalType(schema);
  if (convertedLogicalType != null) {
    return convertedLogicalType;
  }
  switch (schema.getType()) {
  case INT:
    return "int";
  case LONG:
    return "long";
  case FLOAT:
    return "float";
  case DOUBLE:
    return "double";
  case BOOLEAN:
    return "boolean";
  case NULL:
    if (unboxNullToVoid) {
      // Used for preventing unnecessary returns for RPC methods without response but
      // with error(s)
      return "void";
    }
    // intentional fall-through: NULL without unboxing uses the boxed mapping
  default:
    return javaType(schema, false);
  }
}
/**
 * Utility for template use. Return a string with a given number of spaces to be
 * used for indentation purposes.
 */
public String indent(int n) {
  char[] spaces = new char[n];
  Arrays.fill(spaces, ' ');
  return new String(spaces);
}
/**
 * Utility for template use. For a two-branch union type with one null branch,
 * returns the index of the NON-null branch. It's an error to use on anything
 * other than a two-branch union with one null branch.
 */
public int getNonNullIndex(Schema s) {
  if (s.getType() != Schema.Type.UNION || s.getTypes().size() != 2 || !s.getTypes().contains(NULL_SCHEMA))
    throw new IllegalArgumentException("Can only be used on 2-branch union with a null branch: " + s);
  // If branch 0 is the null schema, the non-null branch is 1, and vice versa.
  return (s.getTypes().get(0).equals(NULL_SCHEMA) ? 1 : 0);
}
/**
 * Utility for template use. Returns true if the encode/decode logic in
 * record.vm can handle the schema being presented.
 */
public boolean isCustomCodable(Schema schema) {
  return isCustomCodable(schema, new HashSet<>());
}

/**
 * Recursive helper: a schema is custom-codable iff it has no logical type, is
 * not an error record, and all nested schemas are custom-codable; unions are
 * only supported in the two-branch nullable form.
 */
private boolean isCustomCodable(Schema schema, Set<Schema> seen) {
  if (!seen.add(schema))
    // Recursive call: assume custom codable until a caller on the call stack proves
    // otherwise.
    return true;
  if (schema.getLogicalType() != null)
    return false;
  boolean result = true;
  switch (schema.getType()) {
  case RECORD:
    if (schema.isError())
      return false;
    for (Schema.Field f : schema.getFields())
      result &= isCustomCodable(f.schema(), seen);
    break;
  case MAP:
    result = isCustomCodable(schema.getValueType(), seen);
    break;
  case ARRAY:
    result = isCustomCodable(schema.getElementType(), seen);
    break;
  case UNION:
    List<Schema> types = schema.getTypes();
    // Only know how to handle "nulling" unions for now
    if (types.size() != 2 || !types.contains(NULL_SCHEMA))
      return false;
    for (Schema s : types)
      result &= isCustomCodable(s, seen);
    break;
  default:
    // Primitive and named leaf types (enum/fixed) are always codable.
  }
  return result;
}
/**
 * Utility for template use. Returns true if any direct field of the record has
 * a logical type (non-recursive).
 */
public boolean hasLogicalTypeField(Schema schema) {
  for (Schema.Field field : schema.getFields()) {
    if (field.schema().getLogicalType() != null) {
      return true;
    }
  }
  return false;
}

/**
 * Utility for template use. Returns a Java expression that instantiates the
 * conversion for the schema's logical type, or the literal "null" when there
 * is no applicable conversion (including decimal when disabled).
 */
public String conversionInstance(Schema schema) {
  if (schema == null || schema.getLogicalType() == null) {
    return "null";
  }
  // Decimal is opt-in; without the flag, no conversion is emitted for it.
  if (LogicalTypes.Decimal.class.equals(schema.getLogicalType().getClass()) && !enableDecimalLogicalType) {
    return "null";
  }
  final Conversion<Object> conversion = specificData.getConversionFor(schema.getLogicalType());
  if (conversion != null) {
    return "new " + conversion.getClass().getCanonicalName() + "()";
  }
  return "null";
}
/**
 * Utility for template use. Returns the java annotations for a schema, read
 * from the "javaAnnotation" property: either a single string or a list of
 * strings; anything else yields an empty array.
 */
public String[] javaAnnotations(JsonProperties props) {
  final Object value = props.getObjectProp("javaAnnotation");
  if (value instanceof String) {
    return new String[] { value.toString() };
  }
  if (value instanceof List) {
    final List<?> entries = (List<?>) value;
    final String[] annotations = new String[entries.size()];
    for (int i = 0; i < annotations.length; i++) {
      annotations[i] = entries.get(i).toString();
    }
    return annotations;
  }
  return new String[0];
}
// maximum size for string constants, to avoid javac limits
int maxStringChars = 8192;

/**
 * Utility for template use. Takes a (potentially overly long) string and splits
 * it into a quoted, comma-separted sequence of escaped strings.
 *
 * @param s The string to split
 * @return A sequence of quoted, comma-separated, escaped strings
 */
public String javaSplit(String s) throws IOException {
  StringBuilder b = new StringBuilder(s.length());
  b.append("\""); // initial quote
  for (int i = 0; i < s.length(); i += maxStringChars) {
    if (i != 0)
      b.append("\",\""); // insert quote-comma-quote
    String chunk = s.substring(i, Math.min(s.length(), i + maxStringChars));
    b.append(javaEscape(chunk)); // escape chunks
  }
  b.append("\""); // final quote
  return b.toString();
}
/**
 * Utility for template use. Escapes quotes and backslashes so the string can be
 * embedded in a Java string literal.
 */
public static String javaEscape(String o) {
  StringBuilder escaped = new StringBuilder(o.length());
  for (int i = 0; i < o.length(); i++) {
    char c = o.charAt(i);
    if (c == '\\' || c == '"') {
      escaped.append('\\');
    }
    escaped.append(c);
  }
  return escaped.toString();
}
/**
 * Utility for template use. Escapes comment end with HTML entities so embedded
 * text cannot terminate the generated Javadoc comment or inject HTML tags.
 */
public static String escapeForJavadoc(String s) {
  // The previous replacements were no-ops (each token was replaced by itself);
  // the HTML entities below are what actually neutralizes "*/", "<" and ">".
  return s.replace("*/", "*&#47;").replace("<", "&lt;").replace(">", "&gt;");
}
/**
 * Utility for template use. Returns empty string for null.
 */
public static String nullToEmpty(String x) {
  if (x == null) {
    return "";
  }
  return x;
}
/**
 * Utility for template use. Adds a dollar sign to reserved words.
 */
public static String mangle(String word) {
  return mangle(word, false);
}

/**
 * Utility for template use. Adds a dollar sign to reserved words.
 */
public static String mangle(String word, boolean isError) {
  return mangle(word, isError ? ERROR_RESERVED_WORDS : RESERVED_WORDS);
}

/**
 * Utility for template use. Adds a dollar sign to reserved words in type
 * identifiers.
 */
public static String mangleTypeIdentifier(String word) {
  return mangleTypeIdentifier(word, false);
}

/**
 * Utility for template use. Adds a dollar sign to reserved words in type
 * identifiers.
 */
public static String mangleTypeIdentifier(String word, boolean isError) {
  return mangle(word, isError ? ERROR_RESERVED_WORDS : TYPE_IDENTIFIER_RESERVED_WORDS);
}

/**
 * Utility for template use. Adds a dollar sign to reserved words.
 */
public static String mangle(String word, Set<String> reservedWords) {
  return mangle(word, reservedWords, false);
}

/**
 * Utility for template use. Adds a dollar sign to reserved words.
 *
 * @param word          identifier or dotted name to mangle
 * @param reservedWords the reserved-word set to check against
 * @param isMethod      when true, also checks the name with its first letter
 *                      lower-cased (accessor/mutator collision check)
 */
public static String mangle(String word, Set<String> reservedWords, boolean isMethod) {
  if (StringUtils.isBlank(word)) {
    return word;
  }
  if (word.contains(".")) {
    // If the 'word' is really a full path of a class we must mangle just the
    // individual package-name and class-name segments, then rejoin them.
    String[] packageWords = word.split("\\.");
    String[] newPackageWords = new String[packageWords.length];
    for (int i = 0; i < packageWords.length; i++) {
      String oldName = packageWords[i];
      newPackageWords[i] = mangle(oldName, reservedWords, false);
    }
    return String.join(".", newPackageWords);
  }
  if (reservedWords.contains(word) || (isMethod && reservedWords
      .contains(Character.toLowerCase(word.charAt(0)) + ((word.length() > 1) ? word.substring(1) : "")))) {
    return word + RESERVED_WORD_ESCAPE_CHAR;
  }
  return word;
}
/**
* Utility for use by templates. Return schema fingerprint as a long.
*/
public static long fingerprint64(Schema schema) {
return SchemaNormalization.parsingFingerprint64(schema);
}
/**
 * Generates the name of a field accessor method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the accessor name.
 * @return the name of the accessor method for the given field.
 */
public static String generateGetMethod(Schema schema, Field field) {
  // "get" + CamelCased field name, e.g. field "my_field" -> getMyField.
  return generateMethodName(schema, field, "get", "");
}
/**
 * Generates the name of a field accessor method that returns a Java 8
 * {@code Optional}.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the accessor name.
 * @return the name of the Optional-returning accessor method for the field.
 */
public static String generateGetOptionalMethod(Schema schema, Field field) {
  // "getOptional" + CamelCased field name, e.g. getOptionalMyField.
  return generateMethodName(schema, field, "getOptional", "");
}
/**
 * Generates the name of a field mutator method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the mutator name.
 * @return the name of the mutator method for the given field.
 */
public static String generateSetMethod(Schema schema, Field field) {
  // "set" + CamelCased field name, e.g. field "my_field" -> setMyField.
  return generateMethodName(schema, field, "set", "");
}
/**
 * Generates the name of a field "has" method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the "has" method name.
 * @return the name of the "has" method for the given field.
 */
public static String generateHasMethod(Schema schema, Field field) {
  // "has" + CamelCased field name, e.g. field "my_field" -> hasMyField.
  return generateMethodName(schema, field, "has", "");
}
/**
 * Generates the name of a field "clear" method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the "clear" method name.
 * @return the name of the "clear" method for the given field.
 */
public static String generateClearMethod(Schema schema, Field field) {
  // "clear" + CamelCased field name, e.g. field "my_field" -> clearMyField.
  return generateMethodName(schema, field, "clear", "");
}
/**
 * Template helper: reports whether the generated Java type for {@code schema}
 * has a Builder. Records do; a two-branch union of null with another schema is
 * treated as its non-null branch; everything else does not.
 */
public static boolean hasBuilder(Schema schema) {
  if (schema.getType() == Schema.Type.RECORD) {
    return true;
  }
  if (schema.getType() == Schema.Type.UNION) {
    List<Schema> branches = schema.getTypes();
    // Unions of [null, X] (or [X, null]) are elided to X for code generation.
    if (branches.size() == 2 && branches.contains(NULL_SCHEMA)) {
      Schema nonNull = branches.get(0).equals(NULL_SCHEMA) ? branches.get(1) : branches.get(0);
      return hasBuilder(nonNull);
    }
  }
  return false;
}
/**
 * Generates the name of a field Builder accessor method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the Builder accessor name.
 * @return the name of the Builder accessor method for the given field.
 */
public static String generateGetBuilderMethod(Schema schema, Field field) {
  // "get" + CamelCased field name + "Builder", e.g. getMyFieldBuilder.
  return generateMethodName(schema, field, "get", "Builder");
}
/**
 * Generates the name of a field Builder mutator method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the Builder mutator name.
 * @return the name of the Builder mutator method for the given field.
 */
public static String generateSetBuilderMethod(Schema schema, Field field) {
  // "set" + CamelCased field name + "Builder", e.g. setMyFieldBuilder.
  return generateMethodName(schema, field, "set", "Builder");
}
/**
 * Generates the name of a field Builder "has" method.
 *
 * @param schema the schema in which the field is defined.
 * @param field  the field for which to generate the "has" Builder method name.
 * @return the name of the "has" Builder method for the given field.
 */
public static String generateHasBuilderMethod(Schema schema, Field field) {
  // "has" + CamelCased field name + "Builder", e.g. hasMyFieldBuilder.
  return generateMethodName(schema, field, "has", "Builder");
}
/**
 * Generates a method name from a field name: prefix + CamelCased field name +
 * postfix, with a numeric "$N" suffix appended when another field's name
 * differs only in the case of its first letter or in leading underscores.
 *
 * @param schema  the schema in which the field is defined.
 * @param field   the field for which to generate the accessor name.
 * @param prefix  method name prefix, e.g. "get" or "set".
 * @param postfix method name postfix, e.g. "" or "Builder".
 * @return the generated method name.
 */
private static String generateMethodName(Schema schema, Field field, String prefix, String postfix) {
  // Check for the special case in which the schema defines two fields whose
  // names are identical except for the case of the first character:
  int indexNameConflict = calcNameIndex(field.name(), schema);
  StringBuilder methodBuilder = new StringBuilder(prefix);
  // Escape the field name when it collides with a reserved word; error
  // schemas use a stricter reserved-word set than plain records.
  String fieldName = mangle(field.name(), schema.isError() ? ERROR_RESERVED_WORDS : ACCESSOR_MUTATOR_RESERVED_WORDS,
      true);
  // Convert to CamelCase: underscores are dropped and the character that
  // follows each underscore (and the very first character) is upper-cased.
  boolean nextCharToUpper = true;
  for (int ii = 0; ii < fieldName.length(); ii++) {
    if (fieldName.charAt(ii) == '_') {
      nextCharToUpper = true;
    } else if (nextCharToUpper) {
      methodBuilder.append(Character.toUpperCase(fieldName.charAt(ii)));
      nextCharToUpper = false;
    } else {
      methodBuilder.append(fieldName.charAt(ii));
    }
  }
  methodBuilder.append(postfix);
  // If there is a field name conflict append $0 or $1 (a '$' separator is
  // only added when the name does not already end with one).
  if (indexNameConflict >= 0) {
    if (methodBuilder.charAt(methodBuilder.length() - 1) != '$') {
      methodBuilder.append('$');
    }
    methodBuilder.append(indexNameConflict);
  }
  return methodBuilder.toString();
}
/**
 * Calculates the name index for a getter/setter field in case of conflict. As
 * an example, a schema with fields __X, _X, _x, X, x results in indexes __X:
 * 3, _X: 2, _x: 1, X: 0, x: none (-1).
 *
 * @param fieldName field name.
 * @param schema    schema (queried for sibling fields with similar names).
 * @return index for the field, or -1 when no similarly-named field exists.
 */
private static int calcNameIndex(String fieldName, Schema schema) {
  // Strip leading underscores one at a time; at every step count the sibling
  // fields that share the stripped name, either verbatim or with the case of
  // the first letter flipped.
  int countSimilar = 0;
  String pureFieldName = fieldName;
  while (!pureFieldName.isEmpty() && pureFieldName.charAt(0) == '_') {
    pureFieldName = pureFieldName.substring(1);
    if (schema.getField(pureFieldName) != null) {
      countSimilar++;
    }
    String reversed = reverseFirstLetter(pureFieldName);
    if (schema.getField(reversed) != null) {
      countSimilar++;
    }
  }
  // A name whose first non-underscore letter is upper-case counts one more
  // when its case-flipped sibling also exists in the schema.
  String reversed = reverseFirstLetter(fieldName);
  if (!pureFieldName.isEmpty() && Character.isUpperCase(pureFieldName.charAt(0))
      && schema.getField(reversed) != null) {
    countSimilar++;
  }
  int ret = -1; // if no similar name, no index.
  if (countSimilar > 0) {
    ret = countSimilar - 1; // index is count similar -1 (start with $0)
  }
  return ret;
}
/**
 * Flips the case of the first non-underscore character of {@code name}, e.g.
 * {@code __Name <=> __name}.
 *
 * @param name input name.
 * @return the name with the case of its first letter inverted; unchanged when
 *         the name is empty or consists only of underscores.
 */
private static String reverseFirstLetter(String name) {
  // Skip the leading run of underscores.
  int i = 0;
  while (i < name.length() && name.charAt(i) == '_') {
    i++;
  }
  if (i == name.length()) {
    return name; // empty, or nothing but underscores.
  }
  char c = name.charAt(i);
  char flipped = Character.isLowerCase(c) ? Character.toUpperCase(c) : Character.toLowerCase(c);
  return name.substring(0, i) + flipped + name.substring(i + 1);
}
/**
 * Tests whether an unboxed Java type can be set to null.
 *
 * @return false for schema types that map to Java primitives (int, long,
 *         float, double, boolean); true for everything else.
 */
public static boolean isUnboxedJavaTypeNullable(Schema schema) {
  Schema.Type type = schema.getType();
  // Primitives can't be null; assume anything else can.
  boolean primitive = type == Schema.Type.INT || type == Schema.Type.LONG || type == Schema.Type.FLOAT
      || type == Schema.Type.DOUBLE || type == Schema.Type.BOOLEAN;
  return !primitive;
}
/**
 * Command-line entry point: compiles the Avro protocol file {@code args[0]}
 * into Java sources under the destination directory {@code args[1]}.
 */
public static void main(String[] args) throws Exception {
  // compileSchema(new File(args[0]), new File(args[1]));
  compileProtocol(new File(args[0]), new File(args[1]));
}
/**
 * Sets the character encoding for generated Java files.
 *
 * @param outputCharacterEncoding character encoding for output files (defaults
 *                                to the system encoding when never set)
 */
public void setOutputCharacterEncoding(String outputCharacterEncoding) {
  this.outputCharacterEncoding = outputCharacterEncoding;
}
/**
 * Returns the configured superclass name for generated specific classes.
 *
 * @param isError true to get the error-schema superclass, false for the
 *                record-schema superclass.
 */
public String getSchemaParentClass(boolean isError) {
  return isError ? this.errorSpecificClass : this.recordSpecificClass;
}
/** Sets the superclass used for generated (non-error) record classes. */
public void setRecordSpecificClass(final String recordSpecificClass) {
  this.recordSpecificClass = recordSpecificClass;
}
/** Sets the superclass used for generated error (exception) classes. */
public void setErrorSpecificClass(final String errorSpecificClass) {
  this.errorSpecificClass = errorSpecificClass;
}
}
| 7,572 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/schema/SchemaVisitorAction.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.schema;
/** Action returned by {@code SchemaVisitor} callbacks to steer a traversal. */
public enum SchemaVisitorAction {

  /**
   * Continue the visit.
   */
  CONTINUE,

  /**
   * Terminate the visit immediately; the visitor's current result is returned.
   */
  TERMINATE,

  /**
   * When returned from the pre-visit of a non-terminal, the children of the
   * non-terminal are skipped.
   *
   * <p>NOTE(review): this doc originally stated that afterVisitNonTerminal
   * for the current schema will not be invoked, but the traversal in
   * {@code Schemas.visitNonTerminal} does schedule the post-visit callback on
   * SKIP_SUBTREE — confirm which contract is intended.
   */
  SKIP_SUBTREE,

  /**
   * Skip visiting the siblings of this schema.
   */
  SKIP_SIBLINGS;
}
| 7,573 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/schema/Schemas.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.schema;
import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Deque;
import java.util.IdentityHashMap;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.avro.JsonProperties;
import org.apache.avro.LogicalType;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.compiler.specific.SpecificCompiler;
/**
 * Avro {@link Schema} utilities: attribute-copying helpers and an iterative,
 * visitor-driven depth-first schema traversal.
 */
public final class Schemas {

  private Schemas() {
    // Utility class; not instantiable.
  }

  /**
   * Copies all aliases from {@code from} to {@code to}. Only named types
   * (record, enum, fixed) can carry aliases; for other types this is a no-op.
   */
  public static void copyAliases(final Schema from, final Schema to) {
    switch (from.getType()) { // only named types.
    case RECORD:
    case ENUM:
    case FIXED:
      Set<String> aliases = from.getAliases();
      for (String alias : aliases) {
        to.addAlias(alias);
      }
      break;
    default:
      // Not a named type: nothing to copy.
      break;
    }
  }

  /** Copies all aliases from one field to another. */
  public static void copyAliases(final Schema.Field from, final Schema.Field to) {
    Set<String> aliases = from.aliases();
    for (String alias : aliases) {
      to.addAlias(alias);
    }
  }

  /** Copies the logical type, if any, from {@code from} onto {@code to}. */
  public static void copyLogicalTypes(final Schema from, final Schema to) {
    LogicalType logicalType = from.getLogicalType();
    if (logicalType != null) {
      logicalType.addToSchema(to);
    }
  }

  /** Copies every JSON property from {@code from} onto {@code to}. */
  public static void copyProperties(final JsonProperties from, final JsonProperties to) {
    from.forEachProperty(to::addProp);
  }

  /**
   * Returns whether a dedicated Java class is generated for this schema type:
   * enum, record and fixed are; all other types map to built-in Java types.
   */
  public static boolean hasGeneratedJavaClass(final Schema schema) {
    Schema.Type type = schema.getType();
    switch (type) {
    case ENUM:
    case RECORD:
    case FIXED:
      return true;
    default:
      return false;
    }
  }

  /**
   * Returns the fully qualified name of the generated Java class for
   * {@code schema}, mangling reserved words via {@link SpecificCompiler}.
   */
  public static String getJavaClassName(final Schema schema) {
    String namespace = schema.getNamespace();
    if (namespace == null) {
      return SpecificCompiler.mangle(schema.getName());
    }
    return namespace + '.' + SpecificCompiler.mangle(schema.getName());
  }

  /**
   * Depth-first visit.
   *
   * <p>The traversal is iterative: an explicit work stack holds both schemas
   * still to visit and {@link Supplier} closures representing pending
   * {@code afterVisitNonTerminal} callbacks. A schema encountered a second
   * time (a recursive record reference) is re-dispatched to
   * {@code visitTerminal}, so traversal terminates on recursive schemas.
   *
   * @param start   the schema to start traversing at.
   * @param visitor receives the visit callbacks and produces the result.
   * @return the value of {@code visitor.get()} once the visit completes or is
   *         terminated.
   */
  public static <T> T visit(final Schema start, final SchemaVisitor<T> visitor) {
    // Schemas already visited, keyed by identity (schemas may be recursive).
    IdentityHashMap<Schema, Schema> visited = new IdentityHashMap<>();
    // Work stack mixing Schema and Supplier<SchemaVisitorAction> entries; a
    // Deque<Either<Schema, Supplier<...>>> would express this more precisely
    // but costs an allocation per entry, which we want to avoid.
    Deque<Object> dq = new ArrayDeque<>();
    dq.addLast(start);
    Object current;
    while ((current = dq.pollLast()) != null) {
      if (current instanceof Supplier) {
        // Executing a non-terminal post-visit callback.
        @SuppressWarnings("unchecked") // only Supplier<SchemaVisitorAction> entries are ever pushed.
        SchemaVisitorAction action = ((Supplier<SchemaVisitorAction>) current).get();
        switch (action) {
        case CONTINUE:
          break;
        case SKIP_SUBTREE:
          // The subtree has already been visited by the time the post-visit runs.
          throw new UnsupportedOperationException();
        case SKIP_SIBLINGS:
          // Drop the remaining sibling schemas. Guard against an empty deque:
          // the unguarded getLast() here could throw NoSuchElementException.
          while (!dq.isEmpty() && dq.getLast() instanceof Schema) {
            dq.removeLast();
          }
          break;
        case TERMINATE:
          return visitor.get();
        default:
          throw new UnsupportedOperationException("Invalid action " + action);
        }
      } else {
        Schema schema = (Schema) current;
        boolean terminate;
        if (!visited.containsKey(schema)) {
          Schema.Type type = schema.getType();
          switch (type) {
          case ARRAY:
            terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getElementType()));
            visited.put(schema, schema);
            break;
          case RECORD:
            // Push the field schemas in reverse so they pop in declaration order.
            terminate = visitNonTerminal(visitor, schema, dq, () -> schema.getFields().stream().map(Field::schema)
                .collect(Collectors.toCollection(ArrayDeque::new)).descendingIterator());
            visited.put(schema, schema);
            break;
          case UNION:
            terminate = visitNonTerminal(visitor, schema, dq, schema.getTypes());
            visited.put(schema, schema);
            break;
          case MAP:
            terminate = visitNonTerminal(visitor, schema, dq, Collections.singleton(schema.getValueType()));
            visited.put(schema, schema);
            break;
          case NULL:
          case BOOLEAN:
          case BYTES:
          case DOUBLE:
          case ENUM:
          case FIXED:
          case FLOAT:
          case INT:
          case LONG:
          case STRING:
            terminate = visitTerminal(visitor, schema, dq);
            break;
          default:
            throw new UnsupportedOperationException("Invalid type " + type);
          }
        } else {
          // Already seen (recursive reference): treat it as a terminal.
          terminate = visitTerminal(visitor, schema, dq);
        }
        if (terminate) {
          return visitor.get();
        }
      }
    }
    return visitor.get();
  }

  /**
   * Pre-visits a non-terminal schema and, on CONTINUE, schedules its children
   * plus the post-visit callback.
   *
   * @return true when the whole visit must terminate.
   */
  private static boolean visitNonTerminal(final SchemaVisitor<?> visitor, final Schema schema, final Deque<Object> dq,
      final Iterable<Schema> itSupp) {
    SchemaVisitorAction action = visitor.visitNonTerminal(schema);
    switch (action) {
    case CONTINUE:
      // The post-visit callback goes below the children so it runs after them.
      dq.addLast((Supplier<SchemaVisitorAction>) () -> visitor.afterVisitNonTerminal(schema));
      for (Schema child : itSupp) {
        dq.addLast(child);
      }
      break;
    case SKIP_SUBTREE:
      // Children are skipped, but the post-visit callback still runs.
      dq.addLast((Supplier<SchemaVisitorAction>) () -> visitor.afterVisitNonTerminal(schema));
      break;
    case SKIP_SIBLINGS:
      while (!dq.isEmpty() && dq.getLast() instanceof Schema) {
        dq.removeLast();
      }
      break;
    case TERMINATE:
      return true;
    default:
      throw new UnsupportedOperationException("Invalid action " + action + " for " + schema);
    }
    return false;
  }

  /**
   * Visits a terminal schema (or a previously-visited non-terminal).
   *
   * @return true when the whole visit must terminate.
   */
  private static boolean visitTerminal(final SchemaVisitor<?> visitor, final Schema schema, final Deque<Object> dq) {
    SchemaVisitorAction action = visitor.visitTerminal(schema);
    switch (action) {
    case CONTINUE:
      break;
    case SKIP_SUBTREE:
      // A terminal has no subtree to skip.
      throw new UnsupportedOperationException("Invalid action " + action + " for " + schema);
    case SKIP_SIBLINGS:
      while (!dq.isEmpty() && dq.getLast() instanceof Schema) {
        dq.removeLast();
      }
      break;
    case TERMINATE:
      return true;
    default:
      throw new UnsupportedOperationException("Invalid action " + action + " for " + schema);
    }
    return false;
  }
}
| 7,574 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/schema/CloningVisitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.schema;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import org.apache.avro.Schema;
import static org.apache.avro.Schema.Type.RECORD;
/**
 * This visitor will create a clone of the original Schema, with docs and other
 * nonessential fields stripped by default. What attributes are copied is
 * customizable via {@link PropertyCopier}.
 */
public final class CloningVisitor implements SchemaVisitor<Schema> {

  // Maps each original schema (by identity) to its clone.
  private final IdentityHashMap<Schema, Schema> replace = new IdentityHashMap<>();

  // The schema whose clone is returned from get().
  private final Schema root;

  // Strategy for copying attributes (aliases, logical types, ...) onto clones.
  private final PropertyCopier copyProperties;

  // Whether doc strings are carried over to the clones.
  private final boolean copyDocs;

  /** Strategy for copying attributes from an original schema/field to its clone. */
  public interface PropertyCopier {
    void copy(Schema first, Schema second);

    void copy(Schema.Field first, Schema.Field second);
  }

  /**
   * Copies only the fields necessary for serialization (logical types and
   * aliases); docs are stripped.
   *
   * @param root the schema to clone.
   */
  public CloningVisitor(final Schema root) {
    this(new PropertyCopier() {
      @Override
      public void copy(final Schema first, final Schema second) {
        Schemas.copyLogicalTypes(first, second);
        Schemas.copyAliases(first, second);
      }

      @Override
      public void copy(final Schema.Field first, final Schema.Field second) {
        Schemas.copyAliases(first, second);
      }
    }, false, root);
  }

  /**
   * @param copyProperties strategy deciding which attributes are copied.
   * @param copyDocs       whether doc strings are preserved in the clone.
   * @param root           the schema to clone.
   */
  public CloningVisitor(final PropertyCopier copyProperties, final boolean copyDocs, final Schema root) {
    this.copyProperties = copyProperties;
    this.copyDocs = copyDocs;
    this.root = root;
  }

  @Override
  public SchemaVisitorAction visitTerminal(final Schema terminal) {
    Schema.Type type = terminal.getType();
    Schema newSchema;
    switch (type) {
    case RECORD: // recursion.
    case ARRAY:
    case MAP:
    case UNION:
      // A container type reaching visitTerminal means it was visited before
      // (recursive reference); its clone must already be in the map.
      if (!replace.containsKey(terminal)) {
        throw new IllegalStateException("Schema " + terminal + " must be already processed");
      }
      return SchemaVisitorAction.CONTINUE;
    case BOOLEAN:
    case BYTES:
    case DOUBLE:
    case FLOAT:
    case INT:
    case LONG:
    case NULL:
    case STRING:
      // Primitive types carry no name/doc; a bare copy suffices.
      newSchema = Schema.create(type);
      break;
    case ENUM:
      newSchema = Schema.createEnum(terminal.getName(), copyDocs ? terminal.getDoc() : null, terminal.getNamespace(),
          terminal.getEnumSymbols());
      break;
    case FIXED:
      newSchema = Schema.createFixed(terminal.getName(), copyDocs ? terminal.getDoc() : null, terminal.getNamespace(),
          terminal.getFixedSize());
      break;
    default:
      throw new IllegalStateException("Unsupported schema " + terminal);
    }
    copyProperties.copy(terminal, newSchema);
    replace.put(terminal, newSchema);
    return SchemaVisitorAction.CONTINUE;
  }

  @Override
  public SchemaVisitorAction visitNonTerminal(final Schema nt) {
    Schema.Type type = nt.getType();
    if (type == RECORD) {
      // Create the record clone before visiting its fields so that recursive
      // field references can resolve it from the replace map.
      Schema newSchema = Schema.createRecord(nt.getName(), copyDocs ? nt.getDoc() : null, nt.getNamespace(),
          nt.isError());
      copyProperties.copy(nt, newSchema);
      replace.put(nt, newSchema);
    }
    return SchemaVisitorAction.CONTINUE;
  }

  @Override
  public SchemaVisitorAction afterVisitNonTerminal(final Schema nt) {
    Schema.Type type = nt.getType();
    Schema newSchema;
    switch (type) {
    case RECORD:
      // The record clone already exists (created in visitNonTerminal); now
      // that all field schemas have clones, attach the cloned fields.
      newSchema = replace.get(nt);
      List<Schema.Field> fields = nt.getFields();
      List<Schema.Field> newFields = new ArrayList<>(fields.size());
      for (Schema.Field field : fields) {
        Schema.Field newField = new Schema.Field(field.name(), replace.get(field.schema()),
            copyDocs ? field.doc() : null, field.defaultVal(), field.order());
        copyProperties.copy(field, newField);
        newFields.add(newField);
      }
      newSchema.setFields(newFields);
      return SchemaVisitorAction.CONTINUE;
    case UNION:
      // Rebuild the container from the already-cloned child schemas.
      List<Schema> types = nt.getTypes();
      List<Schema> newTypes = new ArrayList<>(types.size());
      for (Schema sch : types) {
        newTypes.add(replace.get(sch));
      }
      newSchema = Schema.createUnion(newTypes);
      break;
    case ARRAY:
      newSchema = Schema.createArray(replace.get(nt.getElementType()));
      break;
    case MAP:
      newSchema = Schema.createMap(replace.get(nt.getValueType()));
      break;
    default:
      throw new IllegalStateException("Illegal type " + type + ", schema " + nt);
    }
    copyProperties.copy(nt, newSchema);
    replace.put(nt, newSchema);
    return SchemaVisitorAction.CONTINUE;
  }

  @Override
  public Schema get() {
    // The clone of the root schema, populated during the traversal.
    return replace.get(root);
  }

  @Override
  public String toString() {
    return "CloningVisitor{" + "replace=" + replace + ", root=" + root + '}';
  }
}
| 7,575 |
0 | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler | Create_ds/avro/lang/java/compiler/src/main/java/org/apache/avro/compiler/schema/SchemaVisitor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.schema;
import org.apache.avro.Schema;
/** Callback interface for the depth-first schema traversal in {@code Schemas#visit}. */
public interface SchemaVisitor<T> {

  /**
   * Invoked for schemas that do not have "child" schemas (like string, int ...)
   * or for a previously encountered schema with children, which will be treated
   * as a terminal (to avoid circular recursion).
   *
   * @param terminal the schema being visited.
   * @return the action directing how the traversal proceeds.
   */
  SchemaVisitorAction visitTerminal(Schema terminal);

  /**
   * Invoked for a schema with children before proceeding to visit the children.
   *
   * @param nonTerminal the schema being visited.
   * @return the action directing how the traversal proceeds.
   */
  SchemaVisitorAction visitNonTerminal(Schema nonTerminal);

  /**
   * Invoked for a schema with children after its children have been visited.
   *
   * @param nonTerminal the schema whose children were visited.
   * @return the action directing how the traversal proceeds.
   */
  SchemaVisitorAction afterVisitNonTerminal(Schema nonTerminal);

  /**
   * Invoked when visiting is complete.
   *
   * @return a value which will be returned by the visit method.
   */
  T get();
}
| 7,576 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtobuf.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificData;
import org.apache.commons.compress.utils.Lists;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import com.google.protobuf.ByteString;
import org.apache.avro.protobuf.noopt.Test.Foo;
import org.apache.avro.protobuf.noopt.Test.A;
import org.apache.avro.protobuf.noopt.Test.M.N;
/** Tests for {@code ProtobufData}: schema induction and binary round-trips of protobuf messages. */
public class TestProtobuf {
  // Round-trips a fully populated Foo message (including a nested Foo)
  // through Avro binary encoding and back.
  @Test
  void message() throws Exception {
    System.out.println(ProtobufData.get().getSchema(Foo.class).toString(true));
    Foo.Builder builder = Foo.newBuilder();
    builder.setInt32(0);
    builder.setInt64(2);
    builder.setUint32(3);
    builder.setUint64(4);
    builder.setSint32(5);
    builder.setSint64(6);
    builder.setFixed32(7);
    builder.setFixed64(8);
    builder.setSfixed32(9);
    builder.setSfixed64(10);
    builder.setFloat(1.0F);
    builder.setDouble(2.0);
    builder.setBool(true);
    builder.setString("foo");
    builder.setBytes(ByteString.copyFromUtf8("bar"));
    builder.setEnum(A.X);
    builder.addIntArray(27);
    builder.addSyms(A.Y);
    Foo fooInner = builder.build();
    Foo fooInArray = builder.build();
    builder = Foo.newBuilder(fooInArray);
    builder.addFooArray(fooInArray);
    com.google.protobuf.Timestamp ts = com.google.protobuf.Timestamp.newBuilder().setSeconds(1L).setNanos(2).build();
    builder.setTimestamp(ts);
    builder = Foo.newBuilder(fooInner);
    builder.setFoo(fooInner);
    Foo foo = builder.build();
    System.out.println(foo);
    // Encode with the protobuf-aware writer, then decode and compare.
    ByteArrayOutputStream bao = new ByteArrayOutputStream();
    ProtobufDatumWriter<Foo> w = new ProtobufDatumWriter<>(Foo.class);
    Encoder e = EncoderFactory.get().binaryEncoder(bao, null);
    w.write(foo, e);
    e.flush();
    Object o = new ProtobufDatumReader<>(Foo.class).read(null,
        DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(bao.toByteArray()), null));
    assertEquals(foo, o);
  }

  // A message whose repeated field is unset must round-trip to an empty array.
  @Test
  void messageWithEmptyArray() throws Exception {
    Foo foo = Foo.newBuilder().setInt32(5).setBool(true).build();
    ByteArrayOutputStream bao = new ByteArrayOutputStream();
    ProtobufDatumWriter<Foo> w = new ProtobufDatumWriter<>(Foo.class);
    Encoder e = EncoderFactory.get().binaryEncoder(bao, null);
    w.write(foo, e);
    e.flush();
    Foo o = new ProtobufDatumReader<>(Foo.class).read(null,
        DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(bao.toByteArray()), null));
    assertEquals(foo.getInt32(), o.getInt32());
    assertEquals(foo.getBool(), o.getBool());
    assertEquals(0, o.getFooArrayCount());
  }

  // Repeated fields default to an empty list in the induced schema.
  @Test
  void emptyArray() throws Exception {
    Schema s = ProtobufData.get().getSchema(Foo.class);
    assertEquals(s.getField("fooArray").defaultVal(), Lists.newArrayList());
  }

  // A nested enum maps back to its generated Java class.
  @Test
  void nestedEnum() throws Exception {
    Schema s = ProtobufData.get().getSchema(N.class);
    assertEquals(N.class.getName(), SpecificData.get().getClass(s).getName());
  }

  // A nested message's namespace is its enclosing generated class.
  @Test
  void nestedClassNamespace() throws Exception {
    Schema s = ProtobufData.get().getSchema(Foo.class);
    assertEquals(org.apache.avro.protobuf.noopt.Test.class.getName(), s.getNamespace());
  }

  // With java_multiple_files, top-level messages use the package as namespace
  // while nested ones use the enclosing class.
  @Test
  void classNamespaceInMultipleFiles() throws Exception {
    Schema fooSchema = ProtobufData.get().getSchema(org.apache.avro.protobuf.multiplefiles.Foo.class);
    assertEquals(org.apache.avro.protobuf.multiplefiles.Foo.class.getPackage().getName(), fooSchema.getNamespace());
    Schema nSchema = ProtobufData.get().getSchema(org.apache.avro.protobuf.multiplefiles.M.N.class);
    assertEquals(org.apache.avro.protobuf.multiplefiles.M.class.getName(), nSchema.getNamespace());
  }

  // The logical-type schema is produced only when the conversion is registered.
  @Test
  void getNonRepeatedSchemaWithLogicalType() throws Exception {
    ProtoConversions.TimestampMillisConversion conversion = new ProtoConversions.TimestampMillisConversion();
    // Don't convert to logical type if conversion isn't set
    ProtobufData instance1 = new ProtobufData();
    Schema s1 = instance1.getSchema(com.google.protobuf.Timestamp.class);
    assertNotEquals(conversion.getRecommendedSchema(), s1);
    // Convert to logical type if conversion is set
    ProtobufData instance2 = new ProtobufData();
    instance2.addLogicalTypeConversion(conversion);
    Schema s2 = instance2.getSchema(com.google.protobuf.Timestamp.class);
    assertEquals(conversion.getRecommendedSchema(), s2);
  }
}
| 7,577 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/TestProtoConversions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import com.google.protobuf.Timestamp;
import java.util.Calendar;
import java.util.TimeZone;
import org.apache.avro.Conversion;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.protobuf.ProtoConversions.TimestampMicrosConversion;
import org.apache.avro.protobuf.ProtoConversions.TimestampMillisConversion;
import org.apache.avro.reflect.ReflectData;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestProtoConversions {
// Schemas carrying the timestamp-millis/micros logical types; initialized in
// the @BeforeAll method below.
private static Schema TIMESTAMP_MILLIS_SCHEMA;
private static Schema TIMESTAMP_MICROS_SCHEMA;
// Two reference instants used throughout the tests: one before and one after
// the Unix epoch, to exercise negative-seconds handling.
private static Calendar Jan_2_1900_3_4_5_678 = Calendar.getInstance();
private static Calendar May_28_2015_21_46_53_221 = Calendar.getInstance();

static {
  // Pin both calendars to UTC so the expected epoch values are stable
  // regardless of the machine's default time zone.
  May_28_2015_21_46_53_221.setTimeZone(TimeZone.getTimeZone("UTC"));
  May_28_2015_21_46_53_221.set(2015, Calendar.MAY, 28, 21, 46, 53);
  May_28_2015_21_46_53_221.set(Calendar.MILLISECOND, 221);
  Jan_2_1900_3_4_5_678.setTimeZone(TimeZone.getTimeZone("UTC"));
  Jan_2_1900_3_4_5_678.set(1900, Calendar.JANUARY, 2, 3, 4, 5);
  Jan_2_1900_3_4_5_678.set(Calendar.MILLISECOND, 678);
}
// Builds the logical-type schemas once for the whole test class.
@BeforeAll
public static void createSchemas() {
  TestProtoConversions.TIMESTAMP_MILLIS_SCHEMA = LogicalTypes.timestampMillis()
      .addToSchema(Schema.create(Schema.Type.LONG));
  TestProtoConversions.TIMESTAMP_MICROS_SCHEMA = LogicalTypes.timestampMicros()
      .addToSchema(Schema.create(Schema.Type.LONG));
}
// Verifies long <-> protobuf Timestamp conversion at millisecond precision,
// round-tripping and comparing against hand-computed epoch values for both
// reference instants (including the pre-epoch, negative-seconds one).
@Test
void timestampMillisConversion() throws Exception {
  TimestampMillisConversion conversion = new TimestampMillisConversion();
  // 2015-05-28T21:46:53.221Z == 1432849613 s + 221,000,000 ns since epoch.
  Timestamp May_28_2015_21_46_53_221_ts = Timestamp.newBuilder().setSeconds(1432849613L).setNanos(221000000).build();
  // 1900-01-02T03:04:05.678Z: negative seconds, positive nanos.
  Timestamp Jan_2_1900_3_4_5_678_ts = Timestamp.newBuilder().setSeconds(-2208891355L).setNanos(678000000).build();
  long instant = May_28_2015_21_46_53_221.getTimeInMillis();
  Timestamp tsFromInstant = conversion.fromLong(instant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  long roundTrip = conversion.toLong(tsFromInstant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  assertEquals(instant, roundTrip, "Round-trip conversion should work");
  assertEquals(May_28_2015_21_46_53_221_ts,
      conversion.fromLong(instant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()),
      "Known timestamp should be correct");
  assertEquals(instant,
      (long) conversion.toLong(May_28_2015_21_46_53_221_ts, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()),
      "Known timestamp should be correct");
  instant = Jan_2_1900_3_4_5_678.getTimeInMillis();
  tsFromInstant = conversion.fromLong(instant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  roundTrip = conversion.toLong(tsFromInstant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  assertEquals(instant, roundTrip, "Round-trip conversion should work");
  assertEquals(Jan_2_1900_3_4_5_678_ts,
      conversion.fromLong(instant, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()),
      "Known timestamp should be correct");
  assertEquals(instant,
      (long) conversion.toLong(Jan_2_1900_3_4_5_678_ts, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis()),
      "Known timestamp should be correct");
}
// Same as timestampMillisConversion but at microsecond precision; the extra
// sub-millisecond digits (843 and 901 microseconds) exercise the micros path.
@Test
void timestampMicrosConversion() {
  TimestampMicrosConversion conversion = new TimestampMicrosConversion();
  // 2015-05-28T21:46:53.221843Z == 1432849613 s + 221,843,000 ns since epoch.
  Timestamp May_28_2015_21_46_53_221_843_ts = Timestamp.newBuilder().setSeconds(1432849613L).setNanos(221843000)
      .build();
  // 1900-01-02T03:04:05.678901Z: negative seconds, positive nanos.
  Timestamp Jan_2_1900_3_4_5_678_901_ts = Timestamp.newBuilder().setSeconds(-2208891355L).setNanos(678901000).build();
  long instant = May_28_2015_21_46_53_221.getTimeInMillis() * 1000 + 843;
  Timestamp tsFromInstant = conversion.fromLong(instant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  long roundTrip = conversion.toLong(tsFromInstant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  assertEquals(instant, roundTrip, "Round-trip conversion should work");
  assertEquals(May_28_2015_21_46_53_221_843_ts,
      conversion.fromLong(instant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()),
      "Known timestamp should be correct");
  assertEquals(instant, (long) conversion.toLong(May_28_2015_21_46_53_221_843_ts, TIMESTAMP_MICROS_SCHEMA,
      LogicalTypes.timestampMicros()), "Known timestamp should be correct");
  instant = Jan_2_1900_3_4_5_678.getTimeInMillis() * 1000 + 901;
  tsFromInstant = conversion.fromLong(instant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  roundTrip = conversion.toLong(tsFromInstant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  assertEquals(instant, roundTrip, "Round-trip conversion should work");
  assertEquals(Jan_2_1900_3_4_5_678_901_ts,
      conversion.fromLong(instant, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()),
      "Known timestamp should be correct");
  assertEquals(instant,
      (long) conversion.toLong(Jan_2_1900_3_4_5_678_901_ts, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros()),
      "Known timestamp should be correct");
}
/**
 * fromLong must reject a milliseconds value whose seconds component falls
 * below the protobuf Timestamp lower bound
 * ({@code ProtoConversions.SECONDS_LOWERLIMIT}).
 */
@Test
void timestampMillisConversionSecondsLowerLimit() {
  // No checked exception can escape: the expected failure is raised inside
  // the lambda and captured by assertThrows, so the redundant
  // "throws Exception" clause has been dropped.
  assertThrows(IllegalArgumentException.class, () -> {
    TimestampMillisConversion conversion = new TimestampMillisConversion();
    // One second below the supported range, expressed in milliseconds.
    long exceeded = (ProtoConversions.SECONDS_LOWERLIMIT - 1) * 1000;
    conversion.fromLong(exceeded, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  });
}
/**
 * fromLong must reject a milliseconds value whose seconds component exceeds
 * the protobuf Timestamp upper bound
 * ({@code ProtoConversions.SECONDS_UPPERLIMIT}).
 */
@Test
void timestampMillisConversionSecondsUpperLimit() {
  // Redundant "throws Exception" removed: the failure is raised inside the
  // lambda and captured by assertThrows.
  assertThrows(IllegalArgumentException.class, () -> {
    TimestampMillisConversion conversion = new TimestampMillisConversion();
    // One second above the supported range, expressed in milliseconds.
    long exceeded = (ProtoConversions.SECONDS_UPPERLIMIT + 1) * 1000;
    conversion.fromLong(exceeded, TIMESTAMP_MILLIS_SCHEMA, LogicalTypes.timestampMillis());
  });
}
/**
 * fromLong must reject a microseconds value whose seconds component falls
 * below the protobuf Timestamp lower bound
 * ({@code ProtoConversions.SECONDS_LOWERLIMIT}).
 */
@Test
void timestampMicrosConversionSecondsLowerLimit() {
  // Redundant "throws Exception" removed: the failure is raised inside the
  // lambda and captured by assertThrows.
  assertThrows(IllegalArgumentException.class, () -> {
    TimestampMicrosConversion conversion = new TimestampMicrosConversion();
    // One second below the supported range, expressed in microseconds.
    long exceeded = (ProtoConversions.SECONDS_LOWERLIMIT - 1) * 1000000;
    conversion.fromLong(exceeded, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  });
}
/**
 * fromLong must reject a microseconds value whose seconds component exceeds
 * the protobuf Timestamp upper bound
 * ({@code ProtoConversions.SECONDS_UPPERLIMIT}).
 */
@Test
void timestampMicrosConversionSecondsUpperLimit() {
  // Redundant "throws Exception" removed: the failure is raised inside the
  // lambda and captured by assertThrows.
  assertThrows(IllegalArgumentException.class, () -> {
    TimestampMicrosConversion conversion = new TimestampMicrosConversion();
    // One second above the supported range, expressed in microseconds.
    long exceeded = (ProtoConversions.SECONDS_UPPERLIMIT + 1) * 1000000;
    conversion.fromLong(exceeded, TIMESTAMP_MICROS_SCHEMA, LogicalTypes.timestampMicros());
  });
}
/*
 * Example of registering these conversions on a reflective data model:
 *   model.addLogicalTypeConversion(new ProtoConversions.TimeMicrosConversion());
 *   model.addLogicalTypeConversion(new ProtoConversions.TimestampMicrosConversion());
 */
@Test
void dynamicSchemaWithDateTimeConversion() throws ClassNotFoundException {
  // Reflecting protobuf's Timestamp with the millis conversion registered
  // must yield the timestamp-millis logical-type schema.
  Schema reflected = getReflectedSchemaByName("com.google.protobuf.Timestamp", new TimestampMillisConversion());
  assertEquals(TIMESTAMP_MILLIS_SCHEMA, reflected, "Reflected schema should be logicalType timestampMillis");
}
@Test
void dynamicSchemaWithDateTimeMicrosConversion() throws ClassNotFoundException {
  // Reflecting protobuf's Timestamp with the micros conversion registered
  // must yield the timestamp-micros logical-type schema.
  Schema reflected = getReflectedSchemaByName("com.google.protobuf.Timestamp", new TimestampMicrosConversion());
  assertEquals(TIMESTAMP_MICROS_SCHEMA, reflected, "Reflected schema should be logicalType timestampMicros");
}
/**
 * Reflects the Avro schema of the class named by {@code className}, using a
 * fresh ReflectData model with the given logical-type conversion installed.
 */
private Schema getReflectedSchemaByName(String className, Conversion<?> conversion) throws ClassNotFoundException {
  ReflectData model = new ReflectData();
  model.addLogicalTypeConversion(conversion);
  // Resolve the fully qualified class name and reflect its schema.
  return model.getSchema(Class.forName(className));
}
}
| 7,578 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/TestMultipleFiles.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
 * Outer holder class generated by protoc from
 * {@code src/test/protobuf/test_multiple_files.proto}. Because the proto
 * sets {@code java_multiple_files}, the message/enum classes live in their
 * own files; this class only carries the file descriptor and the reflective
 * field-accessor tables they share.
 */
public final class TestMultipleFiles {
  // Not instantiable: pure static descriptor holder.
  private TestMultipleFiles() {
  }

  /** No extensions are declared in this proto file, so this is a no-op. */
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {
  }

  /** Delegates to the lite overload; also a no-op for this file. */
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }

  // Descriptor and reflective field-accessor table for message Foo.
  static final com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_org_apache_avro_protobuf_multiplefiles_Foo_fieldAccessorTable;
  // Descriptor and reflective field-accessor table for message M.
  static final com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_org_apache_avro_protobuf_multiplefiles_M_fieldAccessorTable;

  /** Returns the file descriptor for test_multiple_files.proto. */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }

  // Assigned once by the static initializer below.
  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
  static {
    // Serialized FileDescriptorProto, chunked into string literals by protoc.
    // The escape sequences are byte-exact wire data: do not reformat them.
    java.lang.String[] descriptorData = { "\n+src/test/protobuf/test_multiple_files."
        + "proto\022&org.apache.avro.protobuf.multiple" + "files\032\037google/protobuf/timestamp.proto\"\244"
        + "\004\n\003Foo\022\r\n\005int32\030\001 \002(\005\022\r\n\005int64\030\002 \001(\003\022\016\n\006"
        + "uint32\030\003 \001(\r\022\016\n\006uint64\030\004 \001(\004\022\016\n\006sint32\030\005"
        + " \001(\021\022\016\n\006sint64\030\006 \001(\022\022\017\n\007fixed32\030\007 \001(\007\022\017\n"
        + "\007fixed64\030\010 \001(\006\022\020\n\010sfixed32\030\t \001(\017\022\020\n\010sfix"
        + "ed64\030\n \001(\020\022\r\n\005float\030\013 \001(\002\022\016\n\006double\030\014 \001("
        + "\001\022\014\n\004bool\030\r \001(\010\022\016\n\006string\030\016 \001(\t\022\r\n\005bytes"
        + "\030\017 \001(\014\022:\n\004enum\030\020 \001(\0162).org.apache.avro.p"
        + "rotobuf.multiplefiles.A:\001Z\022\020\n\010intArray\030\021"
        + " \003(\005\022=\n\010fooArray\030\024 \003(\0132+.org.apache.avro"
        + ".protobuf.multiplefiles.Foo\0227\n\004syms\030\023 \003(" + "\0162).org.apache.avro.protobuf.multiplefil"
        + "es.A\0228\n\003foo\030\022 \001(\0132+.org.apache.avro.prot"
        + "obuf.multiplefiles.Foo\022-\n\ttimestamp\030\025 \001("
        + "\0132\032.google.protobuf.Timestamp\"\017\n\001M\"\n\n\001N\022"
        + "\005\n\001A\020\001*\030\n\001A\022\005\n\001X\020\001\022\005\n\001Y\020\002\022\005\n\001Z\020\003B\002P\001" };
    // Callback that captures the built FileDescriptor into the static field.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
      public com.google.protobuf.ExtensionRegistry assignDescriptors(
          com.google.protobuf.Descriptors.FileDescriptor root) {
        descriptor = root;
        return null;
      }
    };
    // Build this file's descriptor with its one dependency (timestamp.proto).
    com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.protobuf.TimestampProto.getDescriptor(), },
        assigner);
    // Message index 0 is Foo; the string array lists its Java field names in
    // field-number order for reflective access.
    internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor = getDescriptor().getMessageTypes().get(0);
    internal_static_org_apache_avro_protobuf_multiplefiles_Foo_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor,
        new java.lang.String[] { "Int32", "Int64", "Uint32", "Uint64", "Sint32", "Sint64", "Fixed32", "Fixed64",
            "Sfixed32", "Sfixed64", "Float", "Double", "Bool", "String", "Bytes", "Enum", "IntArray", "FooArray",
            "Syms", "Foo", "Timestamp", });
    // Message index 1 is M, which has no fields.
    internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor = getDescriptor().getMessageTypes().get(1);
    internal_static_org_apache_avro_protobuf_multiplefiles_M_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor, new java.lang.String[] {});
    // Force static initialization of the dependency's descriptor.
    com.google.protobuf.TimestampProto.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
| 7,579 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/FooOrBuilder.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
 * Read-only accessor contract for the generated message
 * {@code org.apache.avro.protobuf.multiplefiles.Foo}, implemented by both the
 * message class and its builder. For each proto field there is a has/get pair
 * (singular fields) or count/index/list accessors (repeated fields).
 */
public interface FooOrBuilder extends
    // @@protoc_insertion_point(interface_extends:org.apache.avro.protobuf.multiplefiles.Foo)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * all the primitive types
   * </pre>
   *
   * <code>required int32 int32 = 1;</code>
   */
  boolean hasInt32();

  /**
   * <pre>
   * all the primitive types
   * </pre>
   *
   * <code>required int32 int32 = 1;</code>
   */
  int getInt32();

  /**
   * <code>optional int64 int64 = 2;</code>
   */
  boolean hasInt64();

  /**
   * <code>optional int64 int64 = 2;</code>
   */
  long getInt64();

  /**
   * <code>optional uint32 uint32 = 3;</code>
   */
  boolean hasUint32();

  /**
   * <code>optional uint32 uint32 = 3;</code>
   */
  int getUint32();

  /**
   * <code>optional uint64 uint64 = 4;</code>
   */
  boolean hasUint64();

  /**
   * <code>optional uint64 uint64 = 4;</code>
   */
  long getUint64();

  /**
   * <code>optional sint32 sint32 = 5;</code>
   */
  boolean hasSint32();

  /**
   * <code>optional sint32 sint32 = 5;</code>
   */
  int getSint32();

  /**
   * <code>optional sint64 sint64 = 6;</code>
   */
  boolean hasSint64();

  /**
   * <code>optional sint64 sint64 = 6;</code>
   */
  long getSint64();

  /**
   * <code>optional fixed32 fixed32 = 7;</code>
   */
  boolean hasFixed32();

  /**
   * <code>optional fixed32 fixed32 = 7;</code>
   */
  int getFixed32();

  /**
   * <code>optional fixed64 fixed64 = 8;</code>
   */
  boolean hasFixed64();

  /**
   * <code>optional fixed64 fixed64 = 8;</code>
   */
  long getFixed64();

  /**
   * <code>optional sfixed32 sfixed32 = 9;</code>
   */
  boolean hasSfixed32();

  /**
   * <code>optional sfixed32 sfixed32 = 9;</code>
   */
  int getSfixed32();

  /**
   * <code>optional sfixed64 sfixed64 = 10;</code>
   */
  boolean hasSfixed64();

  /**
   * <code>optional sfixed64 sfixed64 = 10;</code>
   */
  long getSfixed64();

  /**
   * <code>optional float float = 11;</code>
   */
  boolean hasFloat();

  /**
   * <code>optional float float = 11;</code>
   */
  float getFloat();

  /**
   * <code>optional double double = 12;</code>
   */
  boolean hasDouble();

  /**
   * <code>optional double double = 12;</code>
   */
  double getDouble();

  /**
   * <code>optional bool bool = 13;</code>
   */
  boolean hasBool();

  /**
   * <code>optional bool bool = 13;</code>
   */
  boolean getBool();

  /**
   * <code>optional string string = 14;</code>
   */
  boolean hasString();

  /**
   * <code>optional string string = 14;</code>
   */
  java.lang.String getString();

  /**
   * <code>optional string string = 14;</code>
   */
  com.google.protobuf.ByteString getStringBytes();

  /**
   * <code>optional bytes bytes = 15;</code>
   */
  boolean hasBytes();

  /**
   * <code>optional bytes bytes = 15;</code>
   */
  com.google.protobuf.ByteString getBytes();

  /**
   * <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
   */
  boolean hasEnum();

  /**
   * <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
   */
  org.apache.avro.protobuf.multiplefiles.A getEnum();

  /**
   * <pre>
   * some repeated types
   * </pre>
   *
   * <code>repeated int32 intArray = 17;</code>
   */
  java.util.List<java.lang.Integer> getIntArrayList();

  /**
   * <pre>
   * some repeated types
   * </pre>
   *
   * <code>repeated int32 intArray = 17;</code>
   */
  int getIntArrayCount();

  /**
   * <pre>
   * some repeated types
   * </pre>
   *
   * <code>repeated int32 intArray = 17;</code>
   */
  int getIntArray(int index);

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
   */
  java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> getFooArrayList();

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
   */
  org.apache.avro.protobuf.multiplefiles.Foo getFooArray(int index);

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
   */
  int getFooArrayCount();

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
   */
  java.util.List<? extends org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayOrBuilderList();

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
   */
  org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooArrayOrBuilder(int index);

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
   */
  java.util.List<org.apache.avro.protobuf.multiplefiles.A> getSymsList();

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
   */
  int getSymsCount();

  /**
   * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
   */
  org.apache.avro.protobuf.multiplefiles.A getSyms(int index);

  /**
   * <pre>
   * a recursive type
   * </pre>
   *
   * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
   */
  boolean hasFoo();

  /**
   * <pre>
   * a recursive type
   * </pre>
   *
   * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
   */
  org.apache.avro.protobuf.multiplefiles.Foo getFoo();

  /**
   * <pre>
   * a recursive type
   * </pre>
   *
   * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
   */
  org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooOrBuilder();

  /**
   * <pre>
   * a predefined message type
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
   */
  boolean hasTimestamp();

  /**
   * <pre>
   * a predefined message type
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
   */
  com.google.protobuf.Timestamp getTimestamp();

  /**
   * <pre>
   * a predefined message type
   * </pre>
   *
   * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
   */
  com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder();
}
| 7,580 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/Foo.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
* Protobuf type {@code org.apache.avro.protobuf.multiplefiles.Foo}
*/
public final class Foo extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.multiplefiles.Foo)
FooOrBuilder {
private static final long serialVersionUID = 0L;
// Use Foo.newBuilder() to construct.
private Foo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Foo() {
int32_ = 0;
int64_ = 0L;
uint32_ = 0;
uint64_ = 0L;
sint32_ = 0;
sint64_ = 0L;
fixed32_ = 0;
fixed64_ = 0L;
sfixed32_ = 0;
sfixed64_ = 0L;
float_ = 0F;
double_ = 0D;
bool_ = false;
string_ = "";
bytes_ = com.google.protobuf.ByteString.EMPTY;
enum_ = 3;
intArray_ = java.util.Collections.emptyList();
fooArray_ = java.util.Collections.emptyList();
syms_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Foo(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
int32_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
int64_ = input.readInt64();
break;
}
case 24: {
bitField0_ |= 0x00000004;
uint32_ = input.readUInt32();
break;
}
case 32: {
bitField0_ |= 0x00000008;
uint64_ = input.readUInt64();
break;
}
case 40: {
bitField0_ |= 0x00000010;
sint32_ = input.readSInt32();
break;
}
case 48: {
bitField0_ |= 0x00000020;
sint64_ = input.readSInt64();
break;
}
case 61: {
bitField0_ |= 0x00000040;
fixed32_ = input.readFixed32();
break;
}
case 65: {
bitField0_ |= 0x00000080;
fixed64_ = input.readFixed64();
break;
}
case 77: {
bitField0_ |= 0x00000100;
sfixed32_ = input.readSFixed32();
break;
}
case 81: {
bitField0_ |= 0x00000200;
sfixed64_ = input.readSFixed64();
break;
}
case 93: {
bitField0_ |= 0x00000400;
float_ = input.readFloat();
break;
}
case 97: {
bitField0_ |= 0x00000800;
double_ = input.readDouble();
break;
}
case 104: {
bitField0_ |= 0x00001000;
bool_ = input.readBool();
break;
}
case 114: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00002000;
string_ = bs;
break;
}
case 122: {
bitField0_ |= 0x00004000;
bytes_ = input.readBytes();
break;
}
case 128: {
int rawValue = input.readEnum();
org.apache.avro.protobuf.multiplefiles.A value = org.apache.avro.protobuf.multiplefiles.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(16, rawValue);
} else {
bitField0_ |= 0x00008000;
enum_ = rawValue;
}
break;
}
case 136: {
if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00010000;
}
intArray_.add(input.readInt32());
break;
}
case 138: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00010000) == 0x00010000) && input.getBytesUntilLimit() > 0) {
intArray_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00010000;
}
while (input.getBytesUntilLimit() > 0) {
intArray_.add(input.readInt32());
}
input.popLimit(limit);
break;
}
case 146: {
org.apache.avro.protobuf.multiplefiles.Foo.Builder subBuilder = null;
if (((bitField0_ & 0x00010000) == 0x00010000)) {
subBuilder = foo_.toBuilder();
}
foo_ = input.readMessage(org.apache.avro.protobuf.multiplefiles.Foo.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(foo_);
foo_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00010000;
break;
}
case 152: {
int rawValue = input.readEnum();
org.apache.avro.protobuf.multiplefiles.A value = org.apache.avro.protobuf.multiplefiles.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(19, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00040000;
}
syms_.add(rawValue);
}
break;
}
case 154: {
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while (input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
org.apache.avro.protobuf.multiplefiles.A value = org.apache.avro.protobuf.multiplefiles.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(19, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00040000;
}
syms_.add(rawValue);
}
}
input.popLimit(oldLimit);
break;
}
case 162: {
if (!((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.multiplefiles.Foo>();
mutable_bitField0_ |= 0x00020000;
}
fooArray_.add(input.readMessage(org.apache.avro.protobuf.multiplefiles.Foo.PARSER, extensionRegistry));
break;
}
case 170: {
com.google.protobuf.Timestamp.Builder subBuilder = null;
if (((bitField0_ & 0x00020000) == 0x00020000)) {
subBuilder = timestamp_.toBuilder();
}
timestamp_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timestamp_);
timestamp_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00020000;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = java.util.Collections.unmodifiableList(intArray_);
}
if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = java.util.Collections.unmodifiableList(syms_);
}
if (((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.multiplefiles.Foo.class,
org.apache.avro.protobuf.multiplefiles.Foo.Builder.class);
}
private int bitField0_;
public static final int INT32_FIELD_NUMBER = 1;
private int int32_;
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public int getInt32() {
return int32_;
}
public static final int INT64_FIELD_NUMBER = 2;
private long int64_;
/**
* <code>optional int64 int64 = 2;</code>
*/
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 int64 = 2;</code>
*/
public long getInt64() {
return int64_;
}
public static final int UINT32_FIELD_NUMBER = 3;
private int uint32_;
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public int getUint32() {
return uint32_;
}
public static final int UINT64_FIELD_NUMBER = 4;
private long uint64_;
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public long getUint64() {
return uint64_;
}
public static final int SINT32_FIELD_NUMBER = 5;
private int sint32_;
/**
* <code>optional sint32 sint32 = 5;</code>
*/
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional sint32 sint32 = 5;</code>
*/
public int getSint32() {
return sint32_;
}
public static final int SINT64_FIELD_NUMBER = 6;
private long sint64_;
/**
* <code>optional sint64 sint64 = 6;</code>
*/
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional sint64 sint64 = 6;</code>
*/
public long getSint64() {
return sint64_;
}
public static final int FIXED32_FIELD_NUMBER = 7;
private int fixed32_;
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
public boolean hasFixed32() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
public int getFixed32() {
return fixed32_;
}
public static final int FIXED64_FIELD_NUMBER = 8;
private long fixed64_;
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
public boolean hasFixed64() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
public long getFixed64() {
return fixed64_;
}
public static final int SFIXED32_FIELD_NUMBER = 9;
private int sfixed32_;
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
public boolean hasSfixed32() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
public int getSfixed32() {
return sfixed32_;
}
public static final int SFIXED64_FIELD_NUMBER = 10;
private long sfixed64_;
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
public boolean hasSfixed64() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
public long getSfixed64() {
return sfixed64_;
}
public static final int FLOAT_FIELD_NUMBER = 11;
private float float_;
/**
* <code>optional float float = 11;</code>
*/
public boolean hasFloat() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional float float = 11;</code>
*/
public float getFloat() {
return float_;
}
public static final int DOUBLE_FIELD_NUMBER = 12;
private double double_;
/**
* <code>optional double double = 12;</code>
*/
public boolean hasDouble() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional double double = 12;</code>
*/
public double getDouble() {
return double_;
}
public static final int BOOL_FIELD_NUMBER = 13;
private boolean bool_;
/**
* <code>optional bool bool = 13;</code>
*/
public boolean hasBool() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional bool bool = 13;</code>
*/
public boolean getBool() {
return bool_;
}
public static final int STRING_FIELD_NUMBER = 14;
private volatile java.lang.Object string_;
/**
* <code>optional string string = 14;</code>
*/
public boolean hasString() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* <code>optional string string = 14;</code>
*/
public java.lang.String getString() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
string_ = s;
}
return s;
}
}
/**
* <code>optional string string = 14;</code>
*/
public com.google.protobuf.ByteString getStringBytes() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
string_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BYTES_FIELD_NUMBER = 15;
private com.google.protobuf.ByteString bytes_;
/**
* <code>optional bytes bytes = 15;</code>
*/
public boolean hasBytes() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional bytes bytes = 15;</code>
*/
public com.google.protobuf.ByteString getBytes() {
return bytes_;
}
public static final int ENUM_FIELD_NUMBER = 16;
private int enum_;
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public boolean hasEnum() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public org.apache.avro.protobuf.multiplefiles.A getEnum() {
org.apache.avro.protobuf.multiplefiles.A result = org.apache.avro.protobuf.multiplefiles.A.valueOf(enum_);
return result == null ? org.apache.avro.protobuf.multiplefiles.A.Z : result;
}
public static final int INTARRAY_FIELD_NUMBER = 17;
private java.util.List<java.lang.Integer> intArray_;
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public java.util.List<java.lang.Integer> getIntArrayList() {
return intArray_;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArrayCount() {
return intArray_.size();
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArray(int index) {
return intArray_.get(index);
}
public static final int FOOARRAY_FIELD_NUMBER = 20;
private java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> fooArray_;
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> getFooArrayList() {
return fooArray_;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<? extends org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayOrBuilderList() {
return fooArray_;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public int getFooArrayCount() {
return fooArray_.size();
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo getFooArray(int index) {
return fooArray_.get(index);
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooArrayOrBuilder(int index) {
return fooArray_.get(index);
}
public static final int SYMS_FIELD_NUMBER = 19;
private java.util.List<java.lang.Integer> syms_;
private static final com.google.protobuf.Internal.ListAdapter.Converter<java.lang.Integer, org.apache.avro.protobuf.multiplefiles.A> syms_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter<java.lang.Integer, org.apache.avro.protobuf.multiplefiles.A>() {
public org.apache.avro.protobuf.multiplefiles.A convert(java.lang.Integer from) {
org.apache.avro.protobuf.multiplefiles.A result = org.apache.avro.protobuf.multiplefiles.A.valueOf(from);
return result == null ? org.apache.avro.protobuf.multiplefiles.A.X : result;
}
};
// --- Generated accessors for fields 18 (foo), 19 (syms) and 21 (timestamp). ---
// NOTE(review): generated by protoc; do not hand-edit logic — regenerate from the .proto.
/**
 * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
 */
public java.util.List<org.apache.avro.protobuf.multiplefiles.A> getSymsList() {
// syms_ stores raw enum numbers (Integers); the adapter converts lazily on access.
return new com.google.protobuf.Internal.ListAdapter<java.lang.Integer, org.apache.avro.protobuf.multiplefiles.A>(
syms_, syms_converter_);
}
/**
 * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
 */
public int getSymsCount() {
return syms_.size();
}
/**
 * <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
 */
public org.apache.avro.protobuf.multiplefiles.A getSyms(int index) {
return syms_converter_.convert(syms_.get(index));
}
public static final int FOO_FIELD_NUMBER = 18;
private org.apache.avro.protobuf.multiplefiles.Foo foo_;
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
 */
public boolean hasFoo() {
// Presence of optional message field 18 is tracked by bit 0x00010000 of bitField0_.
return ((bitField0_ & 0x00010000) == 0x00010000);
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
 */
public org.apache.avro.protobuf.multiplefiles.Foo getFoo() {
// Never returns null: falls back to the default instance when unset.
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
 */
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooOrBuilder() {
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
}
public static final int TIMESTAMP_FIELD_NUMBER = 21;
private com.google.protobuf.Timestamp timestamp_;
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public boolean hasTimestamp() {
// Presence bit 0x00020000 of bitField0_.
return ((bitField0_ & 0x00020000) == 0x00020000);
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public com.google.protobuf.Timestamp getTimestamp() {
// Never returns null: falls back to the default instance when unset.
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
// Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
/**
 * Returns true when all required fields (here: int32, field 1) are set and every
 * nested Foo (fooArray elements and the optional foo field) is itself initialized.
 * The result is cached in memoizedIsInitialized after the first computation.
 */
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1)
return true;
if (isInitialized == 0)
return false;
// Field 1 (int32) is declared required in the .proto.
if (!hasInt32()) {
memoizedIsInitialized = 0;
return false;
}
// Recursively check each repeated nested message.
for (int i = 0; i < getFooArrayCount(); i++) {
if (!getFooArray(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
// Recursively check the optional nested message only when present.
if (hasFoo()) {
if (!getFoo().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes this message to the given stream in ascending field-number order
 * (fields 1..21), which is the canonical protoc serialization order. Optional
 * fields are emitted only when their presence bit in bitField0_ is set;
 * repeated fields 17 (intArray) and 19 (syms) are written unpacked, one
 * tag/value pair per element. Unknown fields are appended last.
 */
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, int32_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, int64_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt32(3, uint32_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt64(4, uint64_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeSInt32(5, sint32_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeSInt64(6, sint64_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeFixed32(7, fixed32_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeFixed64(8, fixed64_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeSFixed32(9, sfixed32_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeSFixed64(10, sfixed64_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeFloat(11, float_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
output.writeDouble(12, double_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
output.writeBool(13, bool_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
// string_ may hold either a String or a lazily-decoded ByteString; the helper handles both.
com.google.protobuf.GeneratedMessageV3.writeString(output, 14, string_);
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
output.writeBytes(15, bytes_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
output.writeEnum(16, enum_);
}
for (int i = 0; i < intArray_.size(); i++) {
output.writeInt32(17, intArray_.get(i));
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
output.writeMessage(18, getFoo());
}
for (int i = 0; i < syms_.size(); i++) {
// syms_ stores raw enum numbers, written directly as enum varints.
output.writeEnum(19, syms_.get(i));
}
for (int i = 0; i < fooArray_.size(); i++) {
output.writeMessage(20, fooArray_.get(i));
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
output.writeMessage(21, getTimestamp());
}
unknownFields.writeTo(output);
}
/**
 * Computes (and memoizes in memoizedSize) the exact number of bytes writeTo
 * will produce. Each present field contributes tag + encoded value. For the
 * unpacked repeated fields 17 and 19, the per-element tag costs 2 bytes
 * (field numbers 16-2047 have 2-byte tags), hence the "2 * count" terms.
 */
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1)
return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, int32_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, int64_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream.computeUInt32Size(3, uint32_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream.computeUInt64Size(4, uint64_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream.computeSInt32Size(5, sint32_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream.computeSInt64Size(6, sint64_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream.computeFixed32Size(7, fixed32_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream.computeFixed64Size(8, fixed64_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream.computeSFixed32Size(9, sfixed32_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream.computeSFixed64Size(10, sfixed64_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(11, float_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
size += com.google.protobuf.CodedOutputStream.computeDoubleSize(12, double_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(13, bool_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(14, string_);
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
size += com.google.protobuf.CodedOutputStream.computeBytesSize(15, bytes_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(16, enum_);
}
{
// Repeated int32 field 17: payload bytes plus one 2-byte tag per element (unpacked).
int dataSize = 0;
for (int i = 0; i < intArray_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(intArray_.get(i));
}
size += dataSize;
size += 2 * getIntArrayList().size();
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getFoo());
}
{
// Repeated enum field 19: same unpacked layout as field 17.
int dataSize = 0;
for (int i = 0; i < syms_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(syms_.get(i));
}
size += dataSize;
size += 2 * syms_.size();
}
for (int i = 0; i < fooArray_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(20, fooArray_.get(i));
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(21, getTimestamp());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field equality: two Foo messages are equal when every field has the
 * same presence state and, where present, the same value; unknown fields must
 * also match. float/double are compared via their raw bit patterns
 * (floatToIntBits/doubleToLongBits), so NaN == NaN and -0.0 != 0.0 here,
 * keeping equals consistent with hashCode.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.avro.protobuf.multiplefiles.Foo)) {
return super.equals(obj);
}
org.apache.avro.protobuf.multiplefiles.Foo other = (org.apache.avro.protobuf.multiplefiles.Foo) obj;
boolean result = true;
result = result && (hasInt32() == other.hasInt32());
if (hasInt32()) {
result = result && (getInt32() == other.getInt32());
}
result = result && (hasInt64() == other.hasInt64());
if (hasInt64()) {
result = result && (getInt64() == other.getInt64());
}
result = result && (hasUint32() == other.hasUint32());
if (hasUint32()) {
result = result && (getUint32() == other.getUint32());
}
result = result && (hasUint64() == other.hasUint64());
if (hasUint64()) {
result = result && (getUint64() == other.getUint64());
}
result = result && (hasSint32() == other.hasSint32());
if (hasSint32()) {
result = result && (getSint32() == other.getSint32());
}
result = result && (hasSint64() == other.hasSint64());
if (hasSint64()) {
result = result && (getSint64() == other.getSint64());
}
result = result && (hasFixed32() == other.hasFixed32());
if (hasFixed32()) {
result = result && (getFixed32() == other.getFixed32());
}
result = result && (hasFixed64() == other.hasFixed64());
if (hasFixed64()) {
result = result && (getFixed64() == other.getFixed64());
}
result = result && (hasSfixed32() == other.hasSfixed32());
if (hasSfixed32()) {
result = result && (getSfixed32() == other.getSfixed32());
}
result = result && (hasSfixed64() == other.hasSfixed64());
if (hasSfixed64()) {
result = result && (getSfixed64() == other.getSfixed64());
}
result = result && (hasFloat() == other.hasFloat());
if (hasFloat()) {
result = result
&& (java.lang.Float.floatToIntBits(getFloat()) == java.lang.Float.floatToIntBits(other.getFloat()));
}
result = result && (hasDouble() == other.hasDouble());
if (hasDouble()) {
result = result
&& (java.lang.Double.doubleToLongBits(getDouble()) == java.lang.Double.doubleToLongBits(other.getDouble()));
}
result = result && (hasBool() == other.hasBool());
if (hasBool()) {
result = result && (getBool() == other.getBool());
}
result = result && (hasString() == other.hasString());
if (hasString()) {
result = result && getString().equals(other.getString());
}
result = result && (hasBytes() == other.hasBytes());
if (hasBytes()) {
result = result && getBytes().equals(other.getBytes());
}
result = result && (hasEnum() == other.hasEnum());
if (hasEnum()) {
// Enums compare by their raw wire number, not by the converted enum object.
result = result && enum_ == other.enum_;
}
result = result && getIntArrayList().equals(other.getIntArrayList());
result = result && getFooArrayList().equals(other.getFooArrayList());
// syms compares the underlying Integer lists (raw enum numbers).
result = result && syms_.equals(other.syms_);
result = result && (hasFoo() == other.hasFoo());
if (hasFoo()) {
result = result && getFoo().equals(other.getFoo());
}
result = result && (hasTimestamp() == other.hasTimestamp());
if (hasTimestamp()) {
result = result && getTimestamp().equals(other.getTimestamp());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
/**
 * Hash code consistent with equals: mixes the descriptor, every PRESENT field
 * (tagged with its field number so distinct fields with equal values hash
 * differently), and the unknown fields. The result is memoized; a computed
 * value of exactly 0 would be recomputed on every call (standard protoc quirk).
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInt32()) {
hash = (37 * hash) + INT32_FIELD_NUMBER;
hash = (53 * hash) + getInt32();
}
if (hasInt64()) {
hash = (37 * hash) + INT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getInt64());
}
if (hasUint32()) {
hash = (37 * hash) + UINT32_FIELD_NUMBER;
hash = (53 * hash) + getUint32();
}
if (hasUint64()) {
hash = (37 * hash) + UINT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getUint64());
}
if (hasSint32()) {
hash = (37 * hash) + SINT32_FIELD_NUMBER;
hash = (53 * hash) + getSint32();
}
if (hasSint64()) {
hash = (37 * hash) + SINT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSint64());
}
if (hasFixed32()) {
hash = (37 * hash) + FIXED32_FIELD_NUMBER;
hash = (53 * hash) + getFixed32();
}
if (hasFixed64()) {
hash = (37 * hash) + FIXED64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFixed64());
}
if (hasSfixed32()) {
hash = (37 * hash) + SFIXED32_FIELD_NUMBER;
hash = (53 * hash) + getSfixed32();
}
if (hasSfixed64()) {
hash = (37 * hash) + SFIXED64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSfixed64());
}
if (hasFloat()) {
// Bit-pattern hashing matches the bit-pattern comparison used in equals.
hash = (37 * hash) + FLOAT_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getFloat());
}
if (hasDouble()) {
hash = (37 * hash) + DOUBLE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(java.lang.Double.doubleToLongBits(getDouble()));
}
if (hasBool()) {
hash = (37 * hash) + BOOL_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getBool());
}
if (hasString()) {
hash = (37 * hash) + STRING_FIELD_NUMBER;
hash = (53 * hash) + getString().hashCode();
}
if (hasBytes()) {
hash = (37 * hash) + BYTES_FIELD_NUMBER;
hash = (53 * hash) + getBytes().hashCode();
}
if (hasEnum()) {
// Hashes the raw wire number, matching the raw comparison in equals.
hash = (37 * hash) + ENUM_FIELD_NUMBER;
hash = (53 * hash) + enum_;
}
if (getIntArrayCount() > 0) {
hash = (37 * hash) + INTARRAY_FIELD_NUMBER;
hash = (53 * hash) + getIntArrayList().hashCode();
}
if (getFooArrayCount() > 0) {
hash = (37 * hash) + FOOARRAY_FIELD_NUMBER;
hash = (53 * hash) + getFooArrayList().hashCode();
}
if (getSymsCount() > 0) {
hash = (37 * hash) + SYMS_FIELD_NUMBER;
hash = (53 * hash) + syms_.hashCode();
}
if (hasFoo()) {
hash = (37 * hash) + FOO_FIELD_NUMBER;
hash = (53 * hash) + getFoo().hashCode();
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + getTimestamp().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Static parse entry points. All overloads delegate to the generated PARSER;
// --- the stream-based variants route through GeneratedMessageV3 helpers that
// --- convert protocol errors into IOException-compatible forms.
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length prefix before the message bytes.
public static org.apache.avro.protobuf.multiplefiles.Foo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseDelimitedFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.multiplefiles.Foo parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
// --- Builder factory methods. Builders are created from (or merge) existing messages. ---
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.avro.protobuf.multiplefiles.Foo prototype) {
// Start from the default instance's builder, then copy the prototype's fields in.
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// The default instance yields a fresh empty builder; other instances pre-populate it.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.avro.protobuf.multiplefiles.Foo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:org.apache.avro.protobuf.multiplefiles.Foo)
org.apache.avro.protobuf.multiplefiles.FooOrBuilder {
// Descriptor for the Foo message type (shared with the outer message class).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor;
}
// Maps descriptor fields to the generated accessor methods for reflection-based access.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.multiplefiles.Foo.class,
org.apache.avro.protobuf.multiplefiles.Foo.Builder.class);
}
// Construct using org.apache.avro.protobuf.multiplefiles.Foo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create the nested-message field builders when the runtime mandates it
// (alwaysUseFieldBuilders is true only for nested builders that need change propagation).
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getFooArrayFieldBuilder();
getFooFieldBuilder();
getTimestampFieldBuilder();
}
}
/**
 * Resets every field to its proto default and clears all presence bits.
 * NOTE: the Builder uses its own bit layout (e.g. foo = 0x00080000,
 * timestamp = 0x00100000), which buildPartial() remaps to the message's
 * layout. Nested-message fields clear through their field builders when
 * those exist, otherwise by nulling/emptying the backing field directly.
 */
public Builder clear() {
super.clear();
int32_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
int64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
uint32_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
uint64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
sint32_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
sint64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000020);
fixed32_ = 0;
bitField0_ = (bitField0_ & ~0x00000040);
fixed64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000080);
sfixed32_ = 0;
bitField0_ = (bitField0_ & ~0x00000100);
sfixed64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000200);
float_ = 0F;
bitField0_ = (bitField0_ & ~0x00000400);
double_ = 0D;
bitField0_ = (bitField0_ & ~0x00000800);
bool_ = false;
bitField0_ = (bitField0_ & ~0x00001000);
string_ = "";
bitField0_ = (bitField0_ & ~0x00002000);
bytes_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00004000);
// 3 is the declared default wire number for the enum field in the .proto.
enum_ = 3;
bitField0_ = (bitField0_ & ~0x00008000);
intArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00010000);
if (fooArrayBuilder_ == null) {
fooArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00020000);
} else {
fooArrayBuilder_.clear();
}
syms_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
if (fooBuilder_ == null) {
foo_ = null;
} else {
fooBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00080000);
if (timestampBuilder_ == null) {
timestamp_ = null;
} else {
timestampBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00100000);
return this;
}
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_Foo_descriptor;
}
public org.apache.avro.protobuf.multiplefiles.Foo getDefaultInstanceForType() {
return org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance();
}
/**
 * Builds the message and enforces that all required fields are set,
 * throwing UninitializedMessageException otherwise (use buildPartial()
 * to skip that check).
 */
public org.apache.avro.protobuf.multiplefiles.Foo build() {
org.apache.avro.protobuf.multiplefiles.Foo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds the message without the required-field check. Copies each scalar
 * value unconditionally and translates the Builder's presence bits
 * (from_bitField0_) into the message's bit layout (to_bitField0_) — they
 * differ for the higher bits because repeated fields occupy Builder bits but
 * not message bits (e.g. builder 0x00080000/foo -> message 0x00010000).
 * Repeated lists are frozen via Collections.unmodifiableList before being
 * shared with the immutable message.
 */
public org.apache.avro.protobuf.multiplefiles.Foo buildPartial() {
org.apache.avro.protobuf.multiplefiles.Foo result = new org.apache.avro.protobuf.multiplefiles.Foo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.int32_ = int32_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.int64_ = int64_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.uint32_ = uint32_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.uint64_ = uint64_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.sint32_ = sint32_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.sint64_ = sint64_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.fixed32_ = fixed32_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
result.fixed64_ = fixed64_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
result.sfixed32_ = sfixed32_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
result.sfixed64_ = sfixed64_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.float_ = float_;
if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000800;
}
result.double_ = double_;
if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00001000;
}
result.bool_ = bool_;
if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
to_bitField0_ |= 0x00002000;
}
result.string_ = string_;
if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
to_bitField0_ |= 0x00004000;
}
result.bytes_ = bytes_;
if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
to_bitField0_ |= 0x00008000;
}
result.enum_ = enum_;
// Freeze intArray once, then clear the "mutable" bit so later builder edits re-copy.
if (((bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = java.util.Collections.unmodifiableList(intArray_);
bitField0_ = (bitField0_ & ~0x00010000);
}
result.intArray_ = intArray_;
if (fooArrayBuilder_ == null) {
if (((bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
bitField0_ = (bitField0_ & ~0x00020000);
}
result.fooArray_ = fooArray_;
} else {
result.fooArray_ = fooArrayBuilder_.build();
}
if (((bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = java.util.Collections.unmodifiableList(syms_);
bitField0_ = (bitField0_ & ~0x00040000);
}
result.syms_ = syms_;
// Builder bit 0x00080000 (foo) maps to message bit 0x00010000.
if (((from_bitField0_ & 0x00080000) == 0x00080000)) {
to_bitField0_ |= 0x00010000;
}
if (fooBuilder_ == null) {
result.foo_ = foo_;
} else {
result.foo_ = fooBuilder_.build();
}
// Builder bit 0x00100000 (timestamp) maps to message bit 0x00020000.
if (((from_bitField0_ & 0x00100000) == 0x00100000)) {
to_bitField0_ |= 0x00020000;
}
if (timestampBuilder_ == null) {
result.timestamp_ = timestamp_;
} else {
result.timestamp_ = timestampBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// --- Covariant-return delegates to the GeneratedMessageV3.Builder reflection API. ---
// Each simply narrows the superclass return type to this Builder.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index,
java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
/**
 * Dispatches merging: uses the fast typed path when the other message is a
 * Foo, otherwise falls back to the reflection-based merge in the superclass.
 */
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.avro.protobuf.multiplefiles.Foo) {
return mergeFrom((org.apache.avro.protobuf.multiplefiles.Foo) other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges another Foo into this builder: present scalar fields overwrite,
 * repeated fields append, nested messages merge recursively. Empty repeated
 * fields on this side adopt the other's (immutable) list by reference and
 * clear the mutable bit, deferring the copy until the next mutation.
 */
public Builder mergeFrom(org.apache.avro.protobuf.multiplefiles.Foo other) {
if (other == org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance())
return this;
if (other.hasInt32()) {
setInt32(other.getInt32());
}
if (other.hasInt64()) {
setInt64(other.getInt64());
}
if (other.hasUint32()) {
setUint32(other.getUint32());
}
if (other.hasUint64()) {
setUint64(other.getUint64());
}
if (other.hasSint32()) {
setSint32(other.getSint32());
}
if (other.hasSint64()) {
setSint64(other.getSint64());
}
if (other.hasFixed32()) {
setFixed32(other.getFixed32());
}
if (other.hasFixed64()) {
setFixed64(other.getFixed64());
}
if (other.hasSfixed32()) {
setSfixed32(other.getSfixed32());
}
if (other.hasSfixed64()) {
setSfixed64(other.getSfixed64());
}
if (other.hasFloat()) {
setFloat(other.getFloat());
}
if (other.hasDouble()) {
setDouble(other.getDouble());
}
if (other.hasBool()) {
setBool(other.getBool());
}
if (other.hasString()) {
// Copies string_ directly (may still be a lazily-decoded ByteString) to avoid
// forcing UTF-8 decoding; sets the presence bit and notifies listeners manually.
bitField0_ |= 0x00002000;
string_ = other.string_;
onChanged();
}
if (other.hasBytes()) {
setBytes(other.getBytes());
}
if (other.hasEnum()) {
setEnum(other.getEnum());
}
if (!other.intArray_.isEmpty()) {
if (intArray_.isEmpty()) {
// Adopt the other's immutable list; clear the mutable bit so a later add re-copies.
intArray_ = other.intArray_;
bitField0_ = (bitField0_ & ~0x00010000);
} else {
ensureIntArrayIsMutable();
intArray_.addAll(other.intArray_);
}
onChanged();
}
if (fooArrayBuilder_ == null) {
if (!other.fooArray_.isEmpty()) {
if (fooArray_.isEmpty()) {
fooArray_ = other.fooArray_;
bitField0_ = (bitField0_ & ~0x00020000);
} else {
ensureFooArrayIsMutable();
fooArray_.addAll(other.fooArray_);
}
onChanged();
}
} else {
if (!other.fooArray_.isEmpty()) {
if (fooArrayBuilder_.isEmpty()) {
// Replace the empty field builder with the other's list wholesale.
fooArrayBuilder_.dispose();
fooArrayBuilder_ = null;
fooArray_ = other.fooArray_;
bitField0_ = (bitField0_ & ~0x00020000);
fooArrayBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFooArrayFieldBuilder()
: null;
} else {
fooArrayBuilder_.addAllMessages(other.fooArray_);
}
}
}
if (!other.syms_.isEmpty()) {
if (syms_.isEmpty()) {
syms_ = other.syms_;
bitField0_ = (bitField0_ & ~0x00040000);
} else {
ensureSymsIsMutable();
syms_.addAll(other.syms_);
}
onChanged();
}
if (other.hasFoo()) {
mergeFoo(other.getFoo());
}
if (other.hasTimestamp()) {
mergeTimestamp(other.getTimestamp());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
/**
 * Same initialization rules as the message's isInitialized() — required int32
 * must be set and all nested Foo messages must be initialized — but computed
 * fresh each call (no memoization, since builder state is mutable).
 */
public final boolean isInitialized() {
if (!hasInt32()) {
return false;
}
for (int i = 0; i < getFooArrayCount(); i++) {
if (!getFooArray(i).isInitialized()) {
return false;
}
}
if (hasFoo()) {
if (!getFoo().isInitialized()) {
return false;
}
}
return true;
}
/**
 * Parses a Foo from the stream and merges it into this builder. On a protocol
 * error, the partially-parsed message (attached to the exception) is still
 * merged in the finally block before the error is rethrown as an IOException.
 */
public Builder mergeFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
org.apache.avro.protobuf.multiplefiles.Foo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.avro.protobuf.multiplefiles.Foo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// Presence bitmap for the Builder's fields (layout differs from the message's; see buildPartial).
private int bitField0_;
private int int32_;
/**
 * <pre>
 * all the primitive types
 * </pre>
 *
 * <code>required int32 int32 = 1;</code>
 */
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
 * <pre>
 * all the primitive types
 * </pre>
 *
 * <code>required int32 int32 = 1;</code>
 */
public int getInt32() {
return int32_;
}
/**
 * <pre>
 * all the primitive types
 * </pre>
 *
 * <code>required int32 int32 = 1;</code>
 */
public Builder setInt32(int value) {
// Record presence, store the value, and invalidate any cached built message.
bitField0_ |= 0x00000001;
int32_ = value;
onChanged();
return this;
}
/**
 * <pre>
 * all the primitive types
 * </pre>
 *
 * <code>required int32 int32 = 1;</code>
 */
public Builder clearInt32() {
bitField0_ = (bitField0_ & ~0x00000001);
int32_ = 0;
onChanged();
return this;
}
// --- Builder accessors for scalar fields 2-7 (int64, uint32, uint64, sint32, sint64, fixed32).
// --- Pattern per field: has* tests the presence bit, set* records it and calls onChanged(),
// --- clear* drops the bit and restores the proto default.
private long int64_;
/**
 * <code>optional int64 int64 = 2;</code>
 */
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
 * <code>optional int64 int64 = 2;</code>
 */
public long getInt64() {
return int64_;
}
/**
 * <code>optional int64 int64 = 2;</code>
 */
public Builder setInt64(long value) {
bitField0_ |= 0x00000002;
int64_ = value;
onChanged();
return this;
}
/**
 * <code>optional int64 int64 = 2;</code>
 */
public Builder clearInt64() {
bitField0_ = (bitField0_ & ~0x00000002);
int64_ = 0L;
onChanged();
return this;
}
private int uint32_;
/**
 * <code>optional uint32 uint32 = 3;</code>
 */
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
 * <code>optional uint32 uint32 = 3;</code>
 */
public int getUint32() {
return uint32_;
}
/**
 * <code>optional uint32 uint32 = 3;</code>
 */
public Builder setUint32(int value) {
bitField0_ |= 0x00000004;
uint32_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint32 uint32 = 3;</code>
 */
public Builder clearUint32() {
bitField0_ = (bitField0_ & ~0x00000004);
uint32_ = 0;
onChanged();
return this;
}
private long uint64_;
/**
 * <code>optional uint64 uint64 = 4;</code>
 */
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
 * <code>optional uint64 uint64 = 4;</code>
 */
public long getUint64() {
return uint64_;
}
/**
 * <code>optional uint64 uint64 = 4;</code>
 */
public Builder setUint64(long value) {
bitField0_ |= 0x00000008;
uint64_ = value;
onChanged();
return this;
}
/**
 * <code>optional uint64 uint64 = 4;</code>
 */
public Builder clearUint64() {
bitField0_ = (bitField0_ & ~0x00000008);
uint64_ = 0L;
onChanged();
return this;
}
private int sint32_;
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public int getSint32() {
return sint32_;
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public Builder setSint32(int value) {
bitField0_ |= 0x00000010;
sint32_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public Builder clearSint32() {
bitField0_ = (bitField0_ & ~0x00000010);
sint32_ = 0;
onChanged();
return this;
}
private long sint64_;
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public long getSint64() {
return sint64_;
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public Builder setSint64(long value) {
bitField0_ |= 0x00000020;
sint64_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public Builder clearSint64() {
bitField0_ = (bitField0_ & ~0x00000020);
sint64_ = 0L;
onChanged();
return this;
}
private int fixed32_;
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public boolean hasFixed32() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public int getFixed32() {
return fixed32_;
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public Builder setFixed32(int value) {
bitField0_ |= 0x00000040;
fixed32_ = value;
onChanged();
return this;
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public Builder clearFixed32() {
bitField0_ = (bitField0_ & ~0x00000040);
fixed32_ = 0;
onChanged();
return this;
}
// --- Builder accessors for scalar fields 8-13 (fixed64, sfixed32, sfixed64, float, double, bool)
// --- plus the backing field and has-method for string (field 14). Same has/get/set/clear pattern
// --- as the fields above, with per-field presence bits in bitField0_.
private long fixed64_;
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public boolean hasFixed64() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public long getFixed64() {
return fixed64_;
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public Builder setFixed64(long value) {
bitField0_ |= 0x00000080;
fixed64_ = value;
onChanged();
return this;
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public Builder clearFixed64() {
bitField0_ = (bitField0_ & ~0x00000080);
fixed64_ = 0L;
onChanged();
return this;
}
private int sfixed32_;
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public boolean hasSfixed32() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public int getSfixed32() {
return sfixed32_;
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public Builder setSfixed32(int value) {
bitField0_ |= 0x00000100;
sfixed32_ = value;
onChanged();
return this;
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public Builder clearSfixed32() {
bitField0_ = (bitField0_ & ~0x00000100);
sfixed32_ = 0;
onChanged();
return this;
}
private long sfixed64_;
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public boolean hasSfixed64() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public long getSfixed64() {
return sfixed64_;
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public Builder setSfixed64(long value) {
bitField0_ |= 0x00000200;
sfixed64_ = value;
onChanged();
return this;
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public Builder clearSfixed64() {
bitField0_ = (bitField0_ & ~0x00000200);
sfixed64_ = 0L;
onChanged();
return this;
}
private float float_;
/**
 * <code>optional float float = 11;</code>
 */
public boolean hasFloat() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
 * <code>optional float float = 11;</code>
 */
public float getFloat() {
return float_;
}
/**
 * <code>optional float float = 11;</code>
 */
public Builder setFloat(float value) {
bitField0_ |= 0x00000400;
float_ = value;
onChanged();
return this;
}
/**
 * <code>optional float float = 11;</code>
 */
public Builder clearFloat() {
bitField0_ = (bitField0_ & ~0x00000400);
float_ = 0F;
onChanged();
return this;
}
private double double_;
/**
 * <code>optional double double = 12;</code>
 */
public boolean hasDouble() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
 * <code>optional double double = 12;</code>
 */
public double getDouble() {
return double_;
}
/**
 * <code>optional double double = 12;</code>
 */
public Builder setDouble(double value) {
bitField0_ |= 0x00000800;
double_ = value;
onChanged();
return this;
}
/**
 * <code>optional double double = 12;</code>
 */
public Builder clearDouble() {
bitField0_ = (bitField0_ & ~0x00000800);
double_ = 0D;
onChanged();
return this;
}
private boolean bool_;
/**
 * <code>optional bool bool = 13;</code>
 */
public boolean hasBool() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
 * <code>optional bool bool = 13;</code>
 */
public boolean getBool() {
return bool_;
}
/**
 * <code>optional bool bool = 13;</code>
 */
public Builder setBool(boolean value) {
bitField0_ |= 0x00001000;
bool_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool bool = 13;</code>
 */
public Builder clearBool() {
bitField0_ = (bitField0_ & ~0x00001000);
bool_ = false;
onChanged();
return this;
}
// Holds either a String or a lazily-decoded ByteString (standard protoc representation).
private java.lang.Object string_ = "";
/**
 * <code>optional string string = 14;</code>
 */
public boolean hasString() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
      /**
       * <code>optional string string = 14;</code>
       *
       * <p>Returns the field as a {@code java.lang.String}. The backing field
       * {@code string_} may still hold the wire-form
       * {@link com.google.protobuf.ByteString}; in that case it is decoded as
       * UTF-8 on access, and the decoded value is cached back into
       * {@code string_} only when the bytes are valid UTF-8 — invalid input is
       * re-decoded on every call rather than cached.
       */
      public java.lang.String getString() {
        java.lang.Object ref = string_;
        if (!(ref instanceof java.lang.String)) {
          com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            // Cache the decoded form so subsequent calls skip the conversion.
            string_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
/**
* <code>optional string string = 14;</code>
*/
public com.google.protobuf.ByteString getStringBytes() {
java.lang.Object ref = string_;
if (ref instanceof String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
string_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string string = 14;</code>
*/
public Builder setString(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00002000;
string_ = value;
onChanged();
return this;
}
/**
* <code>optional string string = 14;</code>
*/
public Builder clearString() {
bitField0_ = (bitField0_ & ~0x00002000);
string_ = getDefaultInstance().getString();
onChanged();
return this;
}
/**
* <code>optional string string = 14;</code>
*/
public Builder setStringBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00002000;
string_ = value;
onChanged();
return this;
}
private com.google.protobuf.ByteString bytes_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes bytes = 15;</code>
*/
public boolean hasBytes() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional bytes bytes = 15;</code>
*/
public com.google.protobuf.ByteString getBytes() {
return bytes_;
}
/**
* <code>optional bytes bytes = 15;</code>
*/
public Builder setBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00004000;
bytes_ = value;
onChanged();
return this;
}
/**
* <code>optional bytes bytes = 15;</code>
*/
public Builder clearBytes() {
bitField0_ = (bitField0_ & ~0x00004000);
bytes_ = getDefaultInstance().getBytes();
onChanged();
return this;
}
private int enum_ = 3;
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public boolean hasEnum() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public org.apache.avro.protobuf.multiplefiles.A getEnum() {
org.apache.avro.protobuf.multiplefiles.A result = org.apache.avro.protobuf.multiplefiles.A.valueOf(enum_);
return result == null ? org.apache.avro.protobuf.multiplefiles.A.Z : result;
}
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public Builder setEnum(org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00008000;
enum_ = value.getNumber();
onChanged();
return this;
}
/**
* <code>optional .org.apache.avro.protobuf.multiplefiles.A enum = 16 [default = Z];</code>
*/
public Builder clearEnum() {
bitField0_ = (bitField0_ & ~0x00008000);
enum_ = 3;
onChanged();
return this;
}
private java.util.List<java.lang.Integer> intArray_ = java.util.Collections.emptyList();
private void ensureIntArrayIsMutable() {
if (!((bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = new java.util.ArrayList<java.lang.Integer>(intArray_);
bitField0_ |= 0x00010000;
}
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public java.util.List<java.lang.Integer> getIntArrayList() {
return java.util.Collections.unmodifiableList(intArray_);
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArrayCount() {
return intArray_.size();
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArray(int index) {
return intArray_.get(index);
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public Builder setIntArray(int index, int value) {
ensureIntArrayIsMutable();
intArray_.set(index, value);
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public Builder addIntArray(int value) {
ensureIntArrayIsMutable();
intArray_.add(value);
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public Builder addAllIntArray(java.lang.Iterable<? extends java.lang.Integer> values) {
ensureIntArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intArray_);
onChanged();
return this;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public Builder clearIntArray() {
intArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00010000);
onChanged();
return this;
}
private java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> fooArray_ = java.util.Collections.emptyList();
private void ensureFooArrayIsMutable() {
if (!((bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.multiplefiles.Foo>(fooArray_);
bitField0_ |= 0x00020000;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> fooArrayBuilder_;
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.Foo> getFooArrayList() {
if (fooArrayBuilder_ == null) {
return java.util.Collections.unmodifiableList(fooArray_);
} else {
return fooArrayBuilder_.getMessageList();
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public int getFooArrayCount() {
if (fooArrayBuilder_ == null) {
return fooArray_.size();
} else {
return fooArrayBuilder_.getCount();
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo getFooArray(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessage(index);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder setFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.set(index, value);
onChanged();
} else {
fooArrayBuilder_.setMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder setFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.set(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(value);
onChanged();
} else {
fooArrayBuilder_.addMessage(value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(index, value);
onChanged();
} else {
fooArrayBuilder_.addMessage(index, value);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addFooArray(int index, org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder addAllFooArray(java.lang.Iterable<? extends org.apache.avro.protobuf.multiplefiles.Foo> values) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fooArray_);
onChanged();
} else {
fooArrayBuilder_.addAllMessages(values);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder clearFooArray() {
if (fooArrayBuilder_ == null) {
fooArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00020000);
onChanged();
} else {
fooArrayBuilder_.clear();
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public Builder removeFooArray(int index) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.remove(index);
onChanged();
} else {
fooArrayBuilder_.remove(index);
}
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder getFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooArrayOrBuilder(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<? extends org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayOrBuilderList() {
if (fooArrayBuilder_ != null) {
return fooArrayBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fooArray_);
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder addFooArrayBuilder() {
return getFooArrayFieldBuilder().addBuilder(org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder addFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().addBuilder(index,
org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance());
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.Foo.Builder> getFooArrayBuilderList() {
return getFooArrayFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooArrayFieldBuilder() {
if (fooArrayBuilder_ == null) {
fooArrayBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder>(
fooArray_, ((bitField0_ & 0x00020000) == 0x00020000), getParentForChildren(), isClean());
fooArray_ = null;
}
return fooArrayBuilder_;
}
private java.util.List<java.lang.Integer> syms_ = java.util.Collections.emptyList();
private void ensureSymsIsMutable() {
if (!((bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>(syms_);
bitField0_ |= 0x00040000;
}
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public java.util.List<org.apache.avro.protobuf.multiplefiles.A> getSymsList() {
return new com.google.protobuf.Internal.ListAdapter<java.lang.Integer, org.apache.avro.protobuf.multiplefiles.A>(
syms_, syms_converter_);
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public int getSymsCount() {
return syms_.size();
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public org.apache.avro.protobuf.multiplefiles.A getSyms(int index) {
return syms_converter_.convert(syms_.get(index));
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public Builder setSyms(int index, org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.set(index, value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public Builder addSyms(org.apache.avro.protobuf.multiplefiles.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.add(value.getNumber());
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public Builder addAllSyms(java.lang.Iterable<? extends org.apache.avro.protobuf.multiplefiles.A> values) {
ensureSymsIsMutable();
for (org.apache.avro.protobuf.multiplefiles.A value : values) {
syms_.add(value.getNumber());
}
onChanged();
return this;
}
/**
* <code>repeated .org.apache.avro.protobuf.multiplefiles.A syms = 19;</code>
*/
public Builder clearSyms() {
syms_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
onChanged();
return this;
}
private org.apache.avro.protobuf.multiplefiles.Foo foo_ = null;
private com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> fooBuilder_;
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public boolean hasFoo() {
return ((bitField0_ & 0x00080000) == 0x00080000);
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo getFoo() {
if (fooBuilder_ == null) {
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
} else {
return fooBuilder_.getMessage();
}
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder setFoo(org.apache.avro.protobuf.multiplefiles.Foo value) {
if (fooBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
foo_ = value;
onChanged();
} else {
fooBuilder_.setMessage(value);
}
bitField0_ |= 0x00080000;
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder setFoo(org.apache.avro.protobuf.multiplefiles.Foo.Builder builderForValue) {
if (fooBuilder_ == null) {
foo_ = builderForValue.build();
onChanged();
} else {
fooBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00080000;
return this;
}
      /**
       * <pre>
       * a recursive type
       * </pre>
       *
       * <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
       *
       * <p>Merges {@code value} into the current field: if the field is already
       * set to a non-default message, the two are combined field-by-field via
       * {@code mergeFrom}; otherwise {@code value} simply replaces it. In either
       * case the has-bit for this field is set.
       */
      public Builder mergeFoo(org.apache.avro.protobuf.multiplefiles.Foo value) {
        if (fooBuilder_ == null) {
          // No lazy field builder yet — operate on the plain message field.
          if (((bitField0_ & 0x00080000) == 0x00080000) && foo_ != null
              && foo_ != org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance()) {
            foo_ = org.apache.avro.protobuf.multiplefiles.Foo.newBuilder(foo_).mergeFrom(value).buildPartial();
          } else {
            foo_ = value;
          }
          onChanged();
        } else {
          // Delegate to the nested builder once one has been materialized.
          fooBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00080000;
        return this;
      }
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public Builder clearFoo() {
if (fooBuilder_ == null) {
foo_ = null;
onChanged();
} else {
fooBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00080000);
return this;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.multiplefiles.Foo.Builder getFooBuilder() {
bitField0_ |= 0x00080000;
onChanged();
return getFooFieldBuilder().getBuilder();
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.multiplefiles.FooOrBuilder getFooOrBuilder() {
if (fooBuilder_ != null) {
return fooBuilder_.getMessageOrBuilder();
} else {
return foo_ == null ? org.apache.avro.protobuf.multiplefiles.Foo.getDefaultInstance() : foo_;
}
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.multiplefiles.Foo foo = 18;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder> getFooFieldBuilder() {
if (fooBuilder_ == null) {
fooBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.multiplefiles.Foo, org.apache.avro.protobuf.multiplefiles.Foo.Builder, org.apache.avro.protobuf.multiplefiles.FooOrBuilder>(
getFoo(), getParentForChildren(), isClean());
foo_ = null;
}
return fooBuilder_;
}
private com.google.protobuf.Timestamp timestamp_ = null;
private com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timestampBuilder_;
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00100000) == 0x00100000);
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.Timestamp getTimestamp() {
if (timestampBuilder_ == null) {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
} else {
return timestampBuilder_.getMessage();
}
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder setTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timestamp_ = value;
onChanged();
} else {
timestampBuilder_.setMessage(value);
}
bitField0_ |= 0x00100000;
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder setTimestamp(com.google.protobuf.Timestamp.Builder builderForValue) {
if (timestampBuilder_ == null) {
timestamp_ = builderForValue.build();
onChanged();
} else {
timestampBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00100000;
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder mergeTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
if (((bitField0_ & 0x00100000) == 0x00100000) && timestamp_ != null
&& timestamp_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
timestamp_ = com.google.protobuf.Timestamp.newBuilder(timestamp_).mergeFrom(value).buildPartial();
} else {
timestamp_ = value;
}
onChanged();
} else {
timestampBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00100000;
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public Builder clearTimestamp() {
if (timestampBuilder_ == null) {
timestamp_ = null;
onChanged();
} else {
timestampBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00100000);
return this;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.Timestamp.Builder getTimestampBuilder() {
bitField0_ |= 0x00100000;
onChanged();
return getTimestampFieldBuilder().getBuilder();
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() {
if (timestampBuilder_ != null) {
return timestampBuilder_.getMessageOrBuilder();
} else {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getTimestampFieldBuilder() {
if (timestampBuilder_ == null) {
timestampBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getTimestamp(), getParentForChildren(), isClean());
timestamp_ = null;
}
return timestampBuilder_;
}
      /** Replaces the builder's unknown-field set; delegates to the superclass. */
      public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }
      /** Merges the given unknown fields into the builder; delegates to the superclass. */
      public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }
// @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.multiplefiles.Foo)
}
// @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.multiplefiles.Foo)
  // Singleton instance with every field at its default value, created eagerly
  // when the class is loaded.
  private static final org.apache.avro.protobuf.multiplefiles.Foo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new org.apache.avro.protobuf.multiplefiles.Foo();
  }
  /** Returns the shared immutable default instance of {@code Foo}. */
  public static org.apache.avro.protobuf.multiplefiles.Foo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /**
   * Message parser for {@code Foo}.
   *
   * @deprecated the public field is kept only for generated-code backward
   *             compatibility; prefer {@link #parser()}.
   */
  @java.lang.Deprecated
  public static final com.google.protobuf.Parser<Foo> PARSER = new com.google.protobuf.AbstractParser<Foo>() {
    public Foo parsePartialFrom(com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // The parsing constructor reads the stream tag-by-tag.
      return new Foo(input, extensionRegistry);
    }
  };
  /** Preferred accessor for the {@code Foo} parser. */
  public static com.google.protobuf.Parser<Foo> parser() {
    return PARSER;
  }
  /** Returns the parser for this message type (the shared {@link #PARSER}). */
  @java.lang.Override
  public com.google.protobuf.Parser<Foo> getParserForType() {
    return PARSER;
  }
  /** Returns the default instance; same object as {@link #getDefaultInstance()}. */
  public org.apache.avro.protobuf.multiplefiles.Foo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
 * Read-only accessor interface for the generated {@code M} message. {@code M}
 * declares no fields of its own, so this interface adds nothing beyond the
 * base {@link com.google.protobuf.MessageOrBuilder} contract.
 */
public interface MOrBuilder extends
    // @@protoc_insertion_point(interface_extends:org.apache.avro.protobuf.multiplefiles.M)
    com.google.protobuf.MessageOrBuilder {
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
* <pre>
* a nested enum
* </pre>
*
* Protobuf type {@code org.apache.avro.protobuf.multiplefiles.M}
*/
public final class M extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.multiplefiles.M)
MOrBuilder {
private static final long serialVersionUID = 0L;
  // Use M.newBuilder() to construct.
  private M(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Private no-arg constructor used for the default instance and by parsing.
  private M() {
  }
  /** Returns the fields that were present on the wire but unknown to this schema. */
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  /**
   * Parsing constructor: reads {@code M} from the wire. Since {@code M} has no
   * declared fields, every non-zero tag is routed to the unknown-field set;
   * tag 0 marks end of input. The unknown fields are committed in the
   * {@code finally} block even when parsing fails part-way.
   */
  private M(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
        case 0:
          // Tag 0 signals end of the message.
          done = true;
          break;
        default: {
          // Unrecognized field: preserve it; stop if it cannot be skipped.
          if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Attach the partially-parsed message so callers can inspect it.
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  /** Returns the protobuf descriptor for {@code M}, defined in the outer file class. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor;
  }
  /** Wires the reflection-based field accessors to the {@code M} message and builder classes. */
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_M_fieldAccessorTable
        .ensureFieldAccessorsInitialized(org.apache.avro.protobuf.multiplefiles.M.class,
            org.apache.avro.protobuf.multiplefiles.M.Builder.class);
  }
  /**
   * Protobuf enum {@code org.apache.avro.protobuf.multiplefiles.M.N}
   *
   * <p>Single-value enum nested in {@code M}; its only constant {@code A} has
   * wire number 1.
   */
  public enum N implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <code>A = 1;</code>
     */
    A(1),;
    /**
     * <code>A = 1;</code>
     */
    public static final int A_VALUE = 1;
    /** Returns the wire (proto) number of this constant. */
    public final int getNumber() {
      return value;
    }
    /**
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static N valueOf(int value) {
      return forNumber(value);
    }
    /** Maps a wire number to the enum constant; returns {@code null} if unrecognized. */
    public static N forNumber(int value) {
      switch (value) {
      case 1:
        return A;
      default:
        return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<N> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<N> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<N>() {
      public N findValueByNumber(int number) {
        return N.forNumber(number);
      }
    };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    /** Returns the descriptor for this enum: the first enum type declared in {@code M}. */
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return org.apache.avro.protobuf.multiplefiles.M.getDescriptor().getEnumTypes().get(0);
    }
    private static final N[] VALUES = values();
    /** Resolves a descriptor to its constant; throws if the descriptor belongs to another enum. */
    public static N valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }
    // The wire number backing this constant.
    private final int value;
    private N(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.multiplefiles.M.N)
  }
  // Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /** Always true for {@code M} (no required fields); result is memoized. */
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1)
      return true;
    if (isInitialized == 0)
      return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes this message; {@code M} has no declared fields, so only unknown fields are written. */
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    unknownFields.writeTo(output);
  }
  /** Returns the serialized byte size (unknown fields only), memoized in {@code memoizedSize}. */
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1)
      return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Two {@code M} instances are equal when their unknown-field sets are equal. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.avro.protobuf.multiplefiles.M)) {
      return super.equals(obj);
    }
    org.apache.avro.protobuf.multiplefiles.M other = (org.apache.avro.protobuf.multiplefiles.M) obj;
    boolean result = true;
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }
  /** Hash over the descriptor and unknown fields, memoized (0 means "not yet computed"). */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parseFrom overloads. Each delegates to PARSER (or to the
  // GeneratedMessageV3 IO helpers, which wrap IOExceptions) for one input kind:
  // ByteBuffer, ByteString, byte[], InputStream (plain and length-delimited),
  // and CodedInputStream, each with and without an extension registry.
  // ---------------------------------------------------------------------------
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static org.apache.avro.protobuf.multiplefiles.M parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseDelimitedFrom(java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static org.apache.avro.protobuf.multiplefiles.M parseFrom(com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
  }
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.avro.protobuf.multiplefiles.M prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for the field-less message {@code org.apache.avro.protobuf.multiplefiles.M}.
 *
 * <p>NOTE(review): the generated comment here originally read "a nested enum", which
 * does not describe this message type — presumably a stale comment in the .proto
 * source; confirm against src/test/protobuf/test_multiple_files.proto.
 *
 * Protobuf type {@code org.apache.avro.protobuf.multiplefiles.M}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:org.apache.avro.protobuf.multiplefiles.M)
    org.apache.avro.protobuf.multiplefiles.MOrBuilder {
  // Descriptor for M, resolved through the outer generated file class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor;
  }

  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_M_fieldAccessorTable
        .ensureFieldAccessorsInitialized(org.apache.avro.protobuf.multiplefiles.M.class,
            org.apache.avro.protobuf.multiplefiles.M.Builder.class);
  }

  // Construct using org.apache.avro.protobuf.multiplefiles.M.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Effectively a no-op: M has no message/repeated fields, so there are no
  // nested field builders to eagerly initialize.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    }
  }

  public Builder clear() {
    super.clear();
    return this;
  }

  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.internal_static_org_apache_avro_protobuf_multiplefiles_M_descriptor;
  }

  public org.apache.avro.protobuf.multiplefiles.M getDefaultInstanceForType() {
    return org.apache.avro.protobuf.multiplefiles.M.getDefaultInstance();
  }

  // Builds and validates; throws if required fields are unset (none exist for M,
  // so this never throws in practice).
  public org.apache.avro.protobuf.multiplefiles.M build() {
    org.apache.avro.protobuf.multiplefiles.M result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  public org.apache.avro.protobuf.multiplefiles.M buildPartial() {
    org.apache.avro.protobuf.multiplefiles.M result = new org.apache.avro.protobuf.multiplefiles.M(this);
    onBuilt();
    return result;
  }

  public Builder clone() {
    return (Builder) super.clone();
  }

  // Reflective field mutators: delegate to the superclass, narrowing the
  // return type to this Builder for call chaining.
  public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return (Builder) super.setField(field, value);
  }

  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return (Builder) super.clearField(field);
  }

  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return (Builder) super.clearOneof(oneof);
  }

  public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index,
      java.lang.Object value) {
    return (Builder) super.setRepeatedField(field, index, value);
  }

  public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return (Builder) super.addRepeatedField(field, value);
  }

  // Typed dispatch: merge M-to-M directly, anything else via reflection.
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.avro.protobuf.multiplefiles.M) {
      return mergeFrom((org.apache.avro.protobuf.multiplefiles.M) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merging the default instance is a no-op; otherwise only unknown fields carry over.
  public Builder mergeFrom(org.apache.avro.protobuf.multiplefiles.M other) {
    if (other == org.apache.avro.protobuf.multiplefiles.M.getDefaultInstance())
      return this;
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }

  // M has no required fields, so any state is initialized.
  public final boolean isInitialized() {
    return true;
  }

  // Parses from a stream; on failure, whatever was parsed before the error is
  // still merged in (the finally block) before the exception propagates.
  public Builder mergeFrom(com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
    org.apache.avro.protobuf.multiplefiles.M parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.avro.protobuf.multiplefiles.M) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.multiplefiles.M)
}
// @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.multiplefiles.M)
// Singleton default (empty) instance of M, created eagerly at class load.
private static final org.apache.avro.protobuf.multiplefiles.M DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new org.apache.avro.protobuf.multiplefiles.M();
}

public static org.apache.avro.protobuf.multiplefiles.M getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Deprecated as a public field per protobuf convention — callers should use
// parser() instead of touching PARSER directly.
@java.lang.Deprecated
public static final com.google.protobuf.Parser<M> PARSER = new com.google.protobuf.AbstractParser<M>() {
  public M parsePartialFrom(com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new M(input, extensionRegistry);
  }
};

public static com.google.protobuf.Parser<M> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<M> getParserForType() {
  return PARSER;
}

public org.apache.avro.protobuf.multiplefiles.M getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| 7,583 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/multiplefiles/A.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test_multiple_files.proto
package org.apache.avro.protobuf.multiplefiles;
/**
 * <pre>
 * an enum
 * </pre>
 *
 * Protobuf enum {@code org.apache.avro.protobuf.multiplefiles.A}
 */
public enum A implements com.google.protobuf.ProtocolMessageEnum {
  /**
   * <code>X = 1;</code>
   */
  X(1),
  /**
   * <code>Y = 2;</code>
   */
  Y(2),
  /**
   * <code>Z = 3;</code>
   */
  Z(3),;
  /**
   * <code>X = 1;</code>
   */
  public static final int X_VALUE = 1;
  /**
   * <code>Y = 2;</code>
   */
  public static final int Y_VALUE = 2;
  /**
   * <code>Z = 3;</code>
   */
  public static final int Z_VALUE = 3;

  // Returns the wire value of this enum constant (1-based in the .proto).
  public final int getNumber() {
    return value;
  }

  /**
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static A valueOf(int value) {
    return forNumber(value);
  }

  // Maps a wire value to its constant; returns null for unrecognized values.
  public static A forNumber(int value) {
    switch (value) {
    case 1:
      return X;
    case 2:
      return Y;
    case 3:
      return Z;
    default:
      return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<A> internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<A> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<A>() {
    public A findValueByNumber(int number) {
      return A.forNumber(number);
    }
  };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  // First (index 0) enum type declared in test_multiple_files.proto.
  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return org.apache.avro.protobuf.multiplefiles.TestMultipleFiles.getDescriptor().getEnumTypes().get(0);
  }

  // Cached values() array to avoid re-cloning on every descriptor lookup.
  private static final A[] VALUES = values();

  // Resolves a descriptor back to its constant; rejects descriptors from other enums.
  public static A valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    return VALUES[desc.getIndex()];
  }

  // Wire value assigned in the .proto definition.
  private final int value;

  private A(int value) {
    this.value = value;
  }
  // @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.multiplefiles.A)
}
| 7,584 |
0 | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf | Create_ds/avro/lang/java/protobuf/src/test/java/org/apache/avro/protobuf/noopt/Test.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: src/test/protobuf/test.proto
package org.apache.avro.protobuf.noopt;
public final class Test {
private Test() {
}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
/**
* <pre>
* an enum
* </pre>
*
* Protobuf enum {@code org.apache.avro.protobuf.noopt.A}
*/
public enum A implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>X = 1;</code>
*/
X(1),
/**
* <code>Y = 2;</code>
*/
Y(2),
/**
* <code>Z = 3;</code>
*/
Z(3),;
/**
* <code>X = 1;</code>
*/
public static final int X_VALUE = 1;
/**
* <code>Y = 2;</code>
*/
public static final int Y_VALUE = 2;
/**
* <code>Z = 3;</code>
*/
public static final int Z_VALUE = 3;
public final int getNumber() {
return value;
}
/**
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static A valueOf(int value) {
return forNumber(value);
}
public static A forNumber(int value) {
switch (value) {
case 1:
return X;
case 2:
return Y;
case 3:
return Z;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<A> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<A> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<A>() {
public A findValueByNumber(int number) {
return A.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.getDescriptor().getEnumTypes().get(0);
}
private static final A[] VALUES = values();
public static A valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private A(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.noopt.A)
}
public interface FooOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.apache.avro.protobuf.noopt.Foo)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
boolean hasInt32();
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
int getInt32();
/**
* <code>optional int64 int64 = 2;</code>
*/
boolean hasInt64();
/**
* <code>optional int64 int64 = 2;</code>
*/
long getInt64();
/**
* <code>optional uint32 uint32 = 3;</code>
*/
boolean hasUint32();
/**
* <code>optional uint32 uint32 = 3;</code>
*/
int getUint32();
/**
* <code>optional uint64 uint64 = 4;</code>
*/
boolean hasUint64();
/**
* <code>optional uint64 uint64 = 4;</code>
*/
long getUint64();
/**
* <code>optional sint32 sint32 = 5;</code>
*/
boolean hasSint32();
/**
* <code>optional sint32 sint32 = 5;</code>
*/
int getSint32();
/**
* <code>optional sint64 sint64 = 6;</code>
*/
boolean hasSint64();
/**
* <code>optional sint64 sint64 = 6;</code>
*/
long getSint64();
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
boolean hasFixed32();
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
int getFixed32();
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
boolean hasFixed64();
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
long getFixed64();
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
boolean hasSfixed32();
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
int getSfixed32();
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
boolean hasSfixed64();
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
long getSfixed64();
/**
* <code>optional float float = 11;</code>
*/
boolean hasFloat();
/**
* <code>optional float float = 11;</code>
*/
float getFloat();
/**
* <code>optional double double = 12;</code>
*/
boolean hasDouble();
/**
* <code>optional double double = 12;</code>
*/
double getDouble();
/**
* <code>optional bool bool = 13;</code>
*/
boolean hasBool();
/**
* <code>optional bool bool = 13;</code>
*/
boolean getBool();
/**
* <code>optional string string = 14;</code>
*/
boolean hasString();
/**
* <code>optional string string = 14;</code>
*/
java.lang.String getString();
/**
* <code>optional string string = 14;</code>
*/
com.google.protobuf.ByteString getStringBytes();
/**
* <code>optional bytes bytes = 15;</code>
*/
boolean hasBytes();
/**
* <code>optional bytes bytes = 15;</code>
*/
com.google.protobuf.ByteString getBytes();
/**
* <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
*/
boolean hasEnum();
/**
* <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
*/
org.apache.avro.protobuf.noopt.Test.A getEnum();
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
java.util.List<java.lang.Integer> getIntArrayList();
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
int getIntArrayCount();
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
int getIntArray(int index);
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> getFooArrayList();
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
org.apache.avro.protobuf.noopt.Test.Foo getFooArray(int index);
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
int getFooArrayCount();
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
java.util.List<? extends org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooArrayOrBuilderList();
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooArrayOrBuilder(int index);
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
java.util.List<org.apache.avro.protobuf.noopt.Test.A> getSymsList();
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
int getSymsCount();
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
org.apache.avro.protobuf.noopt.Test.A getSyms(int index);
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
boolean hasFoo();
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
org.apache.avro.protobuf.noopt.Test.Foo getFoo();
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooOrBuilder();
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
boolean hasTimestamp();
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
com.google.protobuf.Timestamp getTimestamp();
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder();
}
/**
* Protobuf type {@code org.apache.avro.protobuf.noopt.Foo}
*/
public static final class Foo extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.noopt.Foo)
FooOrBuilder {
private static final long serialVersionUID = 0L;
// Use Foo.newBuilder() to construct.
private Foo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Foo() {
int32_ = 0;
int64_ = 0L;
uint32_ = 0;
uint64_ = 0L;
sint32_ = 0;
sint64_ = 0L;
fixed32_ = 0;
fixed64_ = 0L;
sfixed32_ = 0;
sfixed64_ = 0L;
float_ = 0F;
double_ = 0D;
bool_ = false;
string_ = "";
bytes_ = com.google.protobuf.ByteString.EMPTY;
enum_ = 3;
intArray_ = java.util.Collections.emptyList();
fooArray_ = java.util.Collections.emptyList();
syms_ = java.util.Collections.emptyList();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Foo(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
int32_ = input.readInt32();
break;
}
case 16: {
bitField0_ |= 0x00000002;
int64_ = input.readInt64();
break;
}
case 24: {
bitField0_ |= 0x00000004;
uint32_ = input.readUInt32();
break;
}
case 32: {
bitField0_ |= 0x00000008;
uint64_ = input.readUInt64();
break;
}
case 40: {
bitField0_ |= 0x00000010;
sint32_ = input.readSInt32();
break;
}
case 48: {
bitField0_ |= 0x00000020;
sint64_ = input.readSInt64();
break;
}
case 61: {
bitField0_ |= 0x00000040;
fixed32_ = input.readFixed32();
break;
}
case 65: {
bitField0_ |= 0x00000080;
fixed64_ = input.readFixed64();
break;
}
case 77: {
bitField0_ |= 0x00000100;
sfixed32_ = input.readSFixed32();
break;
}
case 81: {
bitField0_ |= 0x00000200;
sfixed64_ = input.readSFixed64();
break;
}
case 93: {
bitField0_ |= 0x00000400;
float_ = input.readFloat();
break;
}
case 97: {
bitField0_ |= 0x00000800;
double_ = input.readDouble();
break;
}
case 104: {
bitField0_ |= 0x00001000;
bool_ = input.readBool();
break;
}
case 114: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00002000;
string_ = bs;
break;
}
case 122: {
bitField0_ |= 0x00004000;
bytes_ = input.readBytes();
break;
}
case 128: {
int rawValue = input.readEnum();
org.apache.avro.protobuf.noopt.Test.A value = org.apache.avro.protobuf.noopt.Test.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(16, rawValue);
} else {
bitField0_ |= 0x00008000;
enum_ = rawValue;
}
break;
}
case 136: {
if (!((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00010000;
}
intArray_.add(input.readInt32());
break;
}
case 138: {
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
if (!((mutable_bitField0_ & 0x00010000) == 0x00010000) && input.getBytesUntilLimit() > 0) {
intArray_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00010000;
}
while (input.getBytesUntilLimit() > 0) {
intArray_.add(input.readInt32());
}
input.popLimit(limit);
break;
}
case 146: {
org.apache.avro.protobuf.noopt.Test.Foo.Builder subBuilder = null;
if (((bitField0_ & 0x00010000) == 0x00010000)) {
subBuilder = foo_.toBuilder();
}
foo_ = input.readMessage(org.apache.avro.protobuf.noopt.Test.Foo.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(foo_);
foo_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00010000;
break;
}
case 152: {
int rawValue = input.readEnum();
org.apache.avro.protobuf.noopt.Test.A value = org.apache.avro.protobuf.noopt.Test.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(19, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00040000;
}
syms_.add(rawValue);
}
break;
}
case 154: {
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while (input.getBytesUntilLimit() > 0) {
int rawValue = input.readEnum();
org.apache.avro.protobuf.noopt.Test.A value = org.apache.avro.protobuf.noopt.Test.A.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(19, rawValue);
} else {
if (!((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>();
mutable_bitField0_ |= 0x00040000;
}
syms_.add(rawValue);
}
}
input.popLimit(oldLimit);
break;
}
case 162: {
if (!((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.noopt.Test.Foo>();
mutable_bitField0_ |= 0x00020000;
}
fooArray_.add(input.readMessage(org.apache.avro.protobuf.noopt.Test.Foo.PARSER, extensionRegistry));
break;
}
case 170: {
com.google.protobuf.Timestamp.Builder subBuilder = null;
if (((bitField0_ & 0x00020000) == 0x00020000)) {
subBuilder = timestamp_.toBuilder();
}
timestamp_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(timestamp_);
timestamp_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00020000;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = java.util.Collections.unmodifiableList(intArray_);
}
if (((mutable_bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = java.util.Collections.unmodifiableList(syms_);
}
if (((mutable_bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.Foo.class,
org.apache.avro.protobuf.noopt.Test.Foo.Builder.class);
}
private int bitField0_;
public static final int INT32_FIELD_NUMBER = 1;
private int int32_;
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public int getInt32() {
return int32_;
}
public static final int INT64_FIELD_NUMBER = 2;
private long int64_;
/**
* <code>optional int64 int64 = 2;</code>
*/
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 int64 = 2;</code>
*/
public long getInt64() {
return int64_;
}
public static final int UINT32_FIELD_NUMBER = 3;
private int uint32_;
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public int getUint32() {
return uint32_;
}
public static final int UINT64_FIELD_NUMBER = 4;
private long uint64_;
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public long getUint64() {
return uint64_;
}
public static final int SINT32_FIELD_NUMBER = 5;
private int sint32_;
/**
* <code>optional sint32 sint32 = 5;</code>
*/
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional sint32 sint32 = 5;</code>
*/
public int getSint32() {
return sint32_;
}
public static final int SINT64_FIELD_NUMBER = 6;
private long sint64_;
/**
* <code>optional sint64 sint64 = 6;</code>
*/
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
* <code>optional sint64 sint64 = 6;</code>
*/
public long getSint64() {
return sint64_;
}
public static final int FIXED32_FIELD_NUMBER = 7;
private int fixed32_;
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
public boolean hasFixed32() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
* <code>optional fixed32 fixed32 = 7;</code>
*/
public int getFixed32() {
return fixed32_;
}
public static final int FIXED64_FIELD_NUMBER = 8;
private long fixed64_;
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
public boolean hasFixed64() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
* <code>optional fixed64 fixed64 = 8;</code>
*/
public long getFixed64() {
return fixed64_;
}
public static final int SFIXED32_FIELD_NUMBER = 9;
private int sfixed32_;
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
public boolean hasSfixed32() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
* <code>optional sfixed32 sfixed32 = 9;</code>
*/
public int getSfixed32() {
return sfixed32_;
}
public static final int SFIXED64_FIELD_NUMBER = 10;
private long sfixed64_;
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
public boolean hasSfixed64() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
* <code>optional sfixed64 sfixed64 = 10;</code>
*/
public long getSfixed64() {
return sfixed64_;
}
public static final int FLOAT_FIELD_NUMBER = 11;
private float float_;
/**
* <code>optional float float = 11;</code>
*/
public boolean hasFloat() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
* <code>optional float float = 11;</code>
*/
public float getFloat() {
return float_;
}
public static final int DOUBLE_FIELD_NUMBER = 12;
private double double_;
/**
* <code>optional double double = 12;</code>
*/
public boolean hasDouble() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
* <code>optional double double = 12;</code>
*/
public double getDouble() {
return double_;
}
public static final int BOOL_FIELD_NUMBER = 13;
private boolean bool_;
/**
* <code>optional bool bool = 13;</code>
*/
public boolean hasBool() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
* <code>optional bool bool = 13;</code>
*/
public boolean getBool() {
return bool_;
}
public static final int STRING_FIELD_NUMBER = 14;
private volatile java.lang.Object string_;
/**
* <code>optional string string = 14;</code>
*/
public boolean hasString() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
* <code>optional string string = 14;</code>
*/
public java.lang.String getString() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
string_ = s;
}
return s;
}
}
/**
* <code>optional string string = 14;</code>
*/
public com.google.protobuf.ByteString getStringBytes() {
java.lang.Object ref = string_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
string_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int BYTES_FIELD_NUMBER = 15;
private com.google.protobuf.ByteString bytes_;
/**
* <code>optional bytes bytes = 15;</code>
*/
public boolean hasBytes() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
* <code>optional bytes bytes = 15;</code>
*/
public com.google.protobuf.ByteString getBytes() {
return bytes_;
}
public static final int ENUM_FIELD_NUMBER = 16;
private int enum_;
/**
* <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
*/
public boolean hasEnum() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
* <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
*/
public org.apache.avro.protobuf.noopt.Test.A getEnum() {
org.apache.avro.protobuf.noopt.Test.A result = org.apache.avro.protobuf.noopt.Test.A.valueOf(enum_);
return result == null ? org.apache.avro.protobuf.noopt.Test.A.Z : result;
}
// Field 17 ("intArray"): repeated int32, backed by a list of boxed Integers.
public static final int INTARRAY_FIELD_NUMBER = 17;
private java.util.List<java.lang.Integer> intArray_;
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public java.util.List<java.lang.Integer> getIntArrayList() {
return intArray_;
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArrayCount() {
return intArray_.size();
}
/**
* <pre>
* some repeated types
* </pre>
*
* <code>repeated int32 intArray = 17;</code>
*/
public int getIntArray(int index) {
return intArray_.get(index);
}
// Field 20 ("fooArray"): repeated message; the same backing list serves both the
// message view and the FooOrBuilder view below.
public static final int FOOARRAY_FIELD_NUMBER = 20;
private java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> fooArray_;
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
public java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> getFooArrayList() {
return fooArray_;
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
public java.util.List<? extends org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooArrayOrBuilderList() {
return fooArray_;
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
public int getFooArrayCount() {
return fooArray_.size();
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.noopt.Test.Foo getFooArray(int index) {
return fooArray_.get(index);
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
*/
public org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooArrayOrBuilder(int index) {
return fooArray_.get(index);
}
// Field 19 ("syms"): repeated enum stored as raw Integers; the shared converter
// below maps them to Test.A on access. Note: unrecognized numbers fall back to
// A.X here (the repeated-field default), unlike field 16 which defaults to Z.
public static final int SYMS_FIELD_NUMBER = 19;
private java.util.List<java.lang.Integer> syms_;
private static final com.google.protobuf.Internal.ListAdapter.Converter<java.lang.Integer, org.apache.avro.protobuf.noopt.Test.A> syms_converter_ = new com.google.protobuf.Internal.ListAdapter.Converter<java.lang.Integer, org.apache.avro.protobuf.noopt.Test.A>() {
public org.apache.avro.protobuf.noopt.Test.A convert(java.lang.Integer from) {
org.apache.avro.protobuf.noopt.Test.A result = org.apache.avro.protobuf.noopt.Test.A.valueOf(from);
return result == null ? org.apache.avro.protobuf.noopt.Test.A.X : result;
}
};
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
public java.util.List<org.apache.avro.protobuf.noopt.Test.A> getSymsList() {
// Wraps the raw Integer list in a converting view; no copy is made.
return new com.google.protobuf.Internal.ListAdapter<java.lang.Integer, org.apache.avro.protobuf.noopt.Test.A>(
syms_, syms_converter_);
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
public int getSymsCount() {
return syms_.size();
}
/**
* <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
*/
public org.apache.avro.protobuf.noopt.Test.A getSyms(int index) {
return syms_converter_.convert(syms_.get(index));
}
// Field 18 ("foo"): optional message of this same type (recursive); presence
// tracked by bit 0x00010000 of bitField0_. A null field means "unset" and the
// getters substitute the default instance.
public static final int FOO_FIELD_NUMBER = 18;
private org.apache.avro.protobuf.noopt.Test.Foo foo_;
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
public boolean hasFoo() {
return ((bitField0_ & 0x00010000) == 0x00010000);
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.noopt.Test.Foo getFoo() {
return foo_ == null ? org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance() : foo_;
}
/**
* <pre>
* a recursive type
* </pre>
*
* <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
*/
public org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooOrBuilder() {
return foo_ == null ? org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance() : foo_;
}
// Field 21 ("timestamp"): optional well-known message; presence tracked by bit
// 0x00020000 of bitField0_. Null means "unset"; getters substitute the default.
public static final int TIMESTAMP_FIELD_NUMBER = 21;
private com.google.protobuf.Timestamp timestamp_;
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00020000) == 0x00020000);
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.Timestamp getTimestamp() {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
/**
* <pre>
* a predefined message type
* </pre>
*
* <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
*/
public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/**
 * Returns whether all required fields are set, recursively: the required
 * {@code int32} field must be present, and every {@code fooArray} element and
 * the optional {@code foo} sub-message (if set) must themselves be
 * initialized. The result is memoized in {@code memoizedIsInitialized}.
 */
public final boolean isInitialized() {
  final byte memo = memoizedIsInitialized;
  if (memo == 1) {
    return true;
  }
  if (memo == 0) {
    return false;
  }
  // Short-circuit: once ok is false, no further sub-message checks run.
  boolean ok = hasInt32();
  if (ok) {
    for (int i = 0; ok && i < getFooArrayCount(); i++) {
      ok = getFooArray(i).isInitialized();
    }
  }
  if (ok && hasFoo()) {
    ok = getFoo().isInitialized();
  }
  memoizedIsInitialized = (byte) (ok ? 1 : 0);
  return ok;
}
/**
* Serializes all present optional fields and all repeated elements to
* {@code output} in ascending field-number order (1..21). Because field
* numbers 18 (foo), 19 (syms) and 20 (fooArray) do not follow declaration
* order, their write calls are interleaved below. Unknown fields are
* appended last.
*/
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeInt32(1, int32_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeInt64(2, int64_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt32(3, uint32_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
output.writeUInt64(4, uint64_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
output.writeSInt32(5, sint32_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
output.writeSInt64(6, sint64_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
output.writeFixed32(7, fixed32_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
output.writeFixed64(8, fixed64_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
output.writeSFixed32(9, sfixed32_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
output.writeSFixed64(10, sfixed64_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
output.writeFloat(11, float_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
output.writeDouble(12, double_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
output.writeBool(13, bool_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 14, string_);
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
output.writeBytes(15, bytes_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
output.writeEnum(16, enum_);
}
for (int i = 0; i < intArray_.size(); i++) {
output.writeInt32(17, intArray_.get(i));
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
output.writeMessage(18, getFoo());
}
for (int i = 0; i < syms_.size(); i++) {
// syms_ holds raw enum numbers, so they are written directly.
output.writeEnum(19, syms_.get(i));
}
for (int i = 0; i < fooArray_.size(); i++) {
output.writeMessage(20, fooArray_.get(i));
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
output.writeMessage(21, getTimestamp());
}
unknownFields.writeTo(output);
}
/**
* Computes (and memoizes in {@code memoizedSize}) the number of bytes
* {@link #writeTo} will produce: per-field sizes for each present optional
* field plus element sizes and tag overhead for the repeated fields.
*/
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1)
return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, int32_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, int64_);
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream.computeUInt32Size(3, uint32_);
}
if (((bitField0_ & 0x00000008) == 0x00000008)) {
size += com.google.protobuf.CodedOutputStream.computeUInt64Size(4, uint64_);
}
if (((bitField0_ & 0x00000010) == 0x00000010)) {
size += com.google.protobuf.CodedOutputStream.computeSInt32Size(5, sint32_);
}
if (((bitField0_ & 0x00000020) == 0x00000020)) {
size += com.google.protobuf.CodedOutputStream.computeSInt64Size(6, sint64_);
}
if (((bitField0_ & 0x00000040) == 0x00000040)) {
size += com.google.protobuf.CodedOutputStream.computeFixed32Size(7, fixed32_);
}
if (((bitField0_ & 0x00000080) == 0x00000080)) {
size += com.google.protobuf.CodedOutputStream.computeFixed64Size(8, fixed64_);
}
if (((bitField0_ & 0x00000100) == 0x00000100)) {
size += com.google.protobuf.CodedOutputStream.computeSFixed32Size(9, sfixed32_);
}
if (((bitField0_ & 0x00000200) == 0x00000200)) {
size += com.google.protobuf.CodedOutputStream.computeSFixed64Size(10, sfixed64_);
}
if (((bitField0_ & 0x00000400) == 0x00000400)) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(11, float_);
}
if (((bitField0_ & 0x00000800) == 0x00000800)) {
size += com.google.protobuf.CodedOutputStream.computeDoubleSize(12, double_);
}
if (((bitField0_ & 0x00001000) == 0x00001000)) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(13, bool_);
}
if (((bitField0_ & 0x00002000) == 0x00002000)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(14, string_);
}
if (((bitField0_ & 0x00004000) == 0x00004000)) {
size += com.google.protobuf.CodedOutputStream.computeBytesSize(15, bytes_);
}
if (((bitField0_ & 0x00008000) == 0x00008000)) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(16, enum_);
}
{
// Repeated int32 (field 17): payload bytes plus 2 bytes of tag per element
// (field numbers >= 16 encode to a two-byte varint tag).
int dataSize = 0;
for (int i = 0; i < intArray_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(intArray_.get(i));
}
size += dataSize;
size += 2 * getIntArrayList().size();
}
if (((bitField0_ & 0x00010000) == 0x00010000)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(18, getFoo());
}
{
// Repeated enum (field 19): same two-byte-tag-per-element accounting.
int dataSize = 0;
for (int i = 0; i < syms_.size(); i++) {
dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(syms_.get(i));
}
size += dataSize;
size += 2 * syms_.size();
}
for (int i = 0; i < fooArray_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(20, fooArray_.get(i));
}
if (((bitField0_ & 0x00020000) == 0x00020000)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(21, getTimestamp());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.avro.protobuf.noopt.Test.Foo)) {
    return super.equals(obj);
  }
  org.apache.avro.protobuf.noopt.Test.Foo other = (org.apache.avro.protobuf.noopt.Test.Foo) obj;
  // Optional scalars: presence must match, and when present the values must match.
  if (hasInt32() != other.hasInt32()) return false;
  if (hasInt32() && getInt32() != other.getInt32()) return false;
  if (hasInt64() != other.hasInt64()) return false;
  if (hasInt64() && getInt64() != other.getInt64()) return false;
  if (hasUint32() != other.hasUint32()) return false;
  if (hasUint32() && getUint32() != other.getUint32()) return false;
  if (hasUint64() != other.hasUint64()) return false;
  if (hasUint64() && getUint64() != other.getUint64()) return false;
  if (hasSint32() != other.hasSint32()) return false;
  if (hasSint32() && getSint32() != other.getSint32()) return false;
  if (hasSint64() != other.hasSint64()) return false;
  if (hasSint64() && getSint64() != other.getSint64()) return false;
  if (hasFixed32() != other.hasFixed32()) return false;
  if (hasFixed32() && getFixed32() != other.getFixed32()) return false;
  if (hasFixed64() != other.hasFixed64()) return false;
  if (hasFixed64() && getFixed64() != other.getFixed64()) return false;
  if (hasSfixed32() != other.hasSfixed32()) return false;
  if (hasSfixed32() && getSfixed32() != other.getSfixed32()) return false;
  if (hasSfixed64() != other.hasSfixed64()) return false;
  if (hasSfixed64() && getSfixed64() != other.getSfixed64()) return false;
  // Floating point is compared via raw bit patterns (matches the original
  // generated code's semantics).
  if (hasFloat() != other.hasFloat()) return false;
  if (hasFloat()
      && java.lang.Float.floatToIntBits(getFloat()) != java.lang.Float.floatToIntBits(other.getFloat())) {
    return false;
  }
  if (hasDouble() != other.hasDouble()) return false;
  if (hasDouble()
      && java.lang.Double.doubleToLongBits(getDouble()) != java.lang.Double.doubleToLongBits(other.getDouble())) {
    return false;
  }
  if (hasBool() != other.hasBool()) return false;
  if (hasBool() && getBool() != other.getBool()) return false;
  if (hasString() != other.hasString()) return false;
  if (hasString() && !getString().equals(other.getString())) return false;
  if (hasBytes() != other.hasBytes()) return false;
  if (hasBytes() && !getBytes().equals(other.getBytes())) return false;
  if (hasEnum() != other.hasEnum()) return false;
  if (hasEnum() && enum_ != other.enum_) return false;
  // Repeated fields compare element-wise via List.equals; syms_ compares the
  // raw enum numbers, as in the original.
  if (!getIntArrayList().equals(other.getIntArrayList())) return false;
  if (!getFooArrayList().equals(other.getFooArrayList())) return false;
  if (!syms_.equals(other.syms_)) return false;
  if (hasFoo() != other.hasFoo()) return false;
  if (hasFoo() && !getFoo().equals(other.getFoo())) return false;
  if (hasTimestamp() != other.hasTimestamp()) return false;
  if (hasTimestamp() && !getTimestamp().equals(other.getTimestamp())) return false;
  return unknownFields.equals(other.unknownFields);
}
@java.lang.Override
public int hashCode() {
// Hash is memoized; 0 is used as the "not computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// Each present (or non-empty repeated) field mixes its field number and its
// value into the hash, in the same fixed order every time, so the result is
// consistent with equals().
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasInt32()) {
hash = (37 * hash) + INT32_FIELD_NUMBER;
hash = (53 * hash) + getInt32();
}
if (hasInt64()) {
hash = (37 * hash) + INT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getInt64());
}
if (hasUint32()) {
hash = (37 * hash) + UINT32_FIELD_NUMBER;
hash = (53 * hash) + getUint32();
}
if (hasUint64()) {
hash = (37 * hash) + UINT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getUint64());
}
if (hasSint32()) {
hash = (37 * hash) + SINT32_FIELD_NUMBER;
hash = (53 * hash) + getSint32();
}
if (hasSint64()) {
hash = (37 * hash) + SINT64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSint64());
}
if (hasFixed32()) {
hash = (37 * hash) + FIXED32_FIELD_NUMBER;
hash = (53 * hash) + getFixed32();
}
if (hasFixed64()) {
hash = (37 * hash) + FIXED64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFixed64());
}
if (hasSfixed32()) {
hash = (37 * hash) + SFIXED32_FIELD_NUMBER;
hash = (53 * hash) + getSfixed32();
}
if (hasSfixed64()) {
hash = (37 * hash) + SFIXED64_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSfixed64());
}
// Float/double are hashed via their bit patterns, consistent with equals().
if (hasFloat()) {
hash = (37 * hash) + FLOAT_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getFloat());
}
if (hasDouble()) {
hash = (37 * hash) + DOUBLE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(java.lang.Double.doubleToLongBits(getDouble()));
}
if (hasBool()) {
hash = (37 * hash) + BOOL_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getBool());
}
if (hasString()) {
hash = (37 * hash) + STRING_FIELD_NUMBER;
hash = (53 * hash) + getString().hashCode();
}
if (hasBytes()) {
hash = (37 * hash) + BYTES_FIELD_NUMBER;
hash = (53 * hash) + getBytes().hashCode();
}
if (hasEnum()) {
// Raw enum number is hashed, matching the raw comparison in equals().
hash = (37 * hash) + ENUM_FIELD_NUMBER;
hash = (53 * hash) + enum_;
}
if (getIntArrayCount() > 0) {
hash = (37 * hash) + INTARRAY_FIELD_NUMBER;
hash = (53 * hash) + getIntArrayList().hashCode();
}
if (getFooArrayCount() > 0) {
hash = (37 * hash) + FOOARRAY_FIELD_NUMBER;
hash = (53 * hash) + getFooArrayList().hashCode();
}
if (getSymsCount() > 0) {
hash = (37 * hash) + SYMS_FIELD_NUMBER;
hash = (53 * hash) + syms_.hashCode();
}
if (hasFoo()) {
hash = (37 * hash) + FOO_FIELD_NUMBER;
hash = (53 * hash) + getFoo().hashCode();
}
if (hasTimestamp()) {
hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
hash = (53 * hash) + getTimestamp().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Static parse entry points. The byte-oriented overloads delegate straight to
// PARSER; the stream-oriented ones go through GeneratedMessageV3 helpers that
// translate protobuf parse failures into the declared exception types.
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first, so multiple messages can be
// written back-to-back on one stream.
public static org.apache.avro.protobuf.noopt.Test.Foo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseDelimitedFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.Foo parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods.
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.avro.protobuf.noopt.Test.Foo prototype) {
// Starts from an empty builder, then copies all set fields from prototype.
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
// The default instance yields a fresh empty builder; merging it would be a no-op.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.avro.protobuf.noopt.Foo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:org.apache.avro.protobuf.noopt.Foo)
org.apache.avro.protobuf.noopt.Test.FooOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.Foo.class,
org.apache.avro.protobuf.noopt.Test.Foo.Builder.class);
}
// Construct using org.apache.avro.protobuf.noopt.Test.Foo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested-message field builders (fooArray, foo, timestamp)
// when the runtime requests it via alwaysUseFieldBuilders.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getFooArrayFieldBuilder();
getFooFieldBuilder();
getTimestampFieldBuilder();
}
}
/**
* Resets every field to its proto default and clears all presence bits.
* Note the builder's bit layout differs from the built message's: bits
* 0x00010000-0x00100000 here cover intArray, fooArray, syms, foo and
* timestamp respectively.
*/
public Builder clear() {
super.clear();
int32_ = 0;
bitField0_ = (bitField0_ & ~0x00000001);
int64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000002);
uint32_ = 0;
bitField0_ = (bitField0_ & ~0x00000004);
uint64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000008);
sint32_ = 0;
bitField0_ = (bitField0_ & ~0x00000010);
sint64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000020);
fixed32_ = 0;
bitField0_ = (bitField0_ & ~0x00000040);
fixed64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000080);
sfixed32_ = 0;
bitField0_ = (bitField0_ & ~0x00000100);
sfixed64_ = 0L;
bitField0_ = (bitField0_ & ~0x00000200);
float_ = 0F;
bitField0_ = (bitField0_ & ~0x00000400);
double_ = 0D;
bitField0_ = (bitField0_ & ~0x00000800);
bool_ = false;
bitField0_ = (bitField0_ & ~0x00001000);
string_ = "";
bitField0_ = (bitField0_ & ~0x00002000);
bytes_ = com.google.protobuf.ByteString.EMPTY;
bitField0_ = (bitField0_ & ~0x00004000);
// 3 is the wire number of the declared enum default Z.
enum_ = 3;
bitField0_ = (bitField0_ & ~0x00008000);
intArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00010000);
if (fooArrayBuilder_ == null) {
fooArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00020000);
} else {
fooArrayBuilder_.clear();
}
syms_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
if (fooBuilder_ == null) {
foo_ = null;
} else {
fooBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00080000);
if (timestampBuilder_ == null) {
timestamp_ = null;
} else {
timestampBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00100000);
return this;
}
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor;
}
public org.apache.avro.protobuf.noopt.Test.Foo getDefaultInstanceForType() {
return org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance();
}
/**
 * Builds the message, throwing if any required field (here {@code int32}) or
 * required field of a nested message is unset.
 */
public org.apache.avro.protobuf.noopt.Test.Foo build() {
  final org.apache.avro.protobuf.noopt.Test.Foo built = buildPartial();
  if (built.isInitialized()) {
    return built;
  }
  throw newUninitializedMessageException(built);
}
/**
* Builds the message without checking required fields. Copies each field and
* remaps the builder's presence bits to the message's layout (builder bit
* 0x00080000 for foo becomes message bit 0x00010000; 0x00100000 for
* timestamp becomes 0x00020000). Repeated lists are frozen via
* Collections.unmodifiableList so the built message can share them safely.
*/
public org.apache.avro.protobuf.noopt.Test.Foo buildPartial() {
org.apache.avro.protobuf.noopt.Test.Foo result = new org.apache.avro.protobuf.noopt.Test.Foo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.int32_ = int32_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.int64_ = int64_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.uint32_ = uint32_;
if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
to_bitField0_ |= 0x00000008;
}
result.uint64_ = uint64_;
if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
to_bitField0_ |= 0x00000010;
}
result.sint32_ = sint32_;
if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
to_bitField0_ |= 0x00000020;
}
result.sint64_ = sint64_;
if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
to_bitField0_ |= 0x00000040;
}
result.fixed32_ = fixed32_;
if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
to_bitField0_ |= 0x00000080;
}
result.fixed64_ = fixed64_;
if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
to_bitField0_ |= 0x00000100;
}
result.sfixed32_ = sfixed32_;
if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
to_bitField0_ |= 0x00000200;
}
result.sfixed64_ = sfixed64_;
if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
to_bitField0_ |= 0x00000400;
}
result.float_ = float_;
if (((from_bitField0_ & 0x00000800) == 0x00000800)) {
to_bitField0_ |= 0x00000800;
}
result.double_ = double_;
if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
to_bitField0_ |= 0x00001000;
}
result.bool_ = bool_;
if (((from_bitField0_ & 0x00002000) == 0x00002000)) {
to_bitField0_ |= 0x00002000;
}
result.string_ = string_;
if (((from_bitField0_ & 0x00004000) == 0x00004000)) {
to_bitField0_ |= 0x00004000;
}
result.bytes_ = bytes_;
if (((from_bitField0_ & 0x00008000) == 0x00008000)) {
to_bitField0_ |= 0x00008000;
}
result.enum_ = enum_;
// Freeze intArray and hand the (now immutable) list to the message; clearing
// the builder bit forces a copy on the next builder-side mutation.
if (((bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = java.util.Collections.unmodifiableList(intArray_);
bitField0_ = (bitField0_ & ~0x00010000);
}
result.intArray_ = intArray_;
if (fooArrayBuilder_ == null) {
if (((bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = java.util.Collections.unmodifiableList(fooArray_);
bitField0_ = (bitField0_ & ~0x00020000);
}
result.fooArray_ = fooArray_;
} else {
result.fooArray_ = fooArrayBuilder_.build();
}
if (((bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = java.util.Collections.unmodifiableList(syms_);
bitField0_ = (bitField0_ & ~0x00040000);
}
result.syms_ = syms_;
if (((from_bitField0_ & 0x00080000) == 0x00080000)) {
to_bitField0_ |= 0x00010000;
}
if (fooBuilder_ == null) {
result.foo_ = foo_;
} else {
result.foo_ = fooBuilder_.build();
}
if (((from_bitField0_ & 0x00100000) == 0x00100000)) {
to_bitField0_ |= 0x00020000;
}
if (timestampBuilder_ == null) {
result.timestamp_ = timestamp_;
} else {
result.timestamp_ = timestampBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
// Covariant-return overrides: these only narrow the superclass return type
// from the generic builder to this Builder.
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index,
java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
/**
 * Merges another message into this builder, using the typed fast path when
 * {@code other} is a {@code Foo} and the reflective superclass merge otherwise.
 */
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.avro.protobuf.noopt.Test.Foo) {
    return mergeFrom((org.apache.avro.protobuf.noopt.Test.Foo) other);
  }
  super.mergeFrom(other);
  return this;
}
/**
* Merges all set fields of {@code other} into this builder: scalars are
* overwritten, repeated fields are appended, and sub-messages (foo,
* timestamp) are merged recursively.
*/
public Builder mergeFrom(org.apache.avro.protobuf.noopt.Test.Foo other) {
if (other == org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance())
return this;
if (other.hasInt32()) {
setInt32(other.getInt32());
}
if (other.hasInt64()) {
setInt64(other.getInt64());
}
if (other.hasUint32()) {
setUint32(other.getUint32());
}
if (other.hasUint64()) {
setUint64(other.getUint64());
}
if (other.hasSint32()) {
setSint32(other.getSint32());
}
if (other.hasSint64()) {
setSint64(other.getSint64());
}
if (other.hasFixed32()) {
setFixed32(other.getFixed32());
}
if (other.hasFixed64()) {
setFixed64(other.getFixed64());
}
if (other.hasSfixed32()) {
setSfixed32(other.getSfixed32());
}
if (other.hasSfixed64()) {
setSfixed64(other.getSfixed64());
}
if (other.hasFloat()) {
setFloat(other.getFloat());
}
if (other.hasDouble()) {
setDouble(other.getDouble());
}
if (other.hasBool()) {
setBool(other.getBool());
}
if (other.hasString()) {
// Shares other's string reference directly instead of going through a setter.
bitField0_ |= 0x00002000;
string_ = other.string_;
onChanged();
}
if (other.hasBytes()) {
setBytes(other.getBytes());
}
if (other.hasEnum()) {
setEnum(other.getEnum());
}
// Repeated fields: when ours is empty, adopt the other message's (immutable)
// list and clear the ownership bit so a later mutation copies it first;
// otherwise make ours mutable and append.
if (!other.intArray_.isEmpty()) {
if (intArray_.isEmpty()) {
intArray_ = other.intArray_;
bitField0_ = (bitField0_ & ~0x00010000);
} else {
ensureIntArrayIsMutable();
intArray_.addAll(other.intArray_);
}
onChanged();
}
if (fooArrayBuilder_ == null) {
if (!other.fooArray_.isEmpty()) {
if (fooArray_.isEmpty()) {
fooArray_ = other.fooArray_;
bitField0_ = (bitField0_ & ~0x00020000);
} else {
ensureFooArrayIsMutable();
fooArray_.addAll(other.fooArray_);
}
onChanged();
}
} else {
if (!other.fooArray_.isEmpty()) {
if (fooArrayBuilder_.isEmpty()) {
// The nested builder holds nothing; drop it and adopt other's list.
fooArrayBuilder_.dispose();
fooArrayBuilder_ = null;
fooArray_ = other.fooArray_;
bitField0_ = (bitField0_ & ~0x00020000);
fooArrayBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getFooArrayFieldBuilder()
: null;
} else {
fooArrayBuilder_.addAllMessages(other.fooArray_);
}
}
}
if (!other.syms_.isEmpty()) {
if (syms_.isEmpty()) {
syms_ = other.syms_;
bitField0_ = (bitField0_ & ~0x00040000);
} else {
ensureSymsIsMutable();
syms_.addAll(other.syms_);
}
onChanged();
}
if (other.hasFoo()) {
mergeFoo(other.getFoo());
}
if (other.hasTimestamp()) {
mergeTimestamp(other.getTimestamp());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
/**
 * Returns whether the builder's current contents would form an initialized
 * message: the required {@code int32} must be set and every nested
 * {@code fooArray} element and the optional {@code foo} must be initialized.
 * Unlike the message-side check, this result is not memoized.
 */
public final boolean isInitialized() {
  if (!hasInt32()) {
    return false;
  }
  final int fooCount = getFooArrayCount();
  for (int i = 0; i < fooCount; i++) {
    if (!getFooArray(i).isInitialized()) {
      return false;
    }
  }
  return !hasFoo() || getFoo().isInitialized();
}
/**
* Parses a message from the stream and merges it into this builder. On a
* parse failure the partially-parsed message (recovered from the exception)
* is still merged in the finally block before the IOException is rethrown.
*/
public Builder mergeFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
org.apache.avro.protobuf.noopt.Test.Foo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.avro.protobuf.noopt.Test.Foo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int int32_;
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public boolean hasInt32() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public int getInt32() {
return int32_;
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public Builder setInt32(int value) {
bitField0_ |= 0x00000001;
int32_ = value;
onChanged();
return this;
}
/**
* <pre>
* all the primitive types
* </pre>
*
* <code>required int32 int32 = 1;</code>
*/
public Builder clearInt32() {
bitField0_ = (bitField0_ & ~0x00000001);
int32_ = 0;
onChanged();
return this;
}
private long int64_;
/**
* <code>optional int64 int64 = 2;</code>
*/
public boolean hasInt64() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 int64 = 2;</code>
*/
public long getInt64() {
return int64_;
}
/**
* <code>optional int64 int64 = 2;</code>
*/
public Builder setInt64(long value) {
bitField0_ |= 0x00000002;
int64_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 int64 = 2;</code>
*/
public Builder clearInt64() {
bitField0_ = (bitField0_ & ~0x00000002);
int64_ = 0L;
onChanged();
return this;
}
private int uint32_;
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public boolean hasUint32() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public int getUint32() {
return uint32_;
}
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public Builder setUint32(int value) {
bitField0_ |= 0x00000004;
uint32_ = value;
onChanged();
return this;
}
/**
* <code>optional uint32 uint32 = 3;</code>
*/
public Builder clearUint32() {
bitField0_ = (bitField0_ & ~0x00000004);
uint32_ = 0;
onChanged();
return this;
}
private long uint64_;
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public boolean hasUint64() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public long getUint64() {
return uint64_;
}
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public Builder setUint64(long value) {
bitField0_ |= 0x00000008;
uint64_ = value;
onChanged();
return this;
}
/**
* <code>optional uint64 uint64 = 4;</code>
*/
public Builder clearUint64() {
bitField0_ = (bitField0_ & ~0x00000008);
uint64_ = 0L;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional sint32 sint32 = 5` (presence bit 0x00000010).
private int sint32_;
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public boolean hasSint32() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public int getSint32() {
return sint32_;
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public Builder setSint32(int value) {
bitField0_ |= 0x00000010;
sint32_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint32 sint32 = 5;</code>
 */
public Builder clearSint32() {
bitField0_ = (bitField0_ & ~0x00000010);
sint32_ = 0;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional sint64 sint64 = 6` (presence bit 0x00000020).
private long sint64_;
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public boolean hasSint64() {
return ((bitField0_ & 0x00000020) == 0x00000020);
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public long getSint64() {
return sint64_;
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public Builder setSint64(long value) {
bitField0_ |= 0x00000020;
sint64_ = value;
onChanged();
return this;
}
/**
 * <code>optional sint64 sint64 = 6;</code>
 */
public Builder clearSint64() {
bitField0_ = (bitField0_ & ~0x00000020);
sint64_ = 0L;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional fixed32 fixed32 = 7` (presence bit 0x00000040).
private int fixed32_;
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public boolean hasFixed32() {
return ((bitField0_ & 0x00000040) == 0x00000040);
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public int getFixed32() {
return fixed32_;
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public Builder setFixed32(int value) {
bitField0_ |= 0x00000040;
fixed32_ = value;
onChanged();
return this;
}
/**
 * <code>optional fixed32 fixed32 = 7;</code>
 */
public Builder clearFixed32() {
bitField0_ = (bitField0_ & ~0x00000040);
fixed32_ = 0;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional fixed64 fixed64 = 8` (presence bit 0x00000080).
private long fixed64_;
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public boolean hasFixed64() {
return ((bitField0_ & 0x00000080) == 0x00000080);
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public long getFixed64() {
return fixed64_;
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public Builder setFixed64(long value) {
bitField0_ |= 0x00000080;
fixed64_ = value;
onChanged();
return this;
}
/**
 * <code>optional fixed64 fixed64 = 8;</code>
 */
public Builder clearFixed64() {
bitField0_ = (bitField0_ & ~0x00000080);
fixed64_ = 0L;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional sfixed32 sfixed32 = 9` (presence bit 0x00000100).
private int sfixed32_;
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public boolean hasSfixed32() {
return ((bitField0_ & 0x00000100) == 0x00000100);
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public int getSfixed32() {
return sfixed32_;
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public Builder setSfixed32(int value) {
bitField0_ |= 0x00000100;
sfixed32_ = value;
onChanged();
return this;
}
/**
 * <code>optional sfixed32 sfixed32 = 9;</code>
 */
public Builder clearSfixed32() {
bitField0_ = (bitField0_ & ~0x00000100);
sfixed32_ = 0;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional sfixed64 sfixed64 = 10` (presence bit 0x00000200).
private long sfixed64_;
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public boolean hasSfixed64() {
return ((bitField0_ & 0x00000200) == 0x00000200);
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public long getSfixed64() {
return sfixed64_;
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public Builder setSfixed64(long value) {
bitField0_ |= 0x00000200;
sfixed64_ = value;
onChanged();
return this;
}
/**
 * <code>optional sfixed64 sfixed64 = 10;</code>
 */
public Builder clearSfixed64() {
bitField0_ = (bitField0_ & ~0x00000200);
sfixed64_ = 0L;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional float float = 11` (presence bit 0x00000400).
private float float_;
/**
 * <code>optional float float = 11;</code>
 */
public boolean hasFloat() {
return ((bitField0_ & 0x00000400) == 0x00000400);
}
/**
 * <code>optional float float = 11;</code>
 */
public float getFloat() {
return float_;
}
/**
 * <code>optional float float = 11;</code>
 */
public Builder setFloat(float value) {
bitField0_ |= 0x00000400;
float_ = value;
onChanged();
return this;
}
/**
 * <code>optional float float = 11;</code>
 */
public Builder clearFloat() {
bitField0_ = (bitField0_ & ~0x00000400);
float_ = 0F;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional double double = 12` (presence bit 0x00000800).
private double double_;
/**
 * <code>optional double double = 12;</code>
 */
public boolean hasDouble() {
return ((bitField0_ & 0x00000800) == 0x00000800);
}
/**
 * <code>optional double double = 12;</code>
 */
public double getDouble() {
return double_;
}
/**
 * <code>optional double double = 12;</code>
 */
public Builder setDouble(double value) {
bitField0_ |= 0x00000800;
double_ = value;
onChanged();
return this;
}
/**
 * <code>optional double double = 12;</code>
 */
public Builder clearDouble() {
bitField0_ = (bitField0_ & ~0x00000800);
double_ = 0D;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional bool bool = 13` (presence bit 0x00001000).
private boolean bool_;
/**
 * <code>optional bool bool = 13;</code>
 */
public boolean hasBool() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
 * <code>optional bool bool = 13;</code>
 */
public boolean getBool() {
return bool_;
}
/**
 * <code>optional bool bool = 13;</code>
 */
public Builder setBool(boolean value) {
bitField0_ |= 0x00001000;
bool_ = value;
onChanged();
return this;
}
/**
 * <code>optional bool bool = 13;</code>
 */
public Builder clearBool() {
bitField0_ = (bitField0_ & ~0x00001000);
bool_ = false;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional string string = 14` (presence bit 0x00002000).
// string_ holds either a String or a ByteString; getString() lazily decodes and caches the
// String form only when the bytes are valid UTF-8.
private java.lang.Object string_ = "";
/**
 * <code>optional string string = 14;</code>
 */
public boolean hasString() {
return ((bitField0_ & 0x00002000) == 0x00002000);
}
/**
 * <code>optional string string = 14;</code>
 */
public java.lang.String getString() {
java.lang.Object ref = string_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// cache the decoded form only if the bytes round-trip as valid UTF-8
if (bs.isValidUtf8()) {
string_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 * <code>optional string string = 14;</code>
 */
public com.google.protobuf.ByteString getStringBytes() {
java.lang.Object ref = string_;
if (ref instanceof String) {
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// cache the encoded form for subsequent byte-level access
string_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 * <code>optional string string = 14;</code>
 */
public Builder setString(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00002000;
string_ = value;
onChanged();
return this;
}
/**
 * <code>optional string string = 14;</code>
 */
public Builder clearString() {
bitField0_ = (bitField0_ & ~0x00002000);
string_ = getDefaultInstance().getString();
onChanged();
return this;
}
/**
 * <code>optional string string = 14;</code>
 */
public Builder setStringBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// NOTE: stores the raw bytes without UTF-8 validation; getString() validates lazily
bitField0_ |= 0x00002000;
string_ = value;
onChanged();
return this;
}
// protoc-generated field + accessors for `optional bytes bytes = 15` (presence bit 0x00004000).
private com.google.protobuf.ByteString bytes_ = com.google.protobuf.ByteString.EMPTY;
/**
 * <code>optional bytes bytes = 15;</code>
 */
public boolean hasBytes() {
return ((bitField0_ & 0x00004000) == 0x00004000);
}
/**
 * <code>optional bytes bytes = 15;</code>
 */
public com.google.protobuf.ByteString getBytes() {
return bytes_;
}
/**
 * <code>optional bytes bytes = 15;</code>
 */
public Builder setBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00004000;
bytes_ = value;
onChanged();
return this;
}
/**
 * <code>optional bytes bytes = 15;</code>
 */
public Builder clearBytes() {
bitField0_ = (bitField0_ & ~0x00004000);
bytes_ = getDefaultInstance().getBytes();
onChanged();
return this;
}
// protoc-generated field + accessors for `optional A enum = 16 [default = Z]`
// (presence bit 0x00008000). The enum is stored as its wire number; 3 is the
// number of the declared default Z (see the [default = Z] option in the javadoc).
private int enum_ = 3;
/**
 * <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
 */
public boolean hasEnum() {
return ((bitField0_ & 0x00008000) == 0x00008000);
}
/**
 * <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
 */
public org.apache.avro.protobuf.noopt.Test.A getEnum() {
org.apache.avro.protobuf.noopt.Test.A result = org.apache.avro.protobuf.noopt.Test.A.valueOf(enum_);
// fall back to the declared default when the stored number maps to no constant
return result == null ? org.apache.avro.protobuf.noopt.Test.A.Z : result;
}
/**
 * <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
 */
public Builder setEnum(org.apache.avro.protobuf.noopt.Test.A value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00008000;
enum_ = value.getNumber();
onChanged();
return this;
}
/**
 * <code>optional .org.apache.avro.protobuf.noopt.A enum = 16 [default = Z];</code>
 */
public Builder clearEnum() {
bitField0_ = (bitField0_ & ~0x00008000);
enum_ = 3;
onChanged();
return this;
}
// protoc-generated field + accessors for `repeated int32 intArray = 17` (mutability bit 0x00010000).
// The list starts as the shared immutable empty list and is copy-on-write: the first mutation
// replaces it with a private ArrayList and sets the bit.
private java.util.List<java.lang.Integer> intArray_ = java.util.Collections.emptyList();
private void ensureIntArrayIsMutable() {
if (!((bitField0_ & 0x00010000) == 0x00010000)) {
intArray_ = new java.util.ArrayList<java.lang.Integer>(intArray_);
bitField0_ |= 0x00010000;
}
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public java.util.List<java.lang.Integer> getIntArrayList() {
return java.util.Collections.unmodifiableList(intArray_);
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public int getIntArrayCount() {
return intArray_.size();
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public int getIntArray(int index) {
return intArray_.get(index);
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public Builder setIntArray(int index, int value) {
ensureIntArrayIsMutable();
intArray_.set(index, value);
onChanged();
return this;
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public Builder addIntArray(int value) {
ensureIntArrayIsMutable();
intArray_.add(value);
onChanged();
return this;
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public Builder addAllIntArray(java.lang.Iterable<? extends java.lang.Integer> values) {
ensureIntArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, intArray_);
onChanged();
return this;
}
/**
 * <pre>
 * some repeated types
 * </pre>
 *
 * <code>repeated int32 intArray = 17;</code>
 */
public Builder clearIntArray() {
intArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00010000);
onChanged();
return this;
}
// protoc-generated field + accessors for `repeated Foo fooArray = 20` (mutability bit 0x00020000).
// Every accessor below branches on fooArrayBuilder_: while it is null the plain copy-on-write
// list fooArray_ is authoritative; once getFooArrayFieldBuilder() is called, the
// RepeatedFieldBuilderV3 takes over and fooArray_ is nulled out.
private java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> fooArray_ = java.util.Collections.emptyList();
private void ensureFooArrayIsMutable() {
if (!((bitField0_ & 0x00020000) == 0x00020000)) {
fooArray_ = new java.util.ArrayList<org.apache.avro.protobuf.noopt.Test.Foo>(fooArray_);
bitField0_ |= 0x00020000;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder> fooArrayBuilder_;
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public java.util.List<org.apache.avro.protobuf.noopt.Test.Foo> getFooArrayList() {
if (fooArrayBuilder_ == null) {
return java.util.Collections.unmodifiableList(fooArray_);
} else {
return fooArrayBuilder_.getMessageList();
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public int getFooArrayCount() {
if (fooArrayBuilder_ == null) {
return fooArray_.size();
} else {
return fooArrayBuilder_.getCount();
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo getFooArray(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder setFooArray(int index, org.apache.avro.protobuf.noopt.Test.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.set(index, value);
onChanged();
} else {
fooArrayBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder setFooArray(int index, org.apache.avro.protobuf.noopt.Test.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.set(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder addFooArray(org.apache.avro.protobuf.noopt.Test.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(value);
onChanged();
} else {
fooArrayBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder addFooArray(int index, org.apache.avro.protobuf.noopt.Test.Foo value) {
if (fooArrayBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureFooArrayIsMutable();
fooArray_.add(index, value);
onChanged();
} else {
fooArrayBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder addFooArray(org.apache.avro.protobuf.noopt.Test.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder addFooArray(int index, org.apache.avro.protobuf.noopt.Test.Foo.Builder builderForValue) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.add(index, builderForValue.build());
onChanged();
} else {
fooArrayBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder addAllFooArray(java.lang.Iterable<? extends org.apache.avro.protobuf.noopt.Test.Foo> values) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, fooArray_);
onChanged();
} else {
fooArrayBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder clearFooArray() {
if (fooArrayBuilder_ == null) {
fooArray_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00020000);
onChanged();
} else {
fooArrayBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public Builder removeFooArray(int index) {
if (fooArrayBuilder_ == null) {
ensureFooArrayIsMutable();
fooArray_.remove(index);
onChanged();
} else {
fooArrayBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo.Builder getFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooArrayOrBuilder(int index) {
if (fooArrayBuilder_ == null) {
return fooArray_.get(index);
} else {
return fooArrayBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public java.util.List<? extends org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooArrayOrBuilderList() {
if (fooArrayBuilder_ != null) {
return fooArrayBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(fooArray_);
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo.Builder addFooArrayBuilder() {
return getFooArrayFieldBuilder().addBuilder(org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo.Builder addFooArrayBuilder(int index) {
return getFooArrayFieldBuilder().addBuilder(index,
org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance());
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.Foo fooArray = 20;</code>
 */
public java.util.List<org.apache.avro.protobuf.noopt.Test.Foo.Builder> getFooArrayBuilderList() {
return getFooArrayFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; after creation fooArray_ is nulled and all
// state lives in the builder.
private com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooArrayFieldBuilder() {
if (fooArrayBuilder_ == null) {
fooArrayBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder>(
fooArray_, ((bitField0_ & 0x00020000) == 0x00020000), getParentForChildren(), isClean());
fooArray_ = null;
}
return fooArrayBuilder_;
}
// protoc-generated field + accessors for `repeated A syms = 19` (mutability bit 0x00040000).
// The repeated enum is stored as a list of wire numbers and adapted to enum constants via
// syms_converter_ (declared elsewhere in the message class).
private java.util.List<java.lang.Integer> syms_ = java.util.Collections.emptyList();
private void ensureSymsIsMutable() {
if (!((bitField0_ & 0x00040000) == 0x00040000)) {
syms_ = new java.util.ArrayList<java.lang.Integer>(syms_);
bitField0_ |= 0x00040000;
}
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public java.util.List<org.apache.avro.protobuf.noopt.Test.A> getSymsList() {
return new com.google.protobuf.Internal.ListAdapter<java.lang.Integer, org.apache.avro.protobuf.noopt.Test.A>(
syms_, syms_converter_);
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public int getSymsCount() {
return syms_.size();
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public org.apache.avro.protobuf.noopt.Test.A getSyms(int index) {
return syms_converter_.convert(syms_.get(index));
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public Builder setSyms(int index, org.apache.avro.protobuf.noopt.Test.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.set(index, value.getNumber());
onChanged();
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public Builder addSyms(org.apache.avro.protobuf.noopt.Test.A value) {
if (value == null) {
throw new NullPointerException();
}
ensureSymsIsMutable();
syms_.add(value.getNumber());
onChanged();
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public Builder addAllSyms(java.lang.Iterable<? extends org.apache.avro.protobuf.noopt.Test.A> values) {
ensureSymsIsMutable();
for (org.apache.avro.protobuf.noopt.Test.A value : values) {
syms_.add(value.getNumber());
}
onChanged();
return this;
}
/**
 * <code>repeated .org.apache.avro.protobuf.noopt.A syms = 19;</code>
 */
public Builder clearSyms() {
syms_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00040000);
onChanged();
return this;
}
// protoc-generated field + accessors for `optional Foo foo = 18` (presence bit 0x00080000).
// Singular-message pattern: while fooBuilder_ is null the plain foo_ reference is
// authoritative; once getFooFieldBuilder() is called, the SingleFieldBuilderV3 owns the
// value and foo_ is nulled out.
private org.apache.avro.protobuf.noopt.Test.Foo foo_ = null;
private com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder> fooBuilder_;
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public boolean hasFoo() {
return ((bitField0_ & 0x00080000) == 0x00080000);
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo getFoo() {
if (fooBuilder_ == null) {
return foo_ == null ? org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance() : foo_;
} else {
return fooBuilder_.getMessage();
}
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public Builder setFoo(org.apache.avro.protobuf.noopt.Test.Foo value) {
if (fooBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
foo_ = value;
onChanged();
} else {
fooBuilder_.setMessage(value);
}
bitField0_ |= 0x00080000;
return this;
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public Builder setFoo(org.apache.avro.protobuf.noopt.Test.Foo.Builder builderForValue) {
if (fooBuilder_ == null) {
foo_ = builderForValue.build();
onChanged();
} else {
fooBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00080000;
return this;
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public Builder mergeFoo(org.apache.avro.protobuf.noopt.Test.Foo value) {
if (fooBuilder_ == null) {
// merge into the existing value only when one is already present and non-default;
// otherwise the incoming value replaces it wholesale
if (((bitField0_ & 0x00080000) == 0x00080000) && foo_ != null
&& foo_ != org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance()) {
foo_ = org.apache.avro.protobuf.noopt.Test.Foo.newBuilder(foo_).mergeFrom(value).buildPartial();
} else {
foo_ = value;
}
onChanged();
} else {
fooBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00080000;
return this;
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public Builder clearFoo() {
if (fooBuilder_ == null) {
foo_ = null;
onChanged();
} else {
fooBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00080000);
return this;
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public org.apache.avro.protobuf.noopt.Test.Foo.Builder getFooBuilder() {
bitField0_ |= 0x00080000;
onChanged();
return getFooFieldBuilder().getBuilder();
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
public org.apache.avro.protobuf.noopt.Test.FooOrBuilder getFooOrBuilder() {
if (fooBuilder_ != null) {
return fooBuilder_.getMessageOrBuilder();
} else {
return foo_ == null ? org.apache.avro.protobuf.noopt.Test.Foo.getDefaultInstance() : foo_;
}
}
/**
 * <pre>
 * a recursive type
 * </pre>
 *
 * <code>optional .org.apache.avro.protobuf.noopt.Foo foo = 18;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder> getFooFieldBuilder() {
if (fooBuilder_ == null) {
fooBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<org.apache.avro.protobuf.noopt.Test.Foo, org.apache.avro.protobuf.noopt.Test.Foo.Builder, org.apache.avro.protobuf.noopt.Test.FooOrBuilder>(
getFoo(), getParentForChildren(), isClean());
foo_ = null;
}
return fooBuilder_;
}
// protoc-generated field + accessors for `optional google.protobuf.Timestamp timestamp = 21`
// (presence bit 0x00100000). Same singular-message pattern as the foo field: the plain
// timestamp_ reference is authoritative until getTimestampFieldBuilder() hands ownership
// to the SingleFieldBuilderV3.
private com.google.protobuf.Timestamp timestamp_ = null;
private com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> timestampBuilder_;
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public boolean hasTimestamp() {
return ((bitField0_ & 0x00100000) == 0x00100000);
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public com.google.protobuf.Timestamp getTimestamp() {
if (timestampBuilder_ == null) {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
} else {
return timestampBuilder_.getMessage();
}
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public Builder setTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
timestamp_ = value;
onChanged();
} else {
timestampBuilder_.setMessage(value);
}
bitField0_ |= 0x00100000;
return this;
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public Builder setTimestamp(com.google.protobuf.Timestamp.Builder builderForValue) {
if (timestampBuilder_ == null) {
timestamp_ = builderForValue.build();
onChanged();
} else {
timestampBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00100000;
return this;
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public Builder mergeTimestamp(com.google.protobuf.Timestamp value) {
if (timestampBuilder_ == null) {
// merge only when a non-default value is already present; otherwise replace
if (((bitField0_ & 0x00100000) == 0x00100000) && timestamp_ != null
&& timestamp_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
timestamp_ = com.google.protobuf.Timestamp.newBuilder(timestamp_).mergeFrom(value).buildPartial();
} else {
timestamp_ = value;
}
onChanged();
} else {
timestampBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00100000;
return this;
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public Builder clearTimestamp() {
if (timestampBuilder_ == null) {
timestamp_ = null;
onChanged();
} else {
timestampBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00100000);
return this;
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public com.google.protobuf.Timestamp.Builder getTimestampBuilder() {
bitField0_ |= 0x00100000;
onChanged();
return getTimestampFieldBuilder().getBuilder();
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
public com.google.protobuf.TimestampOrBuilder getTimestampOrBuilder() {
if (timestampBuilder_ != null) {
return timestampBuilder_.getMessageOrBuilder();
} else {
return timestamp_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : timestamp_;
}
}
/**
 * <pre>
 * a predefined message type
 * </pre>
 *
 * <code>optional .google.protobuf.Timestamp timestamp = 21;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getTimestampFieldBuilder() {
if (timestampBuilder_ == null) {
timestampBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getTimestamp(), getParentForChildren(), isClean());
timestamp_ = null;
}
return timestampBuilder_;
}
// protoc-generated pass-throughs to the GeneratedMessageV3.Builder unknown-field handling.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.noopt.Foo)
}
// @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.noopt.Foo)
// protoc-generated singleton default instance and parser plumbing for message Foo.
private static final org.apache.avro.protobuf.noopt.Test.Foo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.avro.protobuf.noopt.Test.Foo();
}
public static org.apache.avro.protobuf.noopt.Test.Foo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser constant retained for generated-code compatibility;
// callers should use parser() / getParserForType().
@java.lang.Deprecated
public static final com.google.protobuf.Parser<Foo> PARSER = new com.google.protobuf.AbstractParser<Foo>() {
public Foo parsePartialFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Foo(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Foo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Foo> getParserForType() {
return PARSER;
}
public org.apache.avro.protobuf.noopt.Test.Foo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// protoc-generated read-only accessor interface for message M (M declares no fields,
// so no accessor methods are generated).
public interface MOrBuilder extends
// @@protoc_insertion_point(interface_extends:org.apache.avro.protobuf.noopt.M)
com.google.protobuf.MessageOrBuilder {
}
/**
* <pre>
* a nested enum
* </pre>
*
* Protobuf type {@code org.apache.avro.protobuf.noopt.M}
*/
public static final class M extends com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:org.apache.avro.protobuf.noopt.M)
MOrBuilder {
private static final long serialVersionUID = 0L;
// Use M.newBuilder() to construct.
// Builder-based constructor used by M.Builder.build()/buildPartial().
private M(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor backing the default instance; M has no fields to initialize.
private M() {
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. M declares no fields, so every non-zero tag is
// routed to the unknown-field set; tag 0 marks end of input.
private M(com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// record whatever was parsed even on failure, so the partial message is inspectable
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// protoc-generated descriptor/reflection plumbing for message M.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.M.class,
org.apache.avro.protobuf.noopt.Test.M.Builder.class);
}
/**
* Protobuf enum {@code org.apache.avro.protobuf.noopt.M.N}
*/
// protoc-generated nested enum M.N with a single constant A (wire number 1).
public enum N implements com.google.protobuf.ProtocolMessageEnum {
/**
 * <code>A = 1;</code>
 */
A(1),;
/**
 * <code>A = 1;</code>
 */
public static final int A_VALUE = 1;
public final int getNumber() {
return value;
}
/**
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static N valueOf(int value) {
return forNumber(value);
}
// Maps a wire number to its enum constant; returns null for unrecognized numbers.
public static N forNumber(int value) {
switch (value) {
case 1:
return A;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<N> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<N> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<N>() {
public N findValueByNumber(int number) {
return N.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.M.getDescriptor().getEnumTypes().get(0);
}
private static final N[] VALUES = values();
// Descriptor-based lookup used by the reflection API; index is positional, not the wire number.
public static N valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int value;
private N(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:org.apache.avro.protobuf.noopt.M.N)
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// M has no required fields, so it is always initialized once computed.
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1)
return true;
if (isInitialized == 0)
return false;
memoizedIsInitialized = 1;
return true;
}
// M has no declared fields; only unknown fields are serialized.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
unknownFields.writeTo(output);
}
// Memoized size computation (-1 sentinel = not yet computed).
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1)
return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.avro.protobuf.noopt.Test.M)) {
return super.equals(obj);
}
// M has no declared fields, so equality reduces to unknown-field equality.
org.apache.avro.protobuf.noopt.Test.M other = (org.apache.avro.protobuf.noopt.Test.M) obj;
boolean result = true;
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
// Cached hash; 0 doubles as the "not yet computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseDelimitedFrom(java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static org.apache.avro.protobuf.noopt.Test.M parseFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(org.apache.avro.protobuf.noopt.Test.M prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Builder for protobuf type {@code org.apache.avro.protobuf.noopt.M}.
 *
 * <p>{@code M} declares no fields of its own (it exists to host the nested
 * enum {@code N}), so this builder only manages the unknown-field set.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:org.apache.avro.protobuf.noopt.M)
org.apache.avro.protobuf.noopt.Test.MOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_fieldAccessorTable
.ensureFieldAccessorsInitialized(org.apache.avro.protobuf.noopt.Test.M.class,
org.apache.avro.protobuf.noopt.Test.M.Builder.class);
}
// Construct using org.apache.avro.protobuf.noopt.Test.M.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No repeated/message fields, so there is nothing to force-initialize.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
return this;
}
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return org.apache.avro.protobuf.noopt.Test.internal_static_org_apache_avro_protobuf_noopt_M_descriptor;
}
public org.apache.avro.protobuf.noopt.Test.M getDefaultInstanceForType() {
return org.apache.avro.protobuf.noopt.Test.M.getDefaultInstance();
}
// Builds and validates; throws if required fields were missing (never here).
public org.apache.avro.protobuf.noopt.Test.M build() {
org.apache.avro.protobuf.noopt.Test.M result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public org.apache.avro.protobuf.noopt.Test.M buildPartial() {
org.apache.avro.protobuf.noopt.Test.M result = new org.apache.avro.protobuf.noopt.Test.M(this);
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
// Reflective field setters below simply delegate to the superclass; they are
// narrowed to return this Builder type for call chaining.
public Builder setField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, int index,
java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, generic merge otherwise.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.avro.protobuf.noopt.Test.M) {
return mergeFrom((org.apache.avro.protobuf.noopt.Test.M) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merging from the default instance is a no-op; otherwise only unknown
// fields can carry information.
public Builder mergeFrom(org.apache.avro.protobuf.noopt.Test.M other) {
if (other == org.apache.avro.protobuf.noopt.Test.M.getDefaultInstance())
return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
// Parses from the wire; on failure, merges whatever was parsed before
// rethrowing so partial data is not silently dropped.
public Builder mergeFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException {
org.apache.avro.protobuf.noopt.Test.M parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.avro.protobuf.noopt.Test.M) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
public final Builder mergeUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:org.apache.avro.protobuf.noopt.M)
}
// @@protoc_insertion_point(class_scope:org.apache.avro.protobuf.noopt.M)
private static final org.apache.avro.protobuf.noopt.Test.M DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new org.apache.avro.protobuf.noopt.Test.M();
}
public static org.apache.avro.protobuf.noopt.Test.M getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Deprecated public parser instance, retained for source compatibility;
// callers should prefer the parser() accessor.
@java.lang.Deprecated
public static final com.google.protobuf.Parser<M> PARSER = new com.google.protobuf.AbstractParser<M>() {
public M parsePartialFrom(com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new M(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<M> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<M> getParserForType() {
return PARSER;
}
public org.apache.avro.protobuf.noopt.Test.M getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
private static final com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor;
private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_org_apache_avro_protobuf_noopt_Foo_fieldAccessorTable;
private static final com.google.protobuf.Descriptors.Descriptor internal_static_org_apache_avro_protobuf_noopt_M_descriptor;
private static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_org_apache_avro_protobuf_noopt_M_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = { "\n\034src/test/protobuf/test.proto\022\036org.apac"
+ "he.avro.protobuf.noopt\032\037google/protobuf/"
+ "timestamp.proto\"\204\004\n\003Foo\022\r\n\005int32\030\001 \002(\005\022\r"
+ "\n\005int64\030\002 \001(\003\022\016\n\006uint32\030\003 \001(\r\022\016\n\006uint64\030"
+ "\004 \001(\004\022\016\n\006sint32\030\005 \001(\021\022\016\n\006sint64\030\006 \001(\022\022\017\n"
+ "\007fixed32\030\007 \001(\007\022\017\n\007fixed64\030\010 \001(\006\022\020\n\010sfixe"
+ "d32\030\t \001(\017\022\020\n\010sfixed64\030\n \001(\020\022\r\n\005float\030\013 \001"
+ "(\002\022\016\n\006double\030\014 \001(\001\022\014\n\004bool\030\r \001(\010\022\016\n\006stri"
+ "ng\030\016 \001(\t\022\r\n\005bytes\030\017 \001(\014\0222\n\004enum\030\020 \001(\0162!."
+ "org.apache.avro.protobuf.noopt.A:\001Z\022\020\n\010i"
+ "ntArray\030\021 \003(\005\0225\n\010fooArray\030\024 \003(\0132#.org.ap"
+ "ache.avro.protobuf.noopt.Foo\022/\n\004syms\030\023 \003" + "(\0162!.org.apache.avro.protobuf.noopt.A\0220\n"
+ "\003foo\030\022 \001(\0132#.org.apache.avro.protobuf.no"
+ "opt.Foo\022-\n\ttimestamp\030\025 \001(\0132\032.google.prot"
+ "obuf.Timestamp\"\017\n\001M\"\n\n\001N\022\005\n\001A\020\001*\030\n\001A\022\005\n\001"
+ "X\020\001\022\005\n\001Y\020\002\022\005\n\001Z\020\003" };
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.protobuf.TimestampProto.getDescriptor(), },
assigner);
internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor = getDescriptor().getMessageTypes().get(0);
internal_static_org_apache_avro_protobuf_noopt_Foo_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_org_apache_avro_protobuf_noopt_Foo_descriptor,
new java.lang.String[] { "Int32", "Int64", "Uint32", "Uint64", "Sint32", "Sint64", "Fixed32", "Fixed64",
"Sfixed32", "Sfixed64", "Float", "Double", "Bool", "String", "Bytes", "Enum", "IntArray", "FooArray",
"Syms", "Foo", "Timestamp", });
internal_static_org_apache_avro_protobuf_noopt_M_descriptor = getDescriptor().getMessageTypes().get(1);
internal_static_org_apache_avro_protobuf_noopt_M_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_org_apache_avro_protobuf_noopt_M_descriptor, new java.lang.String[] {});
com.google.protobuf.TimestampProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| 7,585 |
0 | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtoConversions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import com.google.protobuf.Timestamp;
import org.apache.avro.Conversion;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
/**
 * Avro {@link Conversion} implementations that map the Avro
 * {@code timestamp-millis} / {@code timestamp-micros} logical types onto
 * {@link com.google.protobuf.Timestamp} values.
 */
public class ProtoConversions {

  private static final int THOUSAND = 1000;
  private static final int MILLION = 1000000;

  // second value must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z
  // inclusive.
  static final long SECONDS_LOWERLIMIT = -62135596800L;
  static final long SECONDS_UPPERLIMIT = 253402300799L;

  // nano value must be from 0 to 999,999,999 inclusive.
  private static final int NANOSECONDS_LOWERLIMIT = 0;
  private static final int NANOSECONDS_UPPERLIMIT = 999999999;

  /** Precision of the long value being converted to/from a Timestamp. */
  private enum TimestampPrecise {
    Millis, Micros
  }

  /** Conversion between epoch milliseconds and protobuf {@link Timestamp}. */
  public static class TimestampMillisConversion extends Conversion<Timestamp> {
    @Override
    public Class<Timestamp> getConvertedType() {
      return Timestamp.class;
    }

    @Override
    public String getLogicalTypeName() {
      return "timestamp-millis";
    }

    @Override
    public Timestamp fromLong(Long millisFromEpoch, Schema schema, LogicalType type) throws IllegalArgumentException {
      return ProtoConversions.fromLong(millisFromEpoch, TimestampPrecise.Millis);
    }

    @Override
    public Long toLong(Timestamp value, Schema schema, LogicalType type) {
      return ProtoConversions.toLong(value, TimestampPrecise.Millis);
    }

    @Override
    public Schema getRecommendedSchema() {
      return LogicalTypes.timestampMillis().addToSchema(Schema.create(Schema.Type.LONG));
    }
  }

  /** Conversion between epoch microseconds and protobuf {@link Timestamp}. */
  public static class TimestampMicrosConversion extends Conversion<Timestamp> {
    @Override
    public Class<Timestamp> getConvertedType() {
      return Timestamp.class;
    }

    @Override
    public String getLogicalTypeName() {
      return "timestamp-micros";
    }

    @Override
    public Timestamp fromLong(Long microsFromEpoch, Schema schema, LogicalType type) throws IllegalArgumentException {
      return ProtoConversions.fromLong(microsFromEpoch, TimestampPrecise.Micros);
    }

    @Override
    public Long toLong(Timestamp value, Schema schema, LogicalType type) {
      return ProtoConversions.toLong(value, TimestampPrecise.Micros);
    }

    @Override
    public Schema getRecommendedSchema() {
      return LogicalTypes.timestampMicros().addToSchema(Schema.create(Schema.Type.LONG));
    }
  }

  /**
   * Collapses a Timestamp into elapsed time since the epoch at the requested
   * precision. Sub-precision nanoseconds are truncated toward zero.
   */
  private static long toLong(Timestamp value, TimestampPrecise precise) {
    if (precise == TimestampPrecise.Millis) {
      return value.getSeconds() * THOUSAND + value.getNanos() / MILLION;
    }
    return value.getSeconds() * MILLION + value.getNanos() / THOUSAND;
  }

  /**
   * Splits an elapsed-since-epoch value into the (seconds, nanos) pair a
   * protobuf Timestamp requires. floorDiv/floorMod keep nanos non-negative
   * for pre-epoch instants.
   *
   * @throws IllegalArgumentException if the result lies outside the protobuf
   *                                  Timestamp range
   */
  private static Timestamp fromLong(Long elapsedSinceEpoch, TimestampPrecise precise) throws IllegalArgumentException {
    final long unitsPerSecond = (precise == TimestampPrecise.Millis) ? THOUSAND : MILLION;
    final long nanosPerUnit = (precise == TimestampPrecise.Millis) ? MILLION : THOUSAND;
    final long seconds = Math.floorDiv(elapsedSinceEpoch, unitsPerSecond);
    final int nanos = (int) (Math.floorMod(elapsedSinceEpoch, unitsPerSecond) * nanosPerUnit);
    if (seconds < SECONDS_LOWERLIMIT || seconds > SECONDS_UPPERLIMIT) {
      throw new IllegalArgumentException("given seconds is out of range");
    }
    if (nanos < NANOSECONDS_LOWERLIMIT || nanos > NANOSECONDS_UPPERLIMIT) {
      // NOTE: unreachable in practice -- floorDiv/floorMod above already
      // confine nanos to a single second's worth of units.
      throw new IllegalArgumentException("given nanos is out of range");
    }
    return Timestamp.newBuilder().setSeconds(seconds).setNanos(nanos).build();
  }
}
| 7,586 |
0 | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import java.util.List;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Map;
import java.util.IdentityHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.io.IOException;
import java.io.File;
import org.apache.avro.Conversion;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import com.google.protobuf.ByteString;
import com.google.protobuf.Message;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.MessageOrBuilder;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.EnumDescriptor;
import com.google.protobuf.Descriptors.EnumValueDescriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.DescriptorProtos.FileOptions;
import org.apache.avro.util.ClassUtils;
import org.apache.avro.util.internal.Accessor;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
/** Utilities for serializing Protobuf data in Avro format. */
/** Utilities for serializing Protobuf data in Avro format. */
public class ProtobufData extends GenericData {
  private static final ProtobufData INSTANCE = new ProtobufData();

  protected ProtobufData() {
  }

  /** Return the singleton instance. */
  public static ProtobufData get() {
    return INSTANCE;
  }

  @Override
  public DatumReader createDatumReader(Schema schema) {
    return new ProtobufDatumReader(schema, schema, this);
  }

  @Override
  public DatumWriter createDatumWriter(Schema schema) {
    return new ProtobufDatumWriter(schema, this);
  }

  @Override
  public void setField(Object r, String n, int pos, Object value) {
    setField(r, n, pos, value, getRecordState(r, getSchema(r.getClass())));
  }

  @Override
  public Object getField(Object r, String name, int pos) {
    return getField(r, name, pos, getRecordState(r, getSchema(r.getClass())));
  }

  /**
   * Sets a field on a protobuf {@link Builder}. {@code state} is the
   * {@code FieldDescriptor[]} produced by {@link #getRecordState}.
   */
  @Override
  protected void setField(Object record, String name, int position, Object value, Object state) {
    Builder b = (Builder) record;
    FieldDescriptor f = ((FieldDescriptor[]) state)[position];
    switch (f.getType()) {
    case MESSAGE:
      // A null message value means "absent": clear the field instead of
      // setting it (setField(null) would throw).
      if (value == null) {
        b.clearField(f);
        break;
      }
      // deliberate fall-through for non-null message values
    default:
      b.setField(f, value);
    }
  }

  /**
   * Reads a field from a protobuf {@link Message}; unset singular message
   * fields are surfaced as null rather than the default instance.
   */
  @Override
  protected Object getField(Object record, String name, int pos, Object state) {
    Message m = (Message) record;
    FieldDescriptor f = ((FieldDescriptor[]) state)[pos];
    switch (f.getType()) {
    case MESSAGE:
      if (!f.isRepeated() && !m.hasField(f))
        return null;
      // deliberate fall-through for repeated or present message fields
    default:
      return m.getField(f);
    }
  }

  // Per-descriptor cache of FieldDescriptors in Avro field-position order.
  // get/put rather than computeIfAbsent: the computation is idempotent, so a
  // rare duplicate on first access is harmless.
  private final Map<Descriptor, FieldDescriptor[]> fieldCache = new ConcurrentHashMap<>();

  @Override
  protected Object getRecordState(Object r, Schema s) {
    Descriptor d = ((MessageOrBuilder) r).getDescriptorForType();
    FieldDescriptor[] fields = fieldCache.get(d);
    if (fields == null) { // cache miss
      fields = new FieldDescriptor[s.getFields().size()];
      for (Field f : s.getFields())
        fields[f.pos()] = d.findFieldByName(f.name());
      fieldCache.put(d, fields); // update cache
    }
    return fields;
  }

  @Override
  protected boolean isRecord(Object datum) {
    return datum instanceof Message;
  }

  /**
   * Returns a protobuf Builder for {@code schema}'s message class, reusing
   * {@code old} when it is already an instance, or punting to the generic
   * representation when no generated class is available.
   */
  @Override
  public Object newRecord(Object old, Schema schema) {
    try {
      Class c = SpecificData.get().getClass(schema);
      if (c == null)
        return super.newRecord(old, schema); // punt to generic
      if (c.isInstance(old))
        return old; // reuse instance
      return c.getMethod("newBuilder").invoke(null);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  protected boolean isArray(Object datum) {
    return datum instanceof List;
  }

  @Override
  protected boolean isBytes(Object datum) {
    return datum instanceof ByteString;
  }

  @Override
  protected Schema getRecordSchema(Object record) {
    Descriptor descriptor = ((Message) record).getDescriptorForType();
    Schema schema = schemaCache.get(descriptor);
    if (schema == null) {
      schema = getSchema(descriptor);
      schemaCache.put(descriptor, schema);
    }
    return schema;
  }

  // Caches schemas by Class and by Descriptor. Kept as explicit get/put
  // (not computeIfAbsent) because getSchema() can recurse, and a recursive
  // mapping function inside ConcurrentHashMap.computeIfAbsent is illegal.
  private final Map<Object, Schema> schemaCache = new ConcurrentHashMap<>();

  /** Return a record schema given a protobuf message class. */
  public Schema getSchema(Class c) {
    Schema schema = schemaCache.get(c);
    if (schema == null) { // cache miss
      try {
        Object descriptor = c.getMethod("getDescriptor").invoke(null);
        if (c.isEnum())
          schema = getSchema((EnumDescriptor) descriptor);
        else
          schema = getSchema((Descriptor) descriptor);
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      schemaCache.put(c, schema); // update cache
    }
    return schema;
  }

  // Per-thread guard against infinite recursion on mutually/self-referential
  // message types: a descriptor already being translated maps to its
  // (possibly still field-less) Schema.
  private static final ThreadLocal<Map<Descriptor, Schema>> SEEN = ThreadLocal.withInitial(IdentityHashMap::new);

  /** Translates a protobuf message descriptor into an Avro record schema. */
  public Schema getSchema(Descriptor descriptor) {
    Map<Descriptor, Schema> seen = SEEN.get();
    if (seen.containsKey(descriptor)) // stop recursion
      return seen.get(descriptor);
    boolean first = seen.isEmpty();
    Conversion conversion = getConversionByDescriptor(descriptor);
    if (conversion != null) {
      // A registered logical-type conversion (e.g. Timestamp) overrides the
      // structural record translation.
      Schema converted = conversion.getRecommendedSchema();
      seen.put(descriptor, converted);
      return converted;
    }
    try {
      Schema result = Schema.createRecord(descriptor.getName(), null,
          getNamespace(descriptor.getFile(), descriptor.getContainingType()), false);
      seen.put(descriptor, result); // publish before recursing into fields
      List<Field> fields = new ArrayList<>(descriptor.getFields().size());
      for (FieldDescriptor f : descriptor.getFields())
        fields.add(Accessor.createField(f.getName(), getSchema(f), null, getDefault(f)));
      result.setFields(fields);
      return result;
    } finally {
      if (first)
        seen.clear(); // outermost call owns cleanup of the recursion guard
    }
  }

  /**
   * Derives the Avro namespace for a protobuf type: java package (or proto
   * package), plus the outer class name unless java_multiple_files is set,
   * plus any containing message types joined with '$'.
   */
  public String getNamespace(FileDescriptor fd, Descriptor containing) {
    FileOptions o = fd.getOptions();
    String p = o.hasJavaPackage() ? o.getJavaPackage() : fd.getPackage();
    String outer = "";
    if (!o.getJavaMultipleFiles()) {
      if (o.hasJavaOuterClassname()) {
        outer = o.getJavaOuterClassname();
      } else {
        // Default outer class name: the .proto file's base name, CamelCased.
        outer = new File(fd.getName()).getName();
        outer = outer.substring(0, outer.lastIndexOf('.'));
        outer = toCamelCase(outer);
      }
    }
    StringBuilder inner = new StringBuilder();
    while (containing != null) {
      if (inner.length() == 0) {
        inner.insert(0, containing.getName());
      } else {
        inner.insert(0, containing.getName() + "$");
      }
      containing = containing.getContainingType();
    }
    String d1 = (!outer.isEmpty() || inner.length() != 0 ? "." : "");
    String d2 = (!outer.isEmpty() && inner.length() != 0 ? "$" : "");
    return p + d1 + outer + d2 + inner;
  }

  /** Converts an underscore-separated name (e.g. "my_test") to CamelCase. */
  private static String toCamelCase(String s) {
    String[] parts = s.split("_");
    StringBuilder camelCaseString = new StringBuilder(s.length());
    for (String part : parts) {
      // FIX: split("_") yields empty tokens for leading or doubled
      // underscores ("_foo", "a__b"); cap("") would throw
      // StringIndexOutOfBoundsException, so skip them.
      if (!part.isEmpty()) {
        camelCaseString.append(cap(part));
      }
    }
    return camelCaseString.toString();
  }

  /** Capitalizes the first character and lowercases the rest; s must be non-empty. */
  private static String cap(String s) {
    return s.substring(0, 1).toUpperCase() + s.substring(1).toLowerCase();
  }

  private static final Schema NULL = Schema.create(Schema.Type.NULL);

  /** Translates a protobuf field descriptor into an Avro schema. */
  public Schema getSchema(FieldDescriptor f) {
    Schema s = getNonRepeatedSchema(f);
    if (f.isRepeated())
      s = Schema.createArray(s);
    return s;
  }

  private Schema getNonRepeatedSchema(FieldDescriptor f) {
    Schema result;
    switch (f.getType()) {
    case BOOL:
      return Schema.create(Schema.Type.BOOLEAN);
    case FLOAT:
      return Schema.create(Schema.Type.FLOAT);
    case DOUBLE:
      return Schema.create(Schema.Type.DOUBLE);
    case STRING:
      Schema s = Schema.create(Schema.Type.STRING);
      GenericData.setStringType(s, GenericData.StringType.String);
      return s;
    case BYTES:
      return Schema.create(Schema.Type.BYTES);
    case INT32:
    case UINT32:
    case SINT32:
    case FIXED32:
    case SFIXED32:
      return Schema.create(Schema.Type.INT);
    case INT64:
    case UINT64:
    case SINT64:
    case FIXED64:
    case SFIXED64:
      return Schema.create(Schema.Type.LONG);
    case ENUM:
      return getSchema(f.getEnumType());
    case MESSAGE:
      result = getSchema(f.getMessageType());
      if (f.isOptional())
        // wrap optional record fields in a union with null
        result = Schema.createUnion(Arrays.asList(NULL, result));
      return result;
    case GROUP: // groups are deprecated
    default:
      throw new RuntimeException("Unexpected type: " + f.getType());
    }
  }

  /** Translates a protobuf enum descriptor into an Avro enum schema. */
  public Schema getSchema(EnumDescriptor d) {
    List<String> symbols = new ArrayList<>(d.getValues().size());
    for (EnumValueDescriptor e : d.getValues()) {
      symbols.add(e.getName());
    }
    return Schema.createEnum(d.getName(), null, getNamespace(d.getFile(), d.getContainingType()), symbols);
  }

  private static final JsonFactory FACTORY = new JsonFactory();
  private static final ObjectMapper MAPPER = new ObjectMapper(FACTORY);
  private static final JsonNodeFactory NODES = JsonNodeFactory.instance;

  /**
   * Computes the Avro default value (as JSON) for a protobuf field: none for
   * required fields, an empty array for repeated fields, the declared proto
   * default when present, otherwise the protobuf type's implicit default.
   */
  private JsonNode getDefault(FieldDescriptor f) {
    if (f.isRequired()) // no default
      return null;

    if (f.isRepeated()) // empty array as repeated fields' default value
      return NODES.arrayNode();

    if (f.hasDefaultValue()) { // parse spec'd default value
      Object value = f.getDefaultValue();
      switch (f.getType()) {
      case ENUM:
        // represent enum defaults by symbol name, matching the Avro schema
        value = ((EnumValueDescriptor) value).getName();
        break;
      }
      String json = toString(value);
      try {
        return MAPPER.readTree(FACTORY.createParser(json));
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }

    switch (f.getType()) { // generate default for type
    case BOOL:
      return NODES.booleanNode(false);
    case FLOAT:
      return NODES.numberNode(0.0F);
    case DOUBLE:
      return NODES.numberNode(0.0D);
    case INT32:
    case UINT32:
    case SINT32:
    case FIXED32:
    case SFIXED32:
    case INT64:
    case UINT64:
    case SINT64:
    case FIXED64:
    case SFIXED64:
      return NODES.numberNode(0);
    case STRING:
    case BYTES:
      return NODES.textNode("");
    case ENUM:
      return NODES.textNode(f.getEnumType().getValues().get(0).getName());
    case MESSAGE:
      return NODES.nullNode();
    case GROUP: // groups are deprecated
    default:
      throw new RuntimeException("Unexpected type: " + f.getType());
    }
  }

  /**
   * Get Conversion from protobuf descriptor via protobuf classname.
   *
   * @param descriptor protobuf descriptor
   * @return Conversion | null
   */
  private Conversion getConversionByDescriptor(Descriptor descriptor) {
    String namespace = getNamespace(descriptor.getFile(), descriptor.getContainingType());
    String name = descriptor.getName();
    String dot = namespace.endsWith("$") ? "" : "."; // back-compatibly handle $
    try {
      Class clazz = ClassUtils.forName(getClassLoader(), namespace + dot + name);
      return getConversionByClass(clazz);
    } catch (ClassNotFoundException e) {
      return null;
    }
  }
}
| 7,587 |
0 | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.ResolvingDecoder;
import com.google.protobuf.ByteString;
import com.google.protobuf.Message;
import com.google.protobuf.ProtocolMessageEnum;
/**
* {@link org.apache.avro.io.DatumReader DatumReader} for generated Protobuf
* classes.
*/
/**
 * {@link org.apache.avro.io.DatumReader DatumReader} for generated Protobuf
 * classes.
 */
public class ProtobufDatumReader<T> extends GenericDatumReader<T> {
  public ProtobufDatumReader() {
    this(null, null, ProtobufData.get());
  }

  public ProtobufDatumReader(Class<T> c) {
    this(ProtobufData.get().getSchema(c));
  }

  /** Construct where the writer's and reader's schemas are the same. */
  public ProtobufDatumReader(Schema schema) {
    this(schema, schema, ProtobufData.get());
  }

  /** Construct given writer's and reader's schema. */
  public ProtobufDatumReader(Schema writer, Schema reader) {
    this(writer, reader, ProtobufData.get());
  }

  protected ProtobufDatumReader(Schema writer, Schema reader, ProtobufData data) {
    super(writer, reader, data);
  }

  @Override
  protected Object readRecord(Object old, Schema expected, ResolvingDecoder in) throws IOException {
    // The generic reader assembles fields into a protobuf Builder
    // (see ProtobufData.newRecord); finalize it into an immutable Message.
    Message.Builder b = (Message.Builder) super.readRecord(old, expected, in);
    return b.build(); // build instance
  }

  @Override
  protected Object createEnum(String symbol, Schema schema) {
    try {
      Class c = SpecificData.get().getClass(schema);
      if (c == null)
        return super.createEnum(symbol, schema); // punt to generic
      return ((ProtocolMessageEnum) Enum.valueOf(c, symbol)).getValueDescriptor();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  protected Object readBytes(Object old, Decoder in) throws IOException {
    // FIX: copy only the buffer's readable window (position..limit).
    // The previous implementation called ByteBuffer.array(), which returns
    // the entire backing array -- decoders may hand back a reused, oversized
    // buffer, so that could include stale trailing bytes -- and throws
    // UnsupportedOperationException for direct (non-array-backed) buffers.
    return ByteString.copyFrom((ByteBuffer) super.readBytes(old, in));
  }
}
| 7,588 |
0 | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro | Create_ds/avro/lang/java/protobuf/src/main/java/org/apache/avro/protobuf/ProtobufDatumWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.protobuf;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.Encoder;
import com.google.protobuf.ByteString;
import com.google.protobuf.Descriptors.EnumValueDescriptor;
/**
* {@link org.apache.avro.io.DatumWriter DatumWriter} for generated protobuf
* classes.
*/
public class ProtobufDatumWriter<T> extends GenericDatumWriter<T> {

  /** Construct with no schema; one must be set before writing. */
  public ProtobufDatumWriter() {
    super(ProtobufData.get());
  }

  /** Construct using the schema derived from the given generated protobuf class. */
  public ProtobufDatumWriter(Class<T> c) {
    super(ProtobufData.get().getSchema(c), ProtobufData.get());
  }

  /** Construct with an explicit schema and the singleton {@link ProtobufData}. */
  public ProtobufDatumWriter(Schema schema) {
    super(schema, ProtobufData.get());
  }

  /** Construct with an explicit schema and {@link ProtobufData} instance. */
  protected ProtobufDatumWriter(Schema root, ProtobufData protobufData) {
    super(root, protobufData);
  }

  /** Construct with an explicit {@link ProtobufData} instance and no schema. */
  protected ProtobufDatumWriter(ProtobufData protobufData) {
    super(protobufData);
  }

  /**
   * Writes protobuf enum value descriptors by their symbol's ordinal in the
   * Avro schema; anything else is delegated to the generic implementation.
   */
  @Override
  protected void writeEnum(Schema schema, Object datum, Encoder out) throws IOException {
    if (datum instanceof EnumValueDescriptor) {
      String symbol = ((EnumValueDescriptor) datum).getName();
      out.writeEnum(schema.getEnumOrdinal(symbol));
    } else {
      super.writeEnum(schema, datum, out); // not a protobuf enum; punt to generic
    }
  }

  /** Writes a protobuf {@link ByteString} as Avro bytes. */
  @Override
  protected void writeBytes(Object datum, Encoder out) throws IOException {
    ByteString bytes = (ByteString) datum;
    byte[] raw = bytes.toByteArray();
    out.writeBytes(raw, 0, bytes.size());
  }
}
| 7,589 |
0 | Create_ds/avro/doc/examples/java-example/src/main/java | Create_ds/avro/doc/examples/java-example/src/main/java/example/GenericMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Parser;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
/**
 * Demonstrates writing and reading Avro data with the generic API: the
 * schema is parsed at runtime and no generated classes are required.
 */
public class GenericMain {
  public static void main(String[] args) throws IOException {
    Schema schema = new Parser().parse(new File("./../user.avsc"));

    GenericRecord user1 = new GenericData.Record(schema);
    user1.put("name", "Alyssa");
    user1.put("favorite_number", 256);
    // Leave favorite color null

    GenericRecord user2 = new GenericData.Record(schema);
    user2.put("name", "Ben");
    user2.put("favorite_number", 7);
    user2.put("favorite_color", "red");

    // Serialize user1 and user2 to disk. try-with-resources guarantees the
    // writer is closed -- and the Avro container finalized -- even if an
    // append throws (the original leaked the writer on failure).
    File file = new File("users.avro");
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
      dataFileWriter.create(schema, file);
      dataFileWriter.append(user1);
      dataFileWriter.append(user2);
    }

    // Deserialize users from disk
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);
    GenericRecord user = null;
    try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(file, datumReader)) {
      while (dataFileReader.hasNext()) {
        // Reuse user object by passing it to next(). This saves us from
        // allocating and garbage collecting many objects for files with
        // many items.
        user = dataFileReader.next(user);
        System.out.println(user);
      }
    }
  }
}
| 7,590 |
0 | Create_ds/avro/doc/examples/java-example/src/main/java | Create_ds/avro/doc/examples/java-example/src/main/java/example/SpecificMain.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.File;
import java.io.IOException;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import example.avro.User;
/**
 * Demonstrates writing and reading Avro data with the specific API, using
 * the generated {@code User} class (plain setters, constructor, and builder).
 */
public class SpecificMain {
  public static void main(String[] args) throws IOException {
    User user1 = new User();
    user1.setName("Alyssa");
    user1.setFavoriteNumber(256);
    // Leave favorite color null

    // Alternate constructor
    User user2 = new User("Ben", 7, "red");

    // Construct via builder
    User user3 = User.newBuilder()
        .setName("Charlie")
        .setFavoriteColor("blue")
        .setFavoriteNumber(null)
        .build();

    // Serialize user1, user2 and user3 to disk. try-with-resources
    // guarantees the writer is closed -- and the Avro container finalized --
    // even if an append throws (the original leaked the writer on failure).
    File file = new File("users.avro");
    DatumWriter<User> userDatumWriter = new SpecificDatumWriter<>(User.class);
    try (DataFileWriter<User> dataFileWriter = new DataFileWriter<>(userDatumWriter)) {
      dataFileWriter.create(user1.getSchema(), file);
      dataFileWriter.append(user1);
      dataFileWriter.append(user2);
      dataFileWriter.append(user3);
    }

    // Deserialize Users from disk
    DatumReader<User> userDatumReader = new SpecificDatumReader<>(User.class);
    User user = null;
    try (DataFileReader<User> dataFileReader = new DataFileReader<>(file, userDatumReader)) {
      while (dataFileReader.hasNext()) {
        // Reuse user object by passing it to next(). This saves us from
        // allocating and garbage collecting many objects for files with
        // many items.
        user = dataFileReader.next(user);
        System.out.println(user);
      }
    }
  }
}
| 7,591 |
0 | Create_ds/avro/doc/examples/mr-example/src/main/java | Create_ds/avro/doc/examples/mr-example/src/main/java/example/MapReduceAvroWordCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.IOException;
import java.util.*;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.mapred.AvroWrapper;
import org.apache.avro.mapred.Pair;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* The classic WordCount example modified to output Avro Pair<CharSequence,
* Integer> records instead of text.
*/
public class MapReduceAvroWordCount extends Configured implements Tool {
public static class Map
extends Mapper<LongWritable, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
public void map(LongWritable key, Text value, Context context)
throws IOException, InterruptedException {
String line = value.toString();
StringTokenizer tokenizer = new StringTokenizer(line);
while (tokenizer.hasMoreTokens()) {
word.set(tokenizer.nextToken());
context.write(word, one);
}
}
}
public static class Reduce
extends Reducer<Text, IntWritable,
AvroWrapper<Pair<CharSequence, Integer>>, NullWritable> {
public void reduce(Text key, Iterable<IntWritable> values,
Context context)
throws IOException, InterruptedException {
int sum = 0;
for (IntWritable value : values) {
sum += value.get();
}
context.write(new AvroWrapper<Pair<CharSequence, Integer>>
(new Pair<CharSequence, Integer>(key.toString(), sum)),
NullWritable.get());
}
}
public int run(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: AvroWordCount <input path> <output path>");
return -1;
}
Job job = new Job(getConf());
job.setJarByClass(MapReduceAvroWordCount.class);
job.setJobName("wordcount");
// We call setOutputSchema first so we can override the configuration
// parameters it sets
AvroJob.setOutputKeySchema(job,
Pair.getPairSchema(Schema.create(Type.STRING),
Schema.create(Type.INT)));
job.setOutputValueClass(NullWritable.class);
job.setMapperClass(Map.class);
job.setReducerClass(Reduce.class);
job.setInputFormatClass(TextInputFormat.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setSortComparatorClass(Text.Comparator.class);
FileInputFormat.setInputPaths(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
job.waitForCompletion(true);
return 0;
}
public static void main(String[] args) throws Exception {
int res =
ToolRunner.run(new Configuration(), new MapReduceAvroWordCount(), args);
System.exit(res);
}
}
| 7,592 |
0 | Create_ds/avro/doc/examples/mr-example/src/main/java | Create_ds/avro/doc/examples/mr-example/src/main/java/example/AvroWordCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.IOException;
import java.util.*;
import org.apache.avro.*;
import org.apache.avro.Schema.Type;
import org.apache.avro.mapred.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
/**
* The classic WordCount example modified to output Avro Pair<CharSequence,
* Integer> records instead of text.
*/
public class AvroWordCount extends Configured implements Tool {
public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter)
throws IOException {
String line = value.toString();
StringTokenizer tokenizer = new StringTokenizer(line);
while (tokenizer.hasMoreTokens()) {
word.set(tokenizer.nextToken());
output.collect(word, one);
}
}
}
public static class Reduce extends MapReduceBase
implements Reducer<Text, IntWritable,
AvroWrapper<Pair<CharSequence, Integer>>, NullWritable> {
public void reduce(Text key, Iterator<IntWritable> values,
OutputCollector<AvroWrapper<Pair<CharSequence, Integer>>, NullWritable> output,
Reporter reporter) throws IOException {
int sum = 0;
while (values.hasNext()) {
sum += values.next().get();
}
output.collect(new AvroWrapper<Pair<CharSequence, Integer>>(
new Pair<CharSequence, Integer>(key.toString(), sum)),
NullWritable.get());
}
}
public int run(String[] args) throws Exception {
if (args.length != 2) {
System.err.println("Usage: AvroWordCount <input path> <output path>");
return -1;
}
JobConf conf = new JobConf(AvroWordCount.class);
conf.setJobName("wordcount");
// We call setOutputSchema first so we can override the configuration
// parameters it sets
AvroJob.setOutputSchema(conf, Pair.getPairSchema(Schema.create(Type.STRING),
Schema.create(Type.INT)));
conf.setMapperClass(Map.class);
conf.setReducerClass(Reduce.class);
conf.setInputFormat(TextInputFormat.class);
conf.setMapOutputKeyClass(Text.class);
conf.setMapOutputValueClass(IntWritable.class);
conf.setOutputKeyComparatorClass(Text.Comparator.class);
FileInputFormat.setInputPaths(conf, new Path(args[0]));
FileOutputFormat.setOutputPath(conf, new Path(args[1]));
JobClient.runJob(conf);
return 0;
}
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new AvroWordCount(), args);
System.exit(res);
}
}
| 7,593 |
0 | Create_ds/avro/doc/examples/mr-example/src/main/java | Create_ds/avro/doc/examples/mr-example/src/main/java/example/MapredColorCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.IOException;
import org.apache.avro.*;
import org.apache.avro.Schema.Type;
import org.apache.avro.mapred.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
import example.avro.User;
/** Counts users by favorite color using the old-style (mapred) Avro API. */
public class MapredColorCount extends Configured implements Tool {

  /** Emits (favoriteColor, 1) for every input user record. */
  public static class ColorCountMapper extends AvroMapper<User, Pair<CharSequence, Integer>> {
    @Override
    public void map(User user, AvroCollector<Pair<CharSequence, Integer>> collector, Reporter reporter)
        throws IOException {
      // We need this check because the User.favorite_color field has type ["string", "null"]
      CharSequence favorite = user.getFavoriteColor();
      collector.collect(new Pair<CharSequence, Integer>(favorite == null ? "none" : favorite, 1));
    }
  }

  /** Sums the per-color counts produced by the mapper. */
  public static class ColorCountReducer extends AvroReducer<CharSequence, Integer,
                                                            Pair<CharSequence, Integer>> {
    @Override
    public void reduce(CharSequence key, Iterable<Integer> values,
                       AvroCollector<Pair<CharSequence, Integer>> collector,
                       Reporter reporter)
        throws IOException {
      int total = 0;
      for (Integer count : values) {
        total += count;
      }
      collector.collect(new Pair<CharSequence, Integer>(key, total));
    }
  }

  @Override
  public int run(String[] args) throws Exception {
    if (args.length != 2) {
      System.err.println("Usage: MapredColorCount <input path> <output path>");
      return -1;
    }

    JobConf conf = new JobConf(getConf(), MapredColorCount.class);
    conf.setJobName("colorcount");

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    AvroJob.setMapperClass(conf, ColorCountMapper.class);
    AvroJob.setReducerClass(conf, ColorCountReducer.class);

    // Note that AvroJob.setInputSchema and AvroJob.setOutputSchema set
    // relevant config options such as input/output format, map output
    // classes, and output key class.
    AvroJob.setInputSchema(conf, User.getClassSchema());
    AvroJob.setOutputSchema(conf, Pair.getPairSchema(Schema.create(Type.STRING),
                                                     Schema.create(Type.INT)));

    JobClient.runJob(conf);
    return 0;
  }

  public static void main(String[] args) throws Exception {
    System.exit(ToolRunner.run(new Configuration(), new MapredColorCount(), args));
  }
}
| 7,594 |
0 | Create_ds/avro/doc/examples/mr-example/src/main/java | Create_ds/avro/doc/examples/mr-example/src/main/java/example/GenerateData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificDatumWriter;
import example.avro.User;
/** Generates a small Avro data file of random users for the MapReduce examples. */
public class GenerateData {
  // Colors to choose from at random; null exercises the optional field.
  public static final String[] COLORS = {"red", "orange", "yellow", "green", "blue", "purple", null};
  public static final int USERS = 20;
  public static final String PATH = "./input/users.avro";

  public static void main(String[] args) throws IOException {
    // Open data file
    File file = new File(PATH);
    if (file.getParentFile() != null) {
      file.getParentFile().mkdirs();
    }
    DatumWriter<User> userDatumWriter = new SpecificDatumWriter<User>(User.class);
    // try-with-resources closes the writer (finalizing the Avro container)
    // even if an append fails; the original leaked the writer on error.
    try (DataFileWriter<User> dataFileWriter = new DataFileWriter<User>(userDatumWriter)) {
      dataFileWriter.create(User.SCHEMA$, file);

      // Create random users
      Random random = new Random();
      for (int i = 0; i < USERS; i++) {
        User user = new User("user", null, COLORS[random.nextInt(COLORS.length)]);
        dataFileWriter.append(user);
        System.out.println(user);
      }
    }
  }
}
| 7,595 |
0 | Create_ds/avro/doc/examples/mr-example/src/main/java | Create_ds/avro/doc/examples/mr-example/src/main/java/example/MapReduceColorCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.avro.mapreduce.AvroKeyValueOutputFormat;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import example.avro.User;
/** Counts users by favorite color using the new-style (mapreduce) Avro API. */
public class MapReduceColorCount extends Configured implements Tool {

  /** Emits (favoriteColor, 1) for every user in the Avro input. */
  public static class ColorCountMapper extends
      Mapper<AvroKey<User>, NullWritable, Text, IntWritable> {
    @Override
    public void map(AvroKey<User> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      // favorite_color is nullable, so substitute a placeholder.
      CharSequence color = key.datum().getFavoriteColor();
      if (color == null) {
        color = "none";
      }
      context.write(new Text(color.toString()), new IntWritable(1));
    }
  }

  /** Sums per-color counts and emits Avro key/value output. */
  public static class ColorCountReducer extends
      Reducer<Text, IntWritable, AvroKey<CharSequence>, AvroValue<Integer>> {
    @Override
    public void reduce(Text key, Iterable<IntWritable> values,
                       Context context) throws IOException, InterruptedException {
      int sum = 0;
      for (IntWritable value : values) {
        sum += value.get();
      }
      context.write(new AvroKey<CharSequence>(key.toString()), new AvroValue<Integer>(sum));
    }
  }

  /**
   * Configures and runs the job.
   *
   * @param args input path and output path
   * @return 0 on success, 1 if the job fails, -1 on bad usage
   */
  @Override
  public int run(String[] args) throws Exception {
    if (args.length != 2) {
      System.err.println("Usage: MapReduceColorCount <input path> <output path>");
      return -1;
    }

    // Job.getInstance replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(getConf());
    job.setJarByClass(MapReduceColorCount.class);
    job.setJobName("Color Count");

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    job.setInputFormatClass(AvroKeyInputFormat.class);
    job.setMapperClass(ColorCountMapper.class);
    AvroJob.setInputKeySchema(job, User.getClassSchema());
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
    job.setReducerClass(ColorCountReducer.class);
    AvroJob.setOutputKeySchema(job, Schema.create(Schema.Type.STRING));
    AvroJob.setOutputValueSchema(job, Schema.create(Schema.Type.INT));

    return (job.waitForCompletion(true) ? 0 : 1);
  }

  public static void main(String[] args) throws Exception {
    int res = ToolRunner.run(new MapReduceColorCount(), args);
    System.exit(res);
  }
}
| 7,596 |
0 | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf/maven/PluginTransformer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.maven;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import org.apache.maven.plugins.shade.resource.ResourceTransformer;
import org.jdom.Content;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
/**
 * Maven shade {@code ResourceTransformer} that merges several copies of a
 * configured XML resource into a single document in the shaded jar.
 */
public class PluginTransformer implements ResourceTransformer {
    public static final String XSI_NS = "http://www.w3.org/2001/XMLSchema-instance";

    // Resource path to merge; injected by the shade plugin configuration.
    String resource;
    // Accumulated merged document; null until the first matching resource is seen.
    Document doc;

    public PluginTransformer() {
        super();
    }

    /** Matches the configured resource name, ignoring case. */
    public boolean canTransformResource(String r) {
        // equalsIgnoreCase is already case-insensitive, so the previous
        // toLowerCase() pre-pass (which also mutated the parameter) was
        // redundant -- and locale-sensitive.
        return resource != null && resource.equalsIgnoreCase(r);
    }

    public void processResource(String resource, InputStream is, List relocators) throws IOException {
        processResource(is);
    }

    /** Parses one copy of the resource and merges its children into the running document. */
    public void processResource(InputStream is) throws IOException {
        Document r;
        try {
            r = new SAXBuilder().build(is);
        } catch (JDOMException e) {
            throw new RuntimeException(e);
        }
        if (doc == null) {
            // The first document becomes the merge target.
            doc = r;
            Element el = doc.getRootElement();
            el.setAttribute("name", "default");
            el.setAttribute("provider", "cxf.apache.org");
        } else {
            // Move (not copy) each child: detaching via the iterator before
            // adding avoids JDOM's "content already has a parent" error.
            Element root = r.getRootElement();
            for (Iterator itr = root.getChildren().iterator(); itr.hasNext();) {
                Content n = (Content)itr.next();
                itr.remove();
                doc.getRootElement().addContent(n);
            }
        }
    }

    public boolean hasTransformedResource() {
        return doc != null;
    }

    /** Writes the merged document into the shaded jar and resets state. */
    public void modifyOutputStream(JarOutputStream jos) throws IOException {
        jos.putNextEntry(new JarEntry(resource));
        new XMLOutputter(Format.getPrettyFormat()).output(doc, jos);
        doc = null;
    }
}
| 7,597 |
0 | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf/maven/CXFAllTransformer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.maven;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;
import org.apache.maven.plugins.shade.resource.ResourceTransformer;
import org.jdom.Content;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;
/**
 * Maven shade {@code ResourceTransformer} that copies every
 * {@code META-INF/cxf/cxf-extension-*.xml} resource into the shaded jar and
 * additionally generates a {@code META-INF/cxf/cxf-all.xml} that imports the
 * extensions not already imported transitively by another extension.
 */
public class CXFAllTransformer implements ResourceTransformer {
    // Shared copy buffer for reading resource streams.
    byte[] buffer = new byte[1024];
    // Resource path -> raw bytes, in the order the resources were encountered.
    Map<String, ByteArrayOutputStream> extensions
        = new LinkedHashMap<String, ByteArrayOutputStream>();
    // Resource most recently accepted by canTransformResource(); relies on
    // the shade plugin calling canTransformResource/processResource in pairs.
    String lastResource;

    public CXFAllTransformer() {
        super();
    }

    public boolean canTransformResource(String r) {
        if (r.startsWith("META-INF/cxf/cxf-extension-")
            && r.endsWith(".xml")) {
            lastResource = r;
            return true;
        }
        return false;
    }

    public boolean hasTransformedResource() {
        return !extensions.isEmpty();
    }

    public void processResource(String resource, InputStream is, List relocators) throws IOException {
        processResource(is);
    }

    /** Buffers the full contents of the current extension resource. */
    public void processResource(InputStream is) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream(1024);
        int n;
        while ((n = is.read(buffer)) != -1) {
            bout.write(buffer, 0, n);
        }
        extensions.put(lastResource, bout);
    }

    public void modifyOutputStream(JarOutputStream jos) throws IOException {
        // Start by assuming every extension must be imported from cxf-all.xml,
        // then prune the ones another extension already imports itself.
        List<String> imps = new ArrayList<String>(extensions.keySet());
        for (Map.Entry<String, ByteArrayOutputStream> ent : extensions.entrySet()) {
            jos.putNextEntry(new JarEntry(ent.getKey()));
            ent.getValue().writeTo(jos);
            try {
                Document r = new SAXBuilder()
                    .build(new ByteArrayInputStream(ent.getValue().toByteArray()));
                Element root = r.getRootElement();
                for (Iterator itr = root.getChildren().iterator(); itr.hasNext();) {
                    Content n = (Content)itr.next();
                    if (n instanceof Element) {
                        Element e = (Element)n;
                        if ("import".equals(e.getName())
                            && "http://www.springframework.org/schema/beans".equals(e.getNamespaceURI())) {
                            //remove stuff that is imported from other extensions to
                            //keep them from being loaded twice. (it's not an issue
                            //to load them twice, there just is a performance
                            //penalty in doing so
                            String loc = e.getAttributeValue("resource");
                            if (loc.startsWith("classpath:META-INF/cxf/cxf")) {
                                loc = loc.substring(10); // strip the "classpath:" prefix
                                imps.remove(loc);
                            }
                        }
                    }
                }
            } catch (JDOMException e) {
                throw new RuntimeException(e);
            }
        }
        if (!imps.isEmpty()) {
            jos.putNextEntry(new JarEntry("META-INF/cxf/cxf-all.xml"));
            Writer writer = new OutputStreamWriter(jos, "UTF-8");
            writer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
            writer.append("<beans xmlns=\"http://www.springframework.org/schema/beans\"\n");
            writer.append(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n");
            writer.append(" xsi:schemaLocation=\"");
            writer.append("http://www.springframework.org/schema/beans ");
            writer.append("http://www.springframework.org/schema/beans/spring-beans.xsd\">\n");
            writer.append(" <import resource=\"classpath:META-INF/cxf/cxf.xml\"/>\n");
            for (String res : imps) {
                writer.append(" <import resource=\"classpath:");
                writer.append(res);
                writer.append("\"/>\n");
            }
            writer.append("</beans>");
            writer.flush();
        }
    }
}
| 7,598 |
0 | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf | Create_ds/cxf-build-utils/buildtools/src/main/java/org/apache/cxf/pmd/UnsafeStringConstructorRule.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.pmd;
import java.util.List;
import net.sourceforge.pmd.lang.java.rule.AbstractJavaRule;
import net.sourceforge.pmd.RuleContext;
import net.sourceforge.pmd.lang.java.ast.ASTAdditiveExpression;
import net.sourceforge.pmd.lang.java.ast.ASTAllocationExpression;
import net.sourceforge.pmd.lang.java.ast.ASTArgumentList;
import net.sourceforge.pmd.lang.java.ast.ASTArrayDimsAndInits;
import net.sourceforge.pmd.lang.java.ast.ASTClassOrInterfaceType;
import net.sourceforge.pmd.lang.java.ast.ASTExpression;
import net.sourceforge.pmd.lang.java.ast.ASTName;
import net.sourceforge.pmd.lang.java.ast.JavaNode;
import net.sourceforge.pmd.lang.java.typeresolution.TypeHelper;
/**
* Look for new String(byte[]) or new String(byte[], start, end)
* and complain.
*/
public class UnsafeStringConstructorRule extends AbstractJavaRule {

    /** {@inheritDoc} */
    @Override
    public Object visit(ASTAllocationExpression node, Object data) {
        // Only allocations of the form "new String(...)" are of interest.
        if (!(node.jjtGetChild(0) instanceof ASTClassOrInterfaceType)
            || !TypeHelper.isA((ASTClassOrInterfaceType) node.jjtGetChild(0), String.class)) {
            return data;
        }
        ASTArgumentList arglist = node.getFirstChildOfType(ASTArgumentList.class);
        if (arglist == null) { // unlikely
            return data;
        }
        int argCount = arglist.jjtGetNumChildren();
        // new String(byte[]) has one argument; new String(byte[], start, end) has three.
        if (argCount == 1 || argCount == 3) {
            ASTExpression firstArgExpr = arglist.getFirstChildOfType(ASTExpression.class);
            Class<?> exprType = firstArgExpr.getType();
            // pmd reports the type as byte, not byte[]. But since
            // there is no such thing as new String(byte), it seems
            // safe enough to take that as good enough.
            boolean byteTyped = exprType != null
                && (exprType == Byte.TYPE
                    || (exprType.isArray() && exprType.getComponentType() == Byte.TYPE));
            if (byteTyped) {
                addViolation(data, node);
            }
        }
        return data;
    }
}
| 7,599 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.