index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/FooOrBar.java | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.avro.thrift.test;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-03-18")
public class FooOrBar extends org.apache.thrift.TUnion<FooOrBar, FooOrBar._Fields> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"FooOrBar");
private static final org.apache.thrift.protocol.TField FOO_FIELD_DESC = new org.apache.thrift.protocol.TField("foo",
org.apache.thrift.protocol.TType.STRING, (short) 1);
private static final org.apache.thrift.protocol.TField BAR_FIELD_DESC = new org.apache.thrift.protocol.TField("bar",
org.apache.thrift.protocol.TType.STRING, (short) 2);
/**
* The set of fields this struct contains, along with convenience methods for
* finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
FOO((short) 1, "foo"), BAR((short) 2, "bar");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
case 1: // FOO
return FOO;
case 2: // BAR
return BAR;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception if it
* is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null)
throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
tmpMap.put(_Fields.FOO,
new org.apache.thrift.meta_data.FieldMetaData("foo", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.BAR,
new org.apache.thrift.meta_data.FieldMetaData("bar", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(FooOrBar.class, metaDataMap);
}
public FooOrBar() {
super();
}
public FooOrBar(_Fields setField, java.lang.Object value) {
super(setField, value);
}
public FooOrBar(FooOrBar other) {
super(other);
}
public FooOrBar deepCopy() {
return new FooOrBar(this);
}
public static FooOrBar foo(java.lang.String value) {
FooOrBar x = new FooOrBar();
x.setFoo(value);
return x;
}
public static FooOrBar bar(java.lang.String value) {
FooOrBar x = new FooOrBar();
x.setBar(value);
return x;
}
@Override
protected void checkType(_Fields setField, java.lang.Object value) throws java.lang.ClassCastException {
switch (setField) {
case FOO:
if (value instanceof java.lang.String) {
break;
}
throw new java.lang.ClassCastException(
"Was expecting value of type java.lang.String for field 'foo', but got " + value.getClass().getSimpleName());
case BAR:
if (value instanceof java.lang.String) {
break;
}
throw new java.lang.ClassCastException(
"Was expecting value of type java.lang.String for field 'bar', but got " + value.getClass().getSimpleName());
default:
throw new java.lang.IllegalArgumentException("Unknown field id " + setField);
}
}
@Override
protected java.lang.Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot,
org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
_Fields setField = _Fields.findByThriftId(field.id);
if (setField != null) {
switch (setField) {
case FOO:
if (field.type == FOO_FIELD_DESC.type) {
java.lang.String foo;
foo = iprot.readString();
return foo;
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
return null;
}
case BAR:
if (field.type == BAR_FIELD_DESC.type) {
java.lang.String bar;
bar = iprot.readString();
return bar;
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
return null;
}
default:
throw new java.lang.IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
}
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
return null;
}
}
@Override
protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot)
throws org.apache.thrift.TException {
switch (setField_) {
case FOO:
java.lang.String foo = (java.lang.String) value_;
oprot.writeString(foo);
return;
case BAR:
java.lang.String bar = (java.lang.String) value_;
oprot.writeString(bar);
return;
default:
throw new java.lang.IllegalStateException("Cannot write union with unknown field " + setField_);
}
}
@Override
protected java.lang.Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID)
throws org.apache.thrift.TException {
_Fields setField = _Fields.findByThriftId(fieldID);
if (setField != null) {
switch (setField) {
case FOO:
java.lang.String foo;
foo = iprot.readString();
return foo;
case BAR:
java.lang.String bar;
bar = iprot.readString();
return bar;
default:
throw new java.lang.IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
}
} else {
throw new org.apache.thrift.protocol.TProtocolException("Couldn't find a field with field id " + fieldID);
}
}
@Override
protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
switch (setField_) {
case FOO:
java.lang.String foo = (java.lang.String) value_;
oprot.writeString(foo);
return;
case BAR:
java.lang.String bar = (java.lang.String) value_;
oprot.writeString(bar);
return;
default:
throw new java.lang.IllegalStateException("Cannot write union with unknown field " + setField_);
}
}
@Override
protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
switch (setField) {
case FOO:
return FOO_FIELD_DESC;
case BAR:
return BAR_FIELD_DESC;
default:
throw new java.lang.IllegalArgumentException("Unknown field id " + setField);
}
}
@Override
protected org.apache.thrift.protocol.TStruct getStructDesc() {
return STRUCT_DESC;
}
@Override
protected _Fields enumForId(short id) {
return _Fields.findByThriftIdOrThrow(id);
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public java.lang.String getFoo() {
if (getSetField() == _Fields.FOO) {
return (java.lang.String) getFieldValue();
} else {
throw new java.lang.RuntimeException(
"Cannot get field 'foo' because union is currently set to " + getFieldDesc(getSetField()).name);
}
}
public void setFoo(java.lang.String value) {
setField_ = _Fields.FOO;
value_ = java.util.Objects.requireNonNull(value, "_Fields.FOO");
}
public java.lang.String getBar() {
if (getSetField() == _Fields.BAR) {
return (java.lang.String) getFieldValue();
} else {
throw new java.lang.RuntimeException(
"Cannot get field 'bar' because union is currently set to " + getFieldDesc(getSetField()).name);
}
}
public void setBar(java.lang.String value) {
setField_ = _Fields.BAR;
value_ = java.util.Objects.requireNonNull(value, "_Fields.BAR");
}
public boolean isSetFoo() {
return setField_ == _Fields.FOO;
}
public boolean isSetBar() {
return setField_ == _Fields.BAR;
}
public boolean equals(java.lang.Object other) {
if (other instanceof FooOrBar) {
return equals((FooOrBar) other);
} else {
return false;
}
}
public boolean equals(FooOrBar other) {
return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
}
@Override
public int compareTo(FooOrBar other) {
int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
if (lastComparison == 0) {
return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
}
return lastComparison;
}
@Override
public int hashCode() {
java.util.List<java.lang.Object> list = new java.util.ArrayList<java.lang.Object>();
list.add(this.getClass().getName());
org.apache.thrift.TFieldIdEnum setField = getSetField();
if (setField != null) {
list.add(setField.getThriftFieldId());
java.lang.Object value = getFieldValue();
if (value instanceof org.apache.thrift.TEnum) {
list.add(((org.apache.thrift.TEnum) getFieldValue()).getValue());
} else {
list.add(value);
}
}
return list.hashCode();
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
}
| 7,100 |
0 | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Foo.java | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.avro.thrift.test;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-03-18")
public class Foo {
public interface Iface {
public void ping() throws org.apache.thrift.TException;
public int add(int num1, int num2) throws org.apache.thrift.TException;
public void zip() throws org.apache.thrift.TException;
}
public interface AsyncIface {
public void ping(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException;
public void add(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer> resultHandler)
throws org.apache.thrift.TException;
public void zip(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException;
}
public static class Client extends org.apache.thrift.TServiceClient implements Iface {
public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
public Factory() {
}
public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
return new Client(prot);
}
public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
return new Client(iprot, oprot);
}
}
public Client(org.apache.thrift.protocol.TProtocol prot) {
super(prot, prot);
}
public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
super(iprot, oprot);
}
public void ping() throws org.apache.thrift.TException {
send_ping();
recv_ping();
}
public void send_ping() throws org.apache.thrift.TException {
ping_args args = new ping_args();
sendBase("ping", args);
}
public void recv_ping() throws org.apache.thrift.TException {
ping_result result = new ping_result();
receiveBase(result, "ping");
return;
}
public int add(int num1, int num2) throws org.apache.thrift.TException {
send_add(num1, num2);
return recv_add();
}
public void send_add(int num1, int num2) throws org.apache.thrift.TException {
add_args args = new add_args();
args.setNum1(num1);
args.setNum2(num2);
sendBase("add", args);
}
public int recv_add() throws org.apache.thrift.TException {
add_result result = new add_result();
receiveBase(result, "add");
if (result.isSetSuccess()) {
return result.success;
}
throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
"add failed: unknown result");
}
public void zip() throws org.apache.thrift.TException {
send_zip();
}
public void send_zip() throws org.apache.thrift.TException {
zip_args args = new zip_args();
sendBaseOneway("zip", args);
}
}
public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
private org.apache.thrift.async.TAsyncClientManager clientManager;
private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
public Factory(org.apache.thrift.async.TAsyncClientManager clientManager,
org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
this.clientManager = clientManager;
this.protocolFactory = protocolFactory;
}
public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
return new AsyncClient(protocolFactory, clientManager, transport);
}
}
public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory,
org.apache.thrift.async.TAsyncClientManager clientManager,
org.apache.thrift.transport.TNonblockingTransport transport) {
super(protocolFactory, clientManager, transport);
}
public void ping(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException {
checkReady();
ping_call method_call = new ping_call(resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class ping_call extends org.apache.thrift.async.TAsyncMethodCall<Void> {
public ping_call(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler,
org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory,
org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(
new org.apache.thrift.protocol.TMessage("ping", org.apache.thrift.protocol.TMessageType.CALL, 0));
ping_args args = new ping_args();
args.write(prot);
prot.writeMessageEnd();
}
public Void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(
getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return null;
}
}
public void add(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer> resultHandler)
throws org.apache.thrift.TException {
checkReady();
add_call method_call = new add_call(num1, num2, resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class add_call extends org.apache.thrift.async.TAsyncMethodCall<java.lang.Integer> {
private int num1;
private int num2;
public add_call(int num1, int num2, org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer> resultHandler,
org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory,
org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, false);
this.num1 = num1;
this.num2 = num2;
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(
new org.apache.thrift.protocol.TMessage("add", org.apache.thrift.protocol.TMessageType.CALL, 0));
add_args args = new add_args();
args.setNum1(num1);
args.setNum2(num2);
args.write(prot);
prot.writeMessageEnd();
}
public java.lang.Integer getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(
getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return (new Client(prot)).recv_add();
}
}
public void zip(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException {
checkReady();
zip_call method_call = new zip_call(resultHandler, this, ___protocolFactory, ___transport);
this.___currentMethod = method_call;
___manager.call(method_call);
}
public static class zip_call extends org.apache.thrift.async.TAsyncMethodCall<Void> {
public zip_call(org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler,
org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory,
org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
super(client, protocolFactory, transport, resultHandler, true);
}
public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
prot.writeMessageBegin(
new org.apache.thrift.protocol.TMessage("zip", org.apache.thrift.protocol.TMessageType.ONEWAY, 0));
zip_args args = new zip_args();
args.write(prot);
prot.writeMessageEnd();
}
public Void getResult() throws org.apache.thrift.TException {
if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
throw new java.lang.IllegalStateException("Method call not finished!");
}
org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(
getFrameBuffer().array());
org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
return null;
}
}
}
public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I>
implements org.apache.thrift.TProcessor {
private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName());
public Processor(I iface) {
super(iface, getProcessMap(
new java.util.HashMap<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
}
protected Processor(I iface,
java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
super(iface, getProcessMap(processMap));
}
private static <I extends Iface> java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(
java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
processMap.put("ping", new ping());
processMap.put("add", new add());
processMap.put("zip", new zip());
return processMap;
}
public static class ping<I extends Iface> extends org.apache.thrift.ProcessFunction<I, ping_args> {
public ping() {
super("ping");
}
public ping_args getEmptyArgsInstance() {
return new ping_args();
}
protected boolean isOneway() {
return false;
}
@Override
protected boolean rethrowUnhandledExceptions() {
return false;
}
public ping_result getResult(I iface, ping_args args) throws org.apache.thrift.TException {
ping_result result = new ping_result();
iface.ping();
return result;
}
}
public static class add<I extends Iface> extends org.apache.thrift.ProcessFunction<I, add_args> {
public add() {
super("add");
}
public add_args getEmptyArgsInstance() {
return new add_args();
}
protected boolean isOneway() {
return false;
}
@Override
protected boolean rethrowUnhandledExceptions() {
return false;
}
public add_result getResult(I iface, add_args args) throws org.apache.thrift.TException {
add_result result = new add_result();
result.success = iface.add(args.num1, args.num2);
result.setSuccessIsSet(true);
return result;
}
}
public static class zip<I extends Iface> extends org.apache.thrift.ProcessFunction<I, zip_args> {
public zip() {
super("zip");
}
public zip_args getEmptyArgsInstance() {
return new zip_args();
}
protected boolean isOneway() {
return true;
}
@Override
protected boolean rethrowUnhandledExceptions() {
return false;
}
public org.apache.thrift.TBase getResult(I iface, zip_args args) throws org.apache.thrift.TException {
iface.zip();
return null;
}
}
}
public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName());
public AsyncProcessor(I iface) {
super(iface, getProcessMap(
new java.util.HashMap<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
}
protected AsyncProcessor(I iface,
java.util.Map<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
super(iface, getProcessMap(processMap));
}
private static <I extends AsyncIface> java.util.Map<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> getProcessMap(
java.util.Map<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
processMap.put("ping", new ping());
processMap.put("add", new add());
processMap.put("zip", new zip());
return processMap;
}
public static class ping<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, ping_args, Void> {
public ping() {
super("ping");
}
public ping_args getEmptyArgsInstance() {
return new ping_args();
}
public org.apache.thrift.async.AsyncMethodCallback<Void> getResultHandler(
final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new org.apache.thrift.async.AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
ping_result result = new ping_result();
try {
fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY, seqid);
} catch (org.apache.thrift.transport.TTransportException e) {
_LOGGER.error("TTransportException writing to internal frame buffer", e);
fb.close();
} catch (java.lang.Exception e) {
_LOGGER.error("Exception writing to internal frame buffer", e);
onError(e);
}
}
public void onError(java.lang.Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TSerializable msg;
ping_result result = new ping_result();
if (e instanceof org.apache.thrift.transport.TTransportException) {
_LOGGER.error("TTransportException inside handler", e);
fb.close();
return;
} else if (e instanceof org.apache.thrift.TApplicationException) {
_LOGGER.error("TApplicationException inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TApplicationException) e;
} else {
_LOGGER.error("Exception inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
e.getMessage());
}
try {
fcall.sendResponse(fb, msg, msgType, seqid);
} catch (java.lang.Exception ex) {
_LOGGER.error("Exception writing to internal frame buffer", ex);
fb.close();
}
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, ping_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException {
iface.ping(resultHandler);
}
}
public static class add<I extends AsyncIface>
extends org.apache.thrift.AsyncProcessFunction<I, add_args, java.lang.Integer> {
public add() {
super("add");
}
public add_args getEmptyArgsInstance() {
return new add_args();
}
public org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer> getResultHandler(
final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer>() {
public void onComplete(java.lang.Integer o) {
add_result result = new add_result();
result.success = o;
result.setSuccessIsSet(true);
try {
fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY, seqid);
} catch (org.apache.thrift.transport.TTransportException e) {
_LOGGER.error("TTransportException writing to internal frame buffer", e);
fb.close();
} catch (java.lang.Exception e) {
_LOGGER.error("Exception writing to internal frame buffer", e);
onError(e);
}
}
public void onError(java.lang.Exception e) {
byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
org.apache.thrift.TSerializable msg;
add_result result = new add_result();
if (e instanceof org.apache.thrift.transport.TTransportException) {
_LOGGER.error("TTransportException inside handler", e);
fb.close();
return;
} else if (e instanceof org.apache.thrift.TApplicationException) {
_LOGGER.error("TApplicationException inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = (org.apache.thrift.TApplicationException) e;
} else {
_LOGGER.error("Exception inside handler", e);
msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
e.getMessage());
}
try {
fcall.sendResponse(fb, msg, msgType, seqid);
} catch (java.lang.Exception ex) {
_LOGGER.error("Exception writing to internal frame buffer", ex);
fb.close();
}
}
};
}
protected boolean isOneway() {
return false;
}
public void start(I iface, add_args args,
org.apache.thrift.async.AsyncMethodCallback<java.lang.Integer> resultHandler)
throws org.apache.thrift.TException {
iface.add(args.num1, args.num2, resultHandler);
}
}
public static class zip<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, zip_args, Void> {
public zip() {
super("zip");
}
public zip_args getEmptyArgsInstance() {
return new zip_args();
}
public org.apache.thrift.async.AsyncMethodCallback<Void> getResultHandler(
final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {
final org.apache.thrift.AsyncProcessFunction fcall = this;
return new org.apache.thrift.async.AsyncMethodCallback<Void>() {
public void onComplete(Void o) {
}
public void onError(java.lang.Exception e) {
if (e instanceof org.apache.thrift.transport.TTransportException) {
_LOGGER.error("TTransportException inside handler", e);
fb.close();
} else {
_LOGGER.error("Exception inside oneway handler", e);
}
}
};
}
protected boolean isOneway() {
return true;
}
public void start(I iface, zip_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler)
throws org.apache.thrift.TException {
iface.zip(resultHandler);
}
}
}
public static class ping_args implements org.apache.thrift.TBase<ping_args, ping_args._Fields>, java.io.Serializable,
Cloneable, Comparable<ping_args> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"ping_args");
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ping_argsStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ping_argsTupleSchemeFactory();
/**
* The set of fields this struct contains, along with convenience methods for
* finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
;
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception if it
* is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null)
throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ping_args.class, metaDataMap);
}
public ping_args() {
}
/**
* Performs a deep copy on <i>other</i>.
*/
public ping_args(ping_args other) {
}
public ping_args deepCopy() {
return new ping_args(this);
}
@Override
public void clear() {
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
}
throw new java.lang.IllegalStateException();
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned a
* value) and false otherwise
*/
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof ping_args)
return this.equals((ping_args) that);
return false;
}
public boolean equals(ping_args that) {
if (that == null)
return false;
if (this == that)
return true;
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
return hashCode;
}
    @Override
    public int compareTo(ping_args other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }
      int lastComparison = 0;
      // No fields: all instances of the same class compare equal.
      return 0;
    }
    @org.apache.thrift.annotation.Nullable
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
    /** Deserializes this struct from the protocol via the scheme matching it. */
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      scheme(iprot).read(iprot, this);
    }
    /** Serializes this struct to the protocol via the scheme matching it. */
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      scheme(oprot).write(oprot, this);
    }
    @Override
    public java.lang.String toString() {
      // Renders as "ping_args()" — no fields to print.
      java.lang.StringBuilder sb = new java.lang.StringBuilder("ping_args(");
      boolean first = true;
      sb.append(")");
      return sb.toString();
    }
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }
    // Java serialization hooks: delegate to Thrift's compact protocol so the
    // serialized form matches the wire format, wrapping TException as IOException.
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    /** Factory producing the standard (field-by-field) scheme for ping_args. */
    private static class ping_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public ping_argsStandardScheme getScheme() {
        return new ping_argsStandardScheme();
      }
    }
    /** Standard wire scheme for ping_args: reads/writes an empty field list. */
    private static class ping_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<ping_args> {
      public void read(org.apache.thrift.protocol.TProtocol iprot, ping_args struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true) {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
          default:
            // Unknown field (this struct declares none): skip for forward compatibility.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        struct.validate();
      }
      public void write(org.apache.thrift.protocol.TProtocol oprot, ping_args struct)
          throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
    }
    /** Factory producing the compact tuple scheme for ping_args. */
    private static class ping_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public ping_argsTupleScheme getScheme() {
        return new ping_argsTupleScheme();
      }
    }
    /** Tuple scheme for ping_args: nothing to write or read for an empty struct. */
    private static class ping_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<ping_args> {
      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, ping_args struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      }
      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, ping_args struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      }
    }
    /** Selects standard vs. tuple serialization based on the protocol's scheme type. */
    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
          : TUPLE_SCHEME_FACTORY).getScheme();
    }
}
  /**
   * Thrift result struct for ping(). The method is declared with no return value
   * and no declared exceptions, so this struct carries no fields; it exists only
   * so the server can signal completion of the call on the wire.
   */
  public static class ping_result implements org.apache.thrift.TBase<ping_result, ping_result._Fields>,
      java.io.Serializable, Cloneable, Comparable<ping_result> {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
        "ping_result");
    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ping_resultStandardSchemeFactory();
    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ping_resultTupleSchemeFactory();
    /**
     * The set of fields this struct contains, along with convenience methods for
     * finding and manipulating them.
     */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      // Intentionally empty: ping_result declares no fields.
      ;
      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
      static {
        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      @org.apache.thrift.annotation.Nullable
      public static _Fields findByThriftId(int fieldId) {
        switch (fieldId) {
        default:
          return null;
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, throwing an exception if it
       * is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null)
          throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }
      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      @org.apache.thrift.annotation.Nullable
      public static _Fields findByName(java.lang.String name) {
        return byName.get(name);
      }
      private final short _thriftId;
      private final java.lang.String _fieldName;
      _Fields(short thriftId, java.lang.String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }
      public short getThriftFieldId() {
        return _thriftId;
      }
      public java.lang.String getFieldName() {
        return _fieldName;
      }
    }
    // Field metadata; empty because the struct has no fields.
    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
          _Fields.class);
      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ping_result.class, metaDataMap);
    }
    public ping_result() {
    }
    /**
     * Performs a deep copy on <i>other</i>.
     */
    public ping_result(ping_result other) {
    }
    public ping_result deepCopy() {
      return new ping_result(this);
    }
    @Override
    public void clear() {
    }
    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
      switch (field) {
      }
    }
    @org.apache.thrift.annotation.Nullable
    public java.lang.Object getFieldValue(_Fields field) {
      switch (field) {
      }
      throw new java.lang.IllegalStateException();
    }
    /**
     * Returns true if field corresponding to fieldID is set (has been assigned a
     * value) and false otherwise
     */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new java.lang.IllegalArgumentException();
      }
      switch (field) {
      }
      throw new java.lang.IllegalStateException();
    }
    @Override
    public boolean equals(java.lang.Object that) {
      if (that instanceof ping_result)
        return this.equals((ping_result) that);
      return false;
    }
    // With no fields, any two non-null instances are equal.
    public boolean equals(ping_result that) {
      if (that == null)
        return false;
      if (this == that)
        return true;
      return true;
    }
    @Override
    public int hashCode() {
      int hashCode = 1;
      return hashCode;
    }
    @Override
    public int compareTo(ping_result other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }
      int lastComparison = 0;
      return 0;
    }
    @org.apache.thrift.annotation.Nullable
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      scheme(iprot).read(iprot, this);
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      scheme(oprot).write(oprot, this);
    }
    @Override
    public java.lang.String toString() {
      java.lang.StringBuilder sb = new java.lang.StringBuilder("ping_result(");
      boolean first = true;
      sb.append(")");
      return sb.toString();
    }
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }
    // Java serialization delegates to the Thrift compact protocol.
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
      try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private static class ping_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public ping_resultStandardScheme getScheme() {
        return new ping_resultStandardScheme();
      }
    }
    /** Standard wire scheme: reads/writes the empty field list. */
    private static class ping_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<ping_result> {
      public void read(org.apache.thrift.protocol.TProtocol iprot, ping_result struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true) {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
          default:
            // Skip unknown fields for forward compatibility.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        struct.validate();
      }
      public void write(org.apache.thrift.protocol.TProtocol oprot, ping_result struct)
          throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
    }
    private static class ping_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public ping_resultTupleScheme getScheme() {
        return new ping_resultTupleScheme();
      }
    }
    /** Tuple scheme: nothing on the wire for an empty struct. */
    private static class ping_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<ping_result> {
      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, ping_result struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      }
      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, ping_result struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      }
    }
    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
          : TUPLE_SCHEME_FACTORY).getScheme();
    }
  }
  /**
   * Thrift argument struct for add(num1, num2): two required i32 fields.
   * Presence of each primitive field is tracked via bits in __isset_bitfield.
   */
  public static class add_args implements org.apache.thrift.TBase<add_args, add_args._Fields>, java.io.Serializable,
      Cloneable, Comparable<add_args> {
    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
        "add_args");
    private static final org.apache.thrift.protocol.TField NUM1_FIELD_DESC = new org.apache.thrift.protocol.TField(
        "num1", org.apache.thrift.protocol.TType.I32, (short) 1);
    private static final org.apache.thrift.protocol.TField NUM2_FIELD_DESC = new org.apache.thrift.protocol.TField(
        "num2", org.apache.thrift.protocol.TType.I32, (short) 2);
    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new add_argsStandardSchemeFactory();
    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new add_argsTupleSchemeFactory();
    private int num1; // required
    private int num2; // required
    /**
     * The set of fields this struct contains, along with convenience methods for
     * finding and manipulating them.
     */
    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
      NUM1((short) 1, "num1"), NUM2((short) 2, "num2");
      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
      static {
        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
          byName.put(field.getFieldName(), field);
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, or null if its not found.
       */
      @org.apache.thrift.annotation.Nullable
      public static _Fields findByThriftId(int fieldId) {
        switch (fieldId) {
        case 1: // NUM1
          return NUM1;
        case 2: // NUM2
          return NUM2;
        default:
          return null;
        }
      }
      /**
       * Find the _Fields constant that matches fieldId, throwing an exception if it
       * is not found.
       */
      public static _Fields findByThriftIdOrThrow(int fieldId) {
        _Fields fields = findByThriftId(fieldId);
        if (fields == null)
          throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
        return fields;
      }
      /**
       * Find the _Fields constant that matches name, or null if its not found.
       */
      @org.apache.thrift.annotation.Nullable
      public static _Fields findByName(java.lang.String name) {
        return byName.get(name);
      }
      private final short _thriftId;
      private final java.lang.String _fieldName;
      _Fields(short thriftId, java.lang.String fieldName) {
        _thriftId = thriftId;
        _fieldName = fieldName;
      }
      public short getThriftFieldId() {
        return _thriftId;
      }
      public java.lang.String getFieldName() {
        return _fieldName;
      }
    }
    // isset id assignments
    private static final int __NUM1_ISSET_ID = 0;
    private static final int __NUM2_ISSET_ID = 1;
    // Bit i is 1 when the corresponding primitive field has been assigned.
    private byte __isset_bitfield = 0;
    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
    static {
      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
          _Fields.class);
      tmpMap.put(_Fields.NUM1,
          new org.apache.thrift.meta_data.FieldMetaData("num1", org.apache.thrift.TFieldRequirementType.DEFAULT,
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
      tmpMap.put(_Fields.NUM2,
          new org.apache.thrift.meta_data.FieldMetaData("num2", org.apache.thrift.TFieldRequirementType.DEFAULT,
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_args.class, metaDataMap);
    }
    public add_args() {
    }
    public add_args(int num1, int num2) {
      this();
      this.num1 = num1;
      setNum1IsSet(true);
      this.num2 = num2;
      setNum2IsSet(true);
    }
    /**
     * Performs a deep copy on <i>other</i>.
     */
    public add_args(add_args other) {
      __isset_bitfield = other.__isset_bitfield;
      this.num1 = other.num1;
      this.num2 = other.num2;
    }
    public add_args deepCopy() {
      return new add_args(this);
    }
    @Override
    public void clear() {
      setNum1IsSet(false);
      this.num1 = 0;
      setNum2IsSet(false);
      this.num2 = 0;
    }
    public int getNum1() {
      return this.num1;
    }
    public void setNum1(int num1) {
      this.num1 = num1;
      setNum1IsSet(true);
    }
    public void unsetNum1() {
      __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM1_ISSET_ID);
    }
    /**
     * Returns true if field num1 is set (has been assigned a value) and false
     * otherwise
     */
    public boolean isSetNum1() {
      return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM1_ISSET_ID);
    }
    public void setNum1IsSet(boolean value) {
      __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM1_ISSET_ID, value);
    }
    public int getNum2() {
      return this.num2;
    }
    public void setNum2(int num2) {
      this.num2 = num2;
      setNum2IsSet(true);
    }
    public void unsetNum2() {
      __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM2_ISSET_ID);
    }
    /**
     * Returns true if field num2 is set (has been assigned a value) and false
     * otherwise
     */
    public boolean isSetNum2() {
      return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM2_ISSET_ID);
    }
    public void setNum2IsSet(boolean value) {
      __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM2_ISSET_ID, value);
    }
    // Generic setter: a null value unsets the field, otherwise the boxed int is applied.
    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
      switch (field) {
      case NUM1:
        if (value == null) {
          unsetNum1();
        } else {
          setNum1((java.lang.Integer) value);
        }
        break;
      case NUM2:
        if (value == null) {
          unsetNum2();
        } else {
          setNum2((java.lang.Integer) value);
        }
        break;
      }
    }
    @org.apache.thrift.annotation.Nullable
    public java.lang.Object getFieldValue(_Fields field) {
      switch (field) {
      case NUM1:
        return getNum1();
      case NUM2:
        return getNum2();
      }
      throw new java.lang.IllegalStateException();
    }
    /**
     * Returns true if field corresponding to fieldID is set (has been assigned a
     * value) and false otherwise
     */
    public boolean isSet(_Fields field) {
      if (field == null) {
        throw new java.lang.IllegalArgumentException();
      }
      switch (field) {
      case NUM1:
        return isSetNum1();
      case NUM2:
        return isSetNum2();
      }
      throw new java.lang.IllegalStateException();
    }
    @Override
    public boolean equals(java.lang.Object that) {
      if (that instanceof add_args)
        return this.equals((add_args) that);
      return false;
    }
    public boolean equals(add_args that) {
      if (that == null)
        return false;
      if (this == that)
        return true;
      boolean this_present_num1 = true;
      boolean that_present_num1 = true;
      if (this_present_num1 || that_present_num1) {
        if (!(this_present_num1 && that_present_num1))
          return false;
        if (this.num1 != that.num1)
          return false;
      }
      boolean this_present_num2 = true;
      boolean that_present_num2 = true;
      if (this_present_num2 || that_present_num2) {
        if (!(this_present_num2 && that_present_num2))
          return false;
        if (this.num2 != that.num2)
          return false;
      }
      return true;
    }
    @Override
    public int hashCode() {
      int hashCode = 1;
      hashCode = hashCode * 8191 + num1;
      hashCode = hashCode * 8191 + num2;
      return hashCode;
    }
    @Override
    public int compareTo(add_args other) {
      if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
      }
      int lastComparison = 0;
      // Order by isset state first, then by field value, field by field.
      lastComparison = java.lang.Boolean.compare(isSetNum1(), other.isSetNum1());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (isSetNum1()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.num1, other.num1);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      lastComparison = java.lang.Boolean.compare(isSetNum2(), other.isSetNum2());
      if (lastComparison != 0) {
        return lastComparison;
      }
      if (isSetNum2()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.num2, other.num2);
        if (lastComparison != 0) {
          return lastComparison;
        }
      }
      return 0;
    }
    @org.apache.thrift.annotation.Nullable
    public _Fields fieldForId(int fieldId) {
      return _Fields.findByThriftId(fieldId);
    }
    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
      scheme(iprot).read(iprot, this);
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
      scheme(oprot).write(oprot, this);
    }
    @Override
    public java.lang.String toString() {
      java.lang.StringBuilder sb = new java.lang.StringBuilder("add_args(");
      boolean first = true;
      sb.append("num1:");
      sb.append(this.num1);
      first = false;
      if (!first)
        sb.append(", ");
      sb.append("num2:");
      sb.append(this.num2);
      first = false;
      sb.append(")");
      return sb.toString();
    }
    public void validate() throws org.apache.thrift.TException {
      // check for required fields
      // check for sub-struct validity
    }
    // Java serialization delegates to the Thrift compact protocol.
    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
      try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
      try {
        // it doesn't seem like you should have to do this, but java serialization is
        // wacky, and doesn't call the default constructor.
        __isset_bitfield = 0;
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
      } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
      }
    }
    private static class add_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public add_argsStandardScheme getScheme() {
        return new add_argsStandardScheme();
      }
    }
    /** Standard wire scheme: reads/writes num1 and num2 as tagged i32 fields. */
    private static class add_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<add_args> {
      public void read(org.apache.thrift.protocol.TProtocol iprot, add_args struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true) {
          schemeField = iprot.readFieldBegin();
          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
            break;
          }
          switch (schemeField.id) {
          case 1: // NUM1
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.num1 = iprot.readI32();
              struct.setNum1IsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // NUM2
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.num2 = iprot.readI32();
              struct.setNum2IsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // Skip unknown fields for forward compatibility.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        struct.validate();
      }
      public void write(org.apache.thrift.protocol.TProtocol oprot, add_args struct)
          throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        oprot.writeFieldBegin(NUM1_FIELD_DESC);
        oprot.writeI32(struct.num1);
        oprot.writeFieldEnd();
        oprot.writeFieldBegin(NUM2_FIELD_DESC);
        oprot.writeI32(struct.num2);
        oprot.writeFieldEnd();
        oprot.writeFieldStop();
        oprot.writeStructEnd();
      }
    }
    private static class add_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
      public add_argsTupleScheme getScheme() {
        return new add_argsTupleScheme();
      }
    }
    /** Tuple scheme: a presence bitset followed by only the fields that are set. */
    private static class add_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<add_args> {
      @Override
      public void write(org.apache.thrift.protocol.TProtocol prot, add_args struct)
          throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
        java.util.BitSet optionals = new java.util.BitSet();
        if (struct.isSetNum1()) {
          optionals.set(0);
        }
        if (struct.isSetNum2()) {
          optionals.set(1);
        }
        oprot.writeBitSet(optionals, 2);
        if (struct.isSetNum1()) {
          oprot.writeI32(struct.num1);
        }
        if (struct.isSetNum2()) {
          oprot.writeI32(struct.num2);
        }
      }
      @Override
      public void read(org.apache.thrift.protocol.TProtocol prot, add_args struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
        java.util.BitSet incoming = iprot.readBitSet(2);
        if (incoming.get(0)) {
          struct.num1 = iprot.readI32();
          struct.setNum1IsSet(true);
        }
        if (incoming.get(1)) {
          struct.num2 = iprot.readI32();
          struct.setNum2IsSet(true);
        }
      }
    }
    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
          : TUPLE_SCHEME_FACTORY).getScheme();
    }
  }
public static class add_result implements org.apache.thrift.TBase<add_result, add_result._Fields>,
java.io.Serializable, Cloneable, Comparable<add_result> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"add_result");
private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField(
"success", org.apache.thrift.protocol.TType.I32, (short) 0);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new add_resultStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new add_resultTupleSchemeFactory();
private int success; // required
/**
* The set of fields this struct contains, along with convenience methods for
* finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SUCCESS((short) 0, "success");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
case 0: // SUCCESS
return SUCCESS;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception if it
* is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null)
throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __SUCCESS_ISSET_ID = 0;
private byte __isset_bitfield = 0;
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
tmpMap.put(_Fields.SUCCESS,
new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(add_result.class, metaDataMap);
}
public add_result() {
}
public add_result(int success) {
this();
this.success = success;
setSuccessIsSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public add_result(add_result other) {
__isset_bitfield = other.__isset_bitfield;
this.success = other.success;
}
public add_result deepCopy() {
return new add_result(this);
}
@Override
public void clear() {
setSuccessIsSet(false);
this.success = 0;
}
public int getSuccess() {
return this.success;
}
public void setSuccess(int success) {
this.success = success;
setSuccessIsSet(true);
}
public void unsetSuccess() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
/**
* Returns true if field success is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetSuccess() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
}
public void setSuccessIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case SUCCESS:
if (value == null) {
unsetSuccess();
} else {
setSuccess((java.lang.Integer) value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case SUCCESS:
return getSuccess();
}
throw new java.lang.IllegalStateException();
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned a
* value) and false otherwise
*/
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case SUCCESS:
return isSetSuccess();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof add_result)
return this.equals((add_result) that);
return false;
}
public boolean equals(add_result that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_success = true;
boolean that_present_success = true;
if (this_present_success || that_present_success) {
if (!(this_present_success && that_present_success))
return false;
if (this.success != that.success)
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + success;
return hashCode;
}
@Override
public int compareTo(add_result other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSuccess()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("add_result(");
boolean first = true;
sb.append("success:");
sb.append(this.success);
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is
// wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class add_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public add_resultStandardScheme getScheme() {
return new add_resultStandardScheme();
}
}
private static class add_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<add_result> {
public void read(org.apache.thrift.protocol.TProtocol iprot, add_result struct)
throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 0: // SUCCESS
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.success = iprot.readI32();
struct.setSuccessIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, add_result struct)
throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.isSetSuccess()) {
oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
oprot.writeI32(struct.success);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class add_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public add_resultTupleScheme getScheme() {
return new add_resultTupleScheme();
}
}
/**
 * Tuple-protocol reader/writer for add_result: a 1-bit presence BitSet is
 * written first, then the bare i32 `success` value if bit 0 is set.
 * Reader and writer must agree on this bit layout exactly.
 */
private static class add_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<add_result> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, add_result struct)
      throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    java.util.BitSet optionals = new java.util.BitSet();
    if (struct.isSetSuccess()) {
      optionals.set(0);
    }
    oprot.writeBitSet(optionals, 1);
    if (struct.isSetSuccess()) {
      oprot.writeI32(struct.success);
    }
  }

  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, add_result struct)
      throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    java.util.BitSet incoming = iprot.readBitSet(1);
    if (incoming.get(0)) {
      struct.success = iprot.readI32();
      struct.setSuccessIsSet(true);
    }
  }
}
/**
 * Picks the serialization scheme instance matching the protocol's preferred
 * scheme class: StandardScheme for field-tagged protocols, TupleScheme otherwise.
 */
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  org.apache.thrift.scheme.SchemeFactory factory;
  if (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())) {
    factory = STANDARD_SCHEME_FACTORY;
  } else {
    factory = TUPLE_SCHEME_FACTORY;
  }
  return factory.getScheme();
}
}
/**
 * Thrift argument struct for the zero-argument {@code zip} call.
 *
 * <p>The struct has no fields: serialization writes only the struct envelope,
 * equality and hashing are constant, and the empty {@code _Fields} enum plus
 * empty switches are retained so the class still satisfies the standard
 * {@link org.apache.thrift.TBase} contract expected by the runtime.
 */
public static class zip_args implements org.apache.thrift.TBase<zip_args, zip_args._Fields>, java.io.Serializable,
    Cloneable, Comparable<zip_args> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
      "zip_args");
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new zip_argsStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new zip_argsTupleSchemeFactory();

  /**
   * The set of fields this struct contains, along with convenience methods for
   * finding and manipulating them. zip_args declares no fields, so the enum is
   * intentionally empty.
   */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    ;

    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch (fieldId) {
      default:
        return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception if it
     * is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null)
        throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }

  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
        _Fields.class);
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(zip_args.class, metaDataMap);
  }

  public zip_args() {
  }

  /**
   * Performs a deep copy on <i>other</i>. There is no state to copy.
   */
  public zip_args(zip_args other) {
  }

  public zip_args deepCopy() {
    return new zip_args(this);
  }

  @Override
  public void clear() {
  }

  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    }
  }

  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    }
    throw new java.lang.IllegalStateException();
  }

  /**
   * Returns true if field corresponding to fieldID is set (has been assigned a
   * value) and false otherwise
   */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    }
    throw new java.lang.IllegalStateException();
  }

  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof zip_args)
      return this.equals((zip_args) that);
    return false;
  }

  public boolean equals(zip_args that) {
    // With no fields, any two non-null zip_args instances are equal.
    if (that == null)
      return false;
    if (this == that)
      return true;
    return true;
  }

  @Override
  public int hashCode() {
    // Constant hash: the struct has no fields, consistent with equals().
    return 1;
  }

  @Override
  public int compareTo(zip_args other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    // No fields to compare.
    return 0;
  }

  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }

  @Override
  public java.lang.String toString() {
    // No fields to render between the parentheses.
    return "zip_args()";
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields: none
    // check for sub-struct validity: none
  }

  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class zip_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public zip_argsStandardScheme getScheme() {
      return new zip_argsStandardScheme();
    }
  }

  /** Standard scheme: reads/skips until STOP, writes the empty envelope. */
  private static class zip_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<zip_args> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, zip_args struct)
        throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
        default:
          // Unknown field (this struct defines none): skip it.
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, zip_args struct)
        throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }

  private static class zip_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public zip_argsTupleScheme getScheme() {
      return new zip_argsTupleScheme();
    }
  }

  /** Tuple scheme: no fields, so nothing is encoded or decoded. */
  private static class zip_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<zip_args> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, zip_args struct)
        throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, zip_args struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    }
  }

  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
        : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
}
/**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.avro.thrift.test;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-03-18")
public class Test
implements org.apache.thrift.TBase<Test, Test._Fields>, java.io.Serializable, Cloneable, Comparable<Test> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Test");
private static final org.apache.thrift.protocol.TField BOOL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"boolField", org.apache.thrift.protocol.TType.BOOL, (short) 1);
private static final org.apache.thrift.protocol.TField BYTE_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"byteField", org.apache.thrift.protocol.TType.BYTE, (short) 2);
private static final org.apache.thrift.protocol.TField BYTE_OPTIONAL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"byteOptionalField", org.apache.thrift.protocol.TType.BYTE, (short) 16);
private static final org.apache.thrift.protocol.TField I16_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"i16Field", org.apache.thrift.protocol.TType.I16, (short) 3);
private static final org.apache.thrift.protocol.TField I16_OPTIONAL_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"i16OptionalField", org.apache.thrift.protocol.TType.I16, (short) 15);
private static final org.apache.thrift.protocol.TField I32_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"i32Field", org.apache.thrift.protocol.TType.I32, (short) 4);
private static final org.apache.thrift.protocol.TField I64_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"i64Field", org.apache.thrift.protocol.TType.I64, (short) 5);
private static final org.apache.thrift.protocol.TField DOUBLE_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"doubleField", org.apache.thrift.protocol.TType.DOUBLE, (short) 6);
private static final org.apache.thrift.protocol.TField STRING_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"stringField", org.apache.thrift.protocol.TType.STRING, (short) 7);
private static final org.apache.thrift.protocol.TField BINARY_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"binaryField", org.apache.thrift.protocol.TType.STRING, (short) 8);
private static final org.apache.thrift.protocol.TField MAP_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"mapField", org.apache.thrift.protocol.TType.MAP, (short) 9);
private static final org.apache.thrift.protocol.TField LIST_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"listField", org.apache.thrift.protocol.TType.LIST, (short) 10);
private static final org.apache.thrift.protocol.TField SET_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"setField", org.apache.thrift.protocol.TType.SET, (short) 11);
private static final org.apache.thrift.protocol.TField ENUM_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"enumField", org.apache.thrift.protocol.TType.I32, (short) 12);
private static final org.apache.thrift.protocol.TField STRUCT_FIELD_FIELD_DESC = new org.apache.thrift.protocol.TField(
"structField", org.apache.thrift.protocol.TType.STRUCT, (short) 13);
private static final org.apache.thrift.protocol.TField FOO_OR_BAR_FIELD_DESC = new org.apache.thrift.protocol.TField(
"fooOrBar", org.apache.thrift.protocol.TType.STRUCT, (short) 14);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new TestStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TestTupleSchemeFactory();
private boolean boolField; // required
private byte byteField; // required
private byte byteOptionalField; // optional
private short i16Field; // required
private short i16OptionalField; // optional
private int i32Field; // optional
private long i64Field; // required
private double doubleField; // required
private @org.apache.thrift.annotation.Nullable java.lang.String stringField; // required
private @org.apache.thrift.annotation.Nullable java.nio.ByteBuffer binaryField; // optional
private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String, java.lang.Integer> mapField; // required
private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> listField; // required
private @org.apache.thrift.annotation.Nullable java.util.Set<java.lang.Integer> setField; // required
private @org.apache.thrift.annotation.Nullable E enumField; // required
private @org.apache.thrift.annotation.Nullable Nested structField; // required
private @org.apache.thrift.annotation.Nullable FooOrBar fooOrBar; // required
/**
 * The set of fields this struct contains, along with convenience methods for
 * finding and manipulating them.
 *
 * Note: Thrift ids are not in declaration order here — ids 15 and 16 belong to
 * the optional fields, so the id switch below maps each wire id explicitly.
 */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
  BOOL_FIELD((short) 1, "boolField"), BYTE_FIELD((short) 2, "byteField"),
  BYTE_OPTIONAL_FIELD((short) 16, "byteOptionalField"), I16_FIELD((short) 3, "i16Field"),
  I16_OPTIONAL_FIELD((short) 15, "i16OptionalField"), I32_FIELD((short) 4, "i32Field"),
  I64_FIELD((short) 5, "i64Field"), DOUBLE_FIELD((short) 6, "doubleField"), STRING_FIELD((short) 7, "stringField"),
  BINARY_FIELD((short) 8, "binaryField"), MAP_FIELD((short) 9, "mapField"), LIST_FIELD((short) 10, "listField"),
  SET_FIELD((short) 11, "setField"),
  /**
   *
   * @see E
   */
  ENUM_FIELD((short) 12, "enumField"), STRUCT_FIELD((short) 13, "structField"), FOO_OR_BAR((short) 14, "fooOrBar");

  // Name -> constant lookup, populated once at class-load time.
  private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

  static {
    for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
      byName.put(field.getFieldName(), field);
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, or null if its not found.
   */
  @org.apache.thrift.annotation.Nullable
  public static _Fields findByThriftId(int fieldId) {
    switch (fieldId) {
    case 1: // BOOL_FIELD
      return BOOL_FIELD;
    case 2: // BYTE_FIELD
      return BYTE_FIELD;
    case 16: // BYTE_OPTIONAL_FIELD
      return BYTE_OPTIONAL_FIELD;
    case 3: // I16_FIELD
      return I16_FIELD;
    case 15: // I16_OPTIONAL_FIELD
      return I16_OPTIONAL_FIELD;
    case 4: // I32_FIELD
      return I32_FIELD;
    case 5: // I64_FIELD
      return I64_FIELD;
    case 6: // DOUBLE_FIELD
      return DOUBLE_FIELD;
    case 7: // STRING_FIELD
      return STRING_FIELD;
    case 8: // BINARY_FIELD
      return BINARY_FIELD;
    case 9: // MAP_FIELD
      return MAP_FIELD;
    case 10: // LIST_FIELD
      return LIST_FIELD;
    case 11: // SET_FIELD
      return SET_FIELD;
    case 12: // ENUM_FIELD
      return ENUM_FIELD;
    case 13: // STRUCT_FIELD
      return STRUCT_FIELD;
    case 14: // FOO_OR_BAR
      return FOO_OR_BAR;
    default:
      return null;
    }
  }

  /**
   * Find the _Fields constant that matches fieldId, throwing an exception if it
   * is not found.
   */
  public static _Fields findByThriftIdOrThrow(int fieldId) {
    _Fields fields = findByThriftId(fieldId);
    if (fields == null)
      throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
    return fields;
  }

  /**
   * Find the _Fields constant that matches name, or null if its not found.
   */
  @org.apache.thrift.annotation.Nullable
  public static _Fields findByName(java.lang.String name) {
    return byName.get(name);
  }

  private final short _thriftId; // wire field id
  private final java.lang.String _fieldName; // IDL field name

  _Fields(short thriftId, java.lang.String fieldName) {
    _thriftId = thriftId;
    _fieldName = fieldName;
  }

  public short getThriftFieldId() {
    return _thriftId;
  }

  public java.lang.String getFieldName() {
    return _fieldName;
  }
}
// isset id assignments
private static final int __BOOLFIELD_ISSET_ID = 0;
private static final int __BYTEFIELD_ISSET_ID = 1;
private static final int __BYTEOPTIONALFIELD_ISSET_ID = 2;
private static final int __I16FIELD_ISSET_ID = 3;
private static final int __I16OPTIONALFIELD_ISSET_ID = 4;
private static final int __I32FIELD_ISSET_ID = 5;
private static final int __I64FIELD_ISSET_ID = 6;
private static final int __DOUBLEFIELD_ISSET_ID = 7;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = { _Fields.BYTE_OPTIONAL_FIELD, _Fields.I16_OPTIONAL_FIELD,
_Fields.I32_FIELD, _Fields.BINARY_FIELD };
// Per-field runtime metadata (requirement level and wire type), registered with
// the global Thrift metadata registry so reflective tooling can inspect Test.
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
  java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
      _Fields.class);
  tmpMap.put(_Fields.BOOL_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("boolField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
  tmpMap.put(_Fields.BYTE_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("byteField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
  tmpMap.put(_Fields.BYTE_OPTIONAL_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("byteOptionalField",
          org.apache.thrift.TFieldRequirementType.OPTIONAL,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BYTE)));
  tmpMap.put(_Fields.I16_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("i16Field", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
  tmpMap.put(_Fields.I16_OPTIONAL_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("i16OptionalField",
          org.apache.thrift.TFieldRequirementType.OPTIONAL,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16)));
  tmpMap.put(_Fields.I32_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("i32Field", org.apache.thrift.TFieldRequirementType.OPTIONAL,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
  tmpMap.put(_Fields.I64_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("i64Field", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
  tmpMap.put(_Fields.DOUBLE_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("doubleField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
  tmpMap.put(_Fields.STRING_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("stringField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
  tmpMap.put(_Fields.BINARY_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("binaryField", org.apache.thrift.TFieldRequirementType.OPTIONAL,
          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING, true)));
  tmpMap.put(_Fields.MAP_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("mapField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
  tmpMap.put(_Fields.LIST_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("listField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
  tmpMap.put(_Fields.SET_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("setField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
  tmpMap.put(_Fields.ENUM_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("enumField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, E.class)));
  tmpMap.put(_Fields.STRUCT_FIELD,
      new org.apache.thrift.meta_data.FieldMetaData("structField", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Nested.class)));
  tmpMap.put(_Fields.FOO_OR_BAR,
      new org.apache.thrift.meta_data.FieldMetaData("fooOrBar", org.apache.thrift.TFieldRequirementType.DEFAULT,
          new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, FooOrBar.class)));
  metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
  org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Test.class, metaDataMap);
}
/** No-arg constructor: all fields unset (primitive isset bits stay 0, references null). */
public Test() {
}
/**
 * All-fields constructor for the non-optional fields. The optional fields
 * (byteOptionalField, i16OptionalField, i32Field, binaryField — see the
 * `optionals` array) are excluded and stay unset. Primitive arguments also
 * flip their isset bits so serialization knows they were assigned.
 */
public Test(boolean boolField, byte byteField, short i16Field, long i64Field, double doubleField,
    java.lang.String stringField, java.util.Map<java.lang.String, java.lang.Integer> mapField,
    java.util.List<java.lang.Integer> listField, java.util.Set<java.lang.Integer> setField, E enumField,
    Nested structField, FooOrBar fooOrBar) {
  this();
  this.boolField = boolField;
  setBoolFieldIsSet(true);
  this.byteField = byteField;
  setByteFieldIsSet(true);
  this.i16Field = i16Field;
  setI16FieldIsSet(true);
  this.i64Field = i64Field;
  setI64FieldIsSet(true);
  this.doubleField = doubleField;
  setDoubleFieldIsSet(true);
  this.stringField = stringField;
  this.mapField = mapField;
  this.listField = listField;
  this.setField = setField;
  this.enumField = enumField;
  this.structField = structField;
  this.fooOrBar = fooOrBar;
}
/**
 * Performs a deep copy on <i>other</i>.
 *
 * Primitives and the isset bitfield are copied directly; collections are
 * copied into new containers; the binary buffer goes through
 * TBaseHelper.copyBinary; structField/fooOrBar are deep-copied via their own
 * copy constructors. stringField and enumField are assigned as-is
 * (String and enum constants are immutable).
 */
public Test(Test other) {
  __isset_bitfield = other.__isset_bitfield;
  this.boolField = other.boolField;
  this.byteField = other.byteField;
  this.byteOptionalField = other.byteOptionalField;
  this.i16Field = other.i16Field;
  this.i16OptionalField = other.i16OptionalField;
  this.i32Field = other.i32Field;
  this.i64Field = other.i64Field;
  this.doubleField = other.doubleField;
  if (other.isSetStringField()) {
    this.stringField = other.stringField;
  }
  if (other.isSetBinaryField()) {
    this.binaryField = org.apache.thrift.TBaseHelper.copyBinary(other.binaryField);
  }
  if (other.isSetMapField()) {
    java.util.Map<java.lang.String, java.lang.Integer> __this__mapField = new java.util.HashMap<java.lang.String, java.lang.Integer>(
        other.mapField);
    this.mapField = __this__mapField;
  }
  if (other.isSetListField()) {
    java.util.List<java.lang.Integer> __this__listField = new java.util.ArrayList<java.lang.Integer>(other.listField);
    this.listField = __this__listField;
  }
  if (other.isSetSetField()) {
    java.util.Set<java.lang.Integer> __this__setField = new java.util.HashSet<java.lang.Integer>(other.setField);
    this.setField = __this__setField;
  }
  if (other.isSetEnumField()) {
    this.enumField = other.enumField;
  }
  if (other.isSetStructField()) {
    this.structField = new Nested(other.structField);
  }
  if (other.isSetFooOrBar()) {
    this.fooOrBar = new FooOrBar(other.fooOrBar);
  }
}
/** Deep copy via the copy constructor above. */
public Test deepCopy() {
  return new Test(this);
}
@Override
public void clear() {
  // Reset every field to its unset default: primitives back to 0/false with
  // their isset bits cleared, object references back to null.
  setBoolFieldIsSet(false);
  this.boolField = false;
  setByteFieldIsSet(false);
  this.byteField = 0;
  setByteOptionalFieldIsSet(false);
  this.byteOptionalField = 0;
  setI16FieldIsSet(false);
  this.i16Field = 0;
  setI16OptionalFieldIsSet(false);
  this.i16OptionalField = 0;
  setI32FieldIsSet(false);
  this.i32Field = 0;
  setI64FieldIsSet(false);
  this.i64Field = 0;
  setDoubleFieldIsSet(false);
  this.doubleField = 0.0;
  this.stringField = null;
  this.binaryField = null;
  this.mapField = null;
  this.listField = null;
  this.setField = null;
  this.enumField = null;
  this.structField = null;
  this.fooOrBar = null;
}
// --- boolField / byteField / byteOptionalField accessors --------------------
// Primitive fields record "assigned" state in __isset_bitfield; the
// unset/isSet/setIsSet trio manipulates that bit, not the value.

public boolean isBoolField() {
  return this.boolField;
}

public void setBoolField(boolean boolField) {
  this.boolField = boolField;
  setBoolFieldIsSet(true);
}

public void unsetBoolField() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __BOOLFIELD_ISSET_ID);
}

/**
 * Returns true if field boolField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetBoolField() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __BOOLFIELD_ISSET_ID);
}

public void setBoolFieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __BOOLFIELD_ISSET_ID, value);
}

public byte getByteField() {
  return this.byteField;
}

public void setByteField(byte byteField) {
  this.byteField = byteField;
  setByteFieldIsSet(true);
}

public void unsetByteField() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __BYTEFIELD_ISSET_ID);
}

/**
 * Returns true if field byteField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetByteField() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __BYTEFIELD_ISSET_ID);
}

public void setByteFieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __BYTEFIELD_ISSET_ID, value);
}

public byte getByteOptionalField() {
  return this.byteOptionalField;
}

public void setByteOptionalField(byte byteOptionalField) {
  this.byteOptionalField = byteOptionalField;
  setByteOptionalFieldIsSet(true);
}

public void unsetByteOptionalField() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID);
}

/**
 * Returns true if field byteOptionalField is set (has been assigned a value)
 * and false otherwise
 */
public boolean isSetByteOptionalField() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID);
}

public void setByteOptionalFieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __BYTEOPTIONALFIELD_ISSET_ID, value);
}
// --- i16Field / i16OptionalField / i32Field accessors -----------------------
// Same isset-bit pattern as above; i32Field is optional on the wire.

public short getI16Field() {
  return this.i16Field;
}

public void setI16Field(short i16Field) {
  this.i16Field = i16Field;
  setI16FieldIsSet(true);
}

public void unsetI16Field() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __I16FIELD_ISSET_ID);
}

/**
 * Returns true if field i16Field is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetI16Field() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __I16FIELD_ISSET_ID);
}

public void setI16FieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __I16FIELD_ISSET_ID, value);
}

public short getI16OptionalField() {
  return this.i16OptionalField;
}

public void setI16OptionalField(short i16OptionalField) {
  this.i16OptionalField = i16OptionalField;
  setI16OptionalFieldIsSet(true);
}

public void unsetI16OptionalField() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID);
}

/**
 * Returns true if field i16OptionalField is set (has been assigned a value) and
 * false otherwise
 */
public boolean isSetI16OptionalField() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID);
}

public void setI16OptionalFieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __I16OPTIONALFIELD_ISSET_ID, value);
}

public int getI32Field() {
  return this.i32Field;
}

public void setI32Field(int i32Field) {
  this.i32Field = i32Field;
  setI32FieldIsSet(true);
}

public void unsetI32Field() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __I32FIELD_ISSET_ID);
}

/**
 * Returns true if field i32Field is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetI32Field() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __I32FIELD_ISSET_ID);
}

public void setI32FieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __I32FIELD_ISSET_ID, value);
}
// --- i64Field / doubleField accessors ----------------------------------------

public long getI64Field() {
  return this.i64Field;
}

public void setI64Field(long i64Field) {
  this.i64Field = i64Field;
  setI64FieldIsSet(true);
}

public void unsetI64Field() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __I64FIELD_ISSET_ID);
}

/**
 * Returns true if field i64Field is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetI64Field() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __I64FIELD_ISSET_ID);
}

public void setI64FieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __I64FIELD_ISSET_ID, value);
}

public double getDoubleField() {
  return this.doubleField;
}

public void setDoubleField(double doubleField) {
  this.doubleField = doubleField;
  setDoubleFieldIsSet(true);
}

public void unsetDoubleField() {
  __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID);
}

/**
 * Returns true if field doubleField is set (has been assigned a value) and
 * false otherwise
 */
public boolean isSetDoubleField() {
  return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID);
}

public void setDoubleFieldIsSet(boolean value) {
  __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __DOUBLEFIELD_ISSET_ID, value);
}
// --- stringField / binaryField accessors --------------------------------------
// Reference fields use null itself as the "unset" marker, so the *IsSet setters
// only act when value == false. Binary data is defensively copied on both set
// and get so callers can't mutate the internal buffer.

@org.apache.thrift.annotation.Nullable
public java.lang.String getStringField() {
  return this.stringField;
}

public void setStringField(@org.apache.thrift.annotation.Nullable java.lang.String stringField) {
  this.stringField = stringField;
}

public void unsetStringField() {
  this.stringField = null;
}

/**
 * Returns true if field stringField is set (has been assigned a value) and
 * false otherwise
 */
public boolean isSetStringField() {
  return this.stringField != null;
}

public void setStringFieldIsSet(boolean value) {
  if (!value) {
    this.stringField = null;
  }
}

public byte[] getBinaryField() {
  // Normalize the buffer first so array() exposes exactly the payload bytes.
  setBinaryField(org.apache.thrift.TBaseHelper.rightSize(binaryField));
  return binaryField == null ? null : binaryField.array();
}

public java.nio.ByteBuffer bufferForBinaryField() {
  return org.apache.thrift.TBaseHelper.copyBinary(binaryField);
}

public void setBinaryField(byte[] binaryField) {
  this.binaryField = binaryField == null ? (java.nio.ByteBuffer) null : java.nio.ByteBuffer.wrap(binaryField.clone());
}

public void setBinaryField(@org.apache.thrift.annotation.Nullable java.nio.ByteBuffer binaryField) {
  this.binaryField = org.apache.thrift.TBaseHelper.copyBinary(binaryField);
}

public void unsetBinaryField() {
  this.binaryField = null;
}

/**
 * Returns true if field binaryField is set (has been assigned a value) and
 * false otherwise
 */
public boolean isSetBinaryField() {
  return this.binaryField != null;
}

public void setBinaryFieldIsSet(boolean value) {
  if (!value) {
    this.binaryField = null;
  }
}
// --- mapField / listField accessors -------------------------------------------
// Collection fields are lazily created by the putTo/addTo helpers; null means
// "unset" and size helpers treat null as empty.

public int getMapFieldSize() {
  return (this.mapField == null) ? 0 : this.mapField.size();
}

public void putToMapField(java.lang.String key, int val) {
  if (this.mapField == null) {
    this.mapField = new java.util.HashMap<java.lang.String, java.lang.Integer>();
  }
  this.mapField.put(key, val);
}

@org.apache.thrift.annotation.Nullable
public java.util.Map<java.lang.String, java.lang.Integer> getMapField() {
  return this.mapField;
}

public void setMapField(
    @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String, java.lang.Integer> mapField) {
  this.mapField = mapField;
}

public void unsetMapField() {
  this.mapField = null;
}

/**
 * Returns true if field mapField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetMapField() {
  return this.mapField != null;
}

public void setMapFieldIsSet(boolean value) {
  if (!value) {
    this.mapField = null;
  }
}

public int getListFieldSize() {
  return (this.listField == null) ? 0 : this.listField.size();
}

@org.apache.thrift.annotation.Nullable
public java.util.Iterator<java.lang.Integer> getListFieldIterator() {
  return (this.listField == null) ? null : this.listField.iterator();
}

public void addToListField(int elem) {
  if (this.listField == null) {
    this.listField = new java.util.ArrayList<java.lang.Integer>();
  }
  this.listField.add(elem);
}

@org.apache.thrift.annotation.Nullable
public java.util.List<java.lang.Integer> getListField() {
  return this.listField;
}

public void setListField(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.Integer> listField) {
  this.listField = listField;
}

public void unsetListField() {
  this.listField = null;
}

/**
 * Returns true if field listField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetListField() {
  return this.listField != null;
}

public void setListFieldIsSet(boolean value) {
  if (!value) {
    this.listField = null;
  }
}
/** Returns the number of elements in setField, or 0 if the set is unset. */
public int getSetFieldSize() {
return (this.setField == null) ? 0 : this.setField.size();
}
/** Returns an iterator over setField, or null if the set is unset. */
@org.apache.thrift.annotation.Nullable
public java.util.Iterator<java.lang.Integer> getSetFieldIterator() {
return (this.setField == null) ? null : this.setField.iterator();
}
/** Adds one element to setField, lazily creating the set on first use. */
public void addToSetField(int elem) {
if (this.setField == null) {
this.setField = new java.util.HashSet<java.lang.Integer>();
}
this.setField.add(elem);
}
/** Returns the live set (not a copy), or null if unset. */
@org.apache.thrift.annotation.Nullable
public java.util.Set<java.lang.Integer> getSetField() {
return this.setField;
}
/** Stores the given set by reference (no defensive copy). */
public void setSetField(@org.apache.thrift.annotation.Nullable java.util.Set<java.lang.Integer> setField) {
this.setField = setField;
}
/** Clears setField (null means "unset"). */
public void unsetSetField() {
this.setField = null;
}
/**
 * Returns true if field setField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetSetField() {
return this.setField != null;
}
/** Marks the field unset when value is false; presence is tracked by nullness, so true is a no-op. */
public void setSetFieldIsSet(boolean value) {
if (!value) {
this.setField = null;
}
}
/**
 * Returns the enum field, or null if unset.
 *
 * @see E
 */
@org.apache.thrift.annotation.Nullable
public E getEnumField() {
return this.enumField;
}
/**
 * Sets the enum field (null clears it).
 *
 * @see E
 */
public void setEnumField(@org.apache.thrift.annotation.Nullable E enumField) {
this.enumField = enumField;
}
/** Clears enumField (null means "unset"). */
public void unsetEnumField() {
this.enumField = null;
}
/**
 * Returns true if field enumField is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetEnumField() {
return this.enumField != null;
}
/** Marks the field unset when value is false; presence is tracked by nullness, so true is a no-op. */
public void setEnumFieldIsSet(boolean value) {
if (!value) {
this.enumField = null;
}
}
/** Returns the nested struct field, or null if unset. */
@org.apache.thrift.annotation.Nullable
public Nested getStructField() {
return this.structField;
}
/** Stores the given struct by reference (no defensive copy); null clears it. */
public void setStructField(@org.apache.thrift.annotation.Nullable Nested structField) {
this.structField = structField;
}
/** Clears structField (null means "unset"). */
public void unsetStructField() {
this.structField = null;
}
/**
 * Returns true if field structField is set (has been assigned a value) and
 * false otherwise
 */
public boolean isSetStructField() {
return this.structField != null;
}
/** Marks the field unset when value is false; presence is tracked by nullness, so true is a no-op. */
public void setStructFieldIsSet(boolean value) {
if (!value) {
this.structField = null;
}
}
/** Returns the FooOrBar union field, or null if unset. */
@org.apache.thrift.annotation.Nullable
public FooOrBar getFooOrBar() {
return this.fooOrBar;
}
/** Stores the given union by reference (no defensive copy); null clears it. */
public void setFooOrBar(@org.apache.thrift.annotation.Nullable FooOrBar fooOrBar) {
this.fooOrBar = fooOrBar;
}
/** Clears fooOrBar (null means "unset"). */
public void unsetFooOrBar() {
this.fooOrBar = null;
}
/**
 * Returns true if field fooOrBar is set (has been assigned a value) and false
 * otherwise
 */
public boolean isSetFooOrBar() {
return this.fooOrBar != null;
}
/** Marks the field unset when value is false; presence is tracked by nullness, so true is a no-op. */
public void setFooOrBarIsSet(boolean value) {
if (!value) {
this.fooOrBar = null;
}
}
/**
 * Generic, reflection-style setter used by the Thrift runtime: dispatches on
 * the _Fields id and casts the boxed value to the field's declared type.
 * A null value unsets the field. Casts are unchecked by design — the caller
 * (the Thrift framework) guarantees the runtime type.
 */
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case BOOL_FIELD:
if (value == null) {
unsetBoolField();
} else {
setBoolField((java.lang.Boolean) value);
}
break;
case BYTE_FIELD:
if (value == null) {
unsetByteField();
} else {
setByteField((java.lang.Byte) value);
}
break;
case BYTE_OPTIONAL_FIELD:
if (value == null) {
unsetByteOptionalField();
} else {
setByteOptionalField((java.lang.Byte) value);
}
break;
case I16_FIELD:
if (value == null) {
unsetI16Field();
} else {
setI16Field((java.lang.Short) value);
}
break;
case I16_OPTIONAL_FIELD:
if (value == null) {
unsetI16OptionalField();
} else {
setI16OptionalField((java.lang.Short) value);
}
break;
case I32_FIELD:
if (value == null) {
unsetI32Field();
} else {
setI32Field((java.lang.Integer) value);
}
break;
case I64_FIELD:
if (value == null) {
unsetI64Field();
} else {
setI64Field((java.lang.Long) value);
}
break;
case DOUBLE_FIELD:
if (value == null) {
unsetDoubleField();
} else {
setDoubleField((java.lang.Double) value);
}
break;
case STRING_FIELD:
if (value == null) {
unsetStringField();
} else {
setStringField((java.lang.String) value);
}
break;
case BINARY_FIELD:
if (value == null) {
unsetBinaryField();
} else {
// binary accepts either representation; route to the matching overload
if (value instanceof byte[]) {
setBinaryField((byte[]) value);
} else {
setBinaryField((java.nio.ByteBuffer) value);
}
}
break;
case MAP_FIELD:
if (value == null) {
unsetMapField();
} else {
setMapField((java.util.Map<java.lang.String, java.lang.Integer>) value);
}
break;
case LIST_FIELD:
if (value == null) {
unsetListField();
} else {
setListField((java.util.List<java.lang.Integer>) value);
}
break;
case SET_FIELD:
if (value == null) {
unsetSetField();
} else {
setSetField((java.util.Set<java.lang.Integer>) value);
}
break;
case ENUM_FIELD:
if (value == null) {
unsetEnumField();
} else {
setEnumField((E) value);
}
break;
case STRUCT_FIELD:
if (value == null) {
unsetStructField();
} else {
setStructField((Nested) value);
}
break;
case FOO_OR_BAR:
if (value == null) {
unsetFooOrBar();
} else {
setFooOrBar((FooOrBar) value);
}
break;
}
}
/**
 * Generic getter used by the Thrift runtime: returns the named field's value
 * boxed as Object (primitives are auto-boxed by the individual getters).
 * Throws IllegalStateException only if the switch is somehow not exhaustive.
 */
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case BOOL_FIELD:
return isBoolField();
case BYTE_FIELD:
return getByteField();
case BYTE_OPTIONAL_FIELD:
return getByteOptionalField();
case I16_FIELD:
return getI16Field();
case I16_OPTIONAL_FIELD:
return getI16OptionalField();
case I32_FIELD:
return getI32Field();
case I64_FIELD:
return getI64Field();
case DOUBLE_FIELD:
return getDoubleField();
case STRING_FIELD:
return getStringField();
case BINARY_FIELD:
return getBinaryField();
case MAP_FIELD:
return getMapField();
case LIST_FIELD:
return getListField();
case SET_FIELD:
return getSetField();
case ENUM_FIELD:
return getEnumField();
case STRUCT_FIELD:
return getStructField();
case FOO_OR_BAR:
return getFooOrBar();
}
throw new java.lang.IllegalStateException();
}
/**
 * Returns true if field corresponding to fieldID is set (has been assigned a
 * value) and false otherwise.
 * Rejects a null field with IllegalArgumentException; the trailing
 * IllegalStateException is unreachable while the switch stays exhaustive.
 */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case BOOL_FIELD:
return isSetBoolField();
case BYTE_FIELD:
return isSetByteField();
case BYTE_OPTIONAL_FIELD:
return isSetByteOptionalField();
case I16_FIELD:
return isSetI16Field();
case I16_OPTIONAL_FIELD:
return isSetI16OptionalField();
case I32_FIELD:
return isSetI32Field();
case I64_FIELD:
return isSetI64Field();
case DOUBLE_FIELD:
return isSetDoubleField();
case STRING_FIELD:
return isSetStringField();
case BINARY_FIELD:
return isSetBinaryField();
case MAP_FIELD:
return isSetMapField();
case LIST_FIELD:
return isSetListField();
case SET_FIELD:
return isSetSetField();
case ENUM_FIELD:
return isSetEnumField();
case STRUCT_FIELD:
return isSetStructField();
case FOO_OR_BAR:
return isSetFooOrBar();
}
throw new java.lang.IllegalStateException();
}
/** Type-checked bridge to {@link #equals(Test)}; any non-Test (including null) compares unequal. */
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof Test)
return this.equals((Test) that);
return false;
}
/**
 * Field-by-field equality. For each field the generated pattern first compares
 * "presence" (always true for required/primitive fields, isSetXxx() for
 * optional/nullable ones) and then the values: != for primitives, equals()
 * for object fields. Must be kept consistent with {@link #hashCode()}.
 */
public boolean equals(Test that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_boolField = true;
boolean that_present_boolField = true;
if (this_present_boolField || that_present_boolField) {
if (!(this_present_boolField && that_present_boolField))
return false;
if (this.boolField != that.boolField)
return false;
}
boolean this_present_byteField = true;
boolean that_present_byteField = true;
if (this_present_byteField || that_present_byteField) {
if (!(this_present_byteField && that_present_byteField))
return false;
if (this.byteField != that.byteField)
return false;
}
boolean this_present_byteOptionalField = true && this.isSetByteOptionalField();
boolean that_present_byteOptionalField = true && that.isSetByteOptionalField();
if (this_present_byteOptionalField || that_present_byteOptionalField) {
if (!(this_present_byteOptionalField && that_present_byteOptionalField))
return false;
if (this.byteOptionalField != that.byteOptionalField)
return false;
}
boolean this_present_i16Field = true;
boolean that_present_i16Field = true;
if (this_present_i16Field || that_present_i16Field) {
if (!(this_present_i16Field && that_present_i16Field))
return false;
if (this.i16Field != that.i16Field)
return false;
}
boolean this_present_i16OptionalField = true && this.isSetI16OptionalField();
boolean that_present_i16OptionalField = true && that.isSetI16OptionalField();
if (this_present_i16OptionalField || that_present_i16OptionalField) {
if (!(this_present_i16OptionalField && that_present_i16OptionalField))
return false;
if (this.i16OptionalField != that.i16OptionalField)
return false;
}
boolean this_present_i32Field = true && this.isSetI32Field();
boolean that_present_i32Field = true && that.isSetI32Field();
if (this_present_i32Field || that_present_i32Field) {
if (!(this_present_i32Field && that_present_i32Field))
return false;
if (this.i32Field != that.i32Field)
return false;
}
boolean this_present_i64Field = true;
boolean that_present_i64Field = true;
if (this_present_i64Field || that_present_i64Field) {
if (!(this_present_i64Field && that_present_i64Field))
return false;
if (this.i64Field != that.i64Field)
return false;
}
boolean this_present_doubleField = true;
boolean that_present_doubleField = true;
if (this_present_doubleField || that_present_doubleField) {
if (!(this_present_doubleField && that_present_doubleField))
return false;
if (this.doubleField != that.doubleField)
return false;
}
boolean this_present_stringField = true && this.isSetStringField();
boolean that_present_stringField = true && that.isSetStringField();
if (this_present_stringField || that_present_stringField) {
if (!(this_present_stringField && that_present_stringField))
return false;
if (!this.stringField.equals(that.stringField))
return false;
}
boolean this_present_binaryField = true && this.isSetBinaryField();
boolean that_present_binaryField = true && that.isSetBinaryField();
if (this_present_binaryField || that_present_binaryField) {
if (!(this_present_binaryField && that_present_binaryField))
return false;
if (!this.binaryField.equals(that.binaryField))
return false;
}
boolean this_present_mapField = true && this.isSetMapField();
boolean that_present_mapField = true && that.isSetMapField();
if (this_present_mapField || that_present_mapField) {
if (!(this_present_mapField && that_present_mapField))
return false;
if (!this.mapField.equals(that.mapField))
return false;
}
boolean this_present_listField = true && this.isSetListField();
boolean that_present_listField = true && that.isSetListField();
if (this_present_listField || that_present_listField) {
if (!(this_present_listField && that_present_listField))
return false;
if (!this.listField.equals(that.listField))
return false;
}
boolean this_present_setField = true && this.isSetSetField();
boolean that_present_setField = true && that.isSetSetField();
if (this_present_setField || that_present_setField) {
if (!(this_present_setField && that_present_setField))
return false;
if (!this.setField.equals(that.setField))
return false;
}
boolean this_present_enumField = true && this.isSetEnumField();
boolean that_present_enumField = true && that.isSetEnumField();
if (this_present_enumField || that_present_enumField) {
if (!(this_present_enumField && that_present_enumField))
return false;
if (!this.enumField.equals(that.enumField))
return false;
}
boolean this_present_structField = true && this.isSetStructField();
boolean that_present_structField = true && that.isSetStructField();
if (this_present_structField || that_present_structField) {
if (!(this_present_structField && that_present_structField))
return false;
if (!this.structField.equals(that.structField))
return false;
}
boolean this_present_fooOrBar = true && this.isSetFooOrBar();
boolean that_present_fooOrBar = true && that.isSetFooOrBar();
if (this_present_fooOrBar || that_present_fooOrBar) {
if (!(this_present_fooOrBar && that_present_fooOrBar))
return false;
if (!this.fooOrBar.equals(that.fooOrBar))
return false;
}
return true;
}
/**
 * Hash combining each field with the Thrift generator's fixed primes
 * (multiplier 8191; 131071/524287 mark presence of optional fields, which
 * only contribute their value when set). Consistent with equals(Test):
 * unset optional fields hash identically.
 */
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((boolField) ? 131071 : 524287);
hashCode = hashCode * 8191 + (int) (byteField);
hashCode = hashCode * 8191 + ((isSetByteOptionalField()) ? 131071 : 524287);
if (isSetByteOptionalField())
hashCode = hashCode * 8191 + (int) (byteOptionalField);
hashCode = hashCode * 8191 + i16Field;
hashCode = hashCode * 8191 + ((isSetI16OptionalField()) ? 131071 : 524287);
if (isSetI16OptionalField())
hashCode = hashCode * 8191 + i16OptionalField;
hashCode = hashCode * 8191 + ((isSetI32Field()) ? 131071 : 524287);
if (isSetI32Field())
hashCode = hashCode * 8191 + i32Field;
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(i64Field);
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(doubleField);
hashCode = hashCode * 8191 + ((isSetStringField()) ? 131071 : 524287);
if (isSetStringField())
hashCode = hashCode * 8191 + stringField.hashCode();
hashCode = hashCode * 8191 + ((isSetBinaryField()) ? 131071 : 524287);
if (isSetBinaryField())
hashCode = hashCode * 8191 + binaryField.hashCode();
hashCode = hashCode * 8191 + ((isSetMapField()) ? 131071 : 524287);
if (isSetMapField())
hashCode = hashCode * 8191 + mapField.hashCode();
hashCode = hashCode * 8191 + ((isSetListField()) ? 131071 : 524287);
if (isSetListField())
hashCode = hashCode * 8191 + listField.hashCode();
hashCode = hashCode * 8191 + ((isSetSetField()) ? 131071 : 524287);
if (isSetSetField())
hashCode = hashCode * 8191 + setField.hashCode();
hashCode = hashCode * 8191 + ((isSetEnumField()) ? 131071 : 524287);
if (isSetEnumField())
hashCode = hashCode * 8191 + enumField.getValue();
hashCode = hashCode * 8191 + ((isSetStructField()) ? 131071 : 524287);
if (isSetStructField())
hashCode = hashCode * 8191 + structField.hashCode();
hashCode = hashCode * 8191 + ((isSetFooOrBar()) ? 131071 : 524287);
if (isSetFooOrBar())
hashCode = hashCode * 8191 + fooOrBar.hashCode();
return hashCode;
}
/**
 * Total ordering over Test instances, field by field in IDL declaration
 * order. For each field: first compare set-ness (unset sorts before set via
 * Boolean.compare), then the values via TBaseHelper.compareTo. Different
 * runtime classes fall back to class-name ordering.
 */
@Override
public int compareTo(Test other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.compare(isSetBoolField(), other.isSetBoolField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetBoolField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.boolField, other.boolField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetByteField(), other.isSetByteField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetByteField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.byteField, other.byteField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetByteOptionalField(), other.isSetByteOptionalField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetByteOptionalField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.byteOptionalField, other.byteOptionalField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetI16Field(), other.isSetI16Field());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetI16Field()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i16Field, other.i16Field);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetI16OptionalField(), other.isSetI16OptionalField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetI16OptionalField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i16OptionalField, other.i16OptionalField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetI32Field(), other.isSetI32Field());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetI32Field()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i32Field, other.i32Field);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetI64Field(), other.isSetI64Field());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetI64Field()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.i64Field, other.i64Field);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetDoubleField(), other.isSetDoubleField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDoubleField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.doubleField, other.doubleField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetStringField(), other.isSetStringField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetStringField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.stringField, other.stringField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetBinaryField(), other.isSetBinaryField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetBinaryField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.binaryField, other.binaryField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetMapField(), other.isSetMapField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMapField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.mapField, other.mapField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetListField(), other.isSetListField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetListField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.listField, other.listField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetSetField(), other.isSetSetField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSetField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.setField, other.setField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetEnumField(), other.isSetEnumField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEnumField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.enumField, other.enumField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetStructField(), other.isSetStructField());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetStructField()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.structField, other.structField);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetFooOrBar(), other.isSetFooOrBar());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetFooOrBar()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.fooOrBar, other.fooOrBar);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
/** Maps a wire-level Thrift field id to its _Fields constant, or null if unknown. */
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
/** Deserializes this struct from the protocol using the scheme matching the protocol type. */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
/** Serializes this struct to the protocol using the scheme matching the protocol type. */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
/**
 * Human-readable rendering "Test(field:value, ...)". Unset optional fields
 * (byteOptionalField, i16OptionalField, i32Field, binaryField) are omitted;
 * null object fields print as "null"; binary is rendered via TBaseHelper.
 */
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("Test(");
boolean first = true;
sb.append("boolField:");
sb.append(this.boolField);
first = false;
if (!first)
sb.append(", ");
sb.append("byteField:");
sb.append(this.byteField);
first = false;
if (isSetByteOptionalField()) {
if (!first)
sb.append(", ");
sb.append("byteOptionalField:");
sb.append(this.byteOptionalField);
first = false;
}
if (!first)
sb.append(", ");
sb.append("i16Field:");
sb.append(this.i16Field);
first = false;
if (isSetI16OptionalField()) {
if (!first)
sb.append(", ");
sb.append("i16OptionalField:");
sb.append(this.i16OptionalField);
first = false;
}
if (isSetI32Field()) {
if (!first)
sb.append(", ");
sb.append("i32Field:");
sb.append(this.i32Field);
first = false;
}
if (!first)
sb.append(", ");
sb.append("i64Field:");
sb.append(this.i64Field);
first = false;
if (!first)
sb.append(", ");
sb.append("doubleField:");
sb.append(this.doubleField);
first = false;
if (!first)
sb.append(", ");
sb.append("stringField:");
if (this.stringField == null) {
sb.append("null");
} else {
sb.append(this.stringField);
}
first = false;
if (isSetBinaryField()) {
if (!first)
sb.append(", ");
sb.append("binaryField:");
if (this.binaryField == null) {
sb.append("null");
} else {
org.apache.thrift.TBaseHelper.toString(this.binaryField, sb);
}
first = false;
}
if (!first)
sb.append(", ");
sb.append("mapField:");
if (this.mapField == null) {
sb.append("null");
} else {
sb.append(this.mapField);
}
first = false;
if (!first)
sb.append(", ");
sb.append("listField:");
if (this.listField == null) {
sb.append("null");
} else {
sb.append(this.listField);
}
first = false;
if (!first)
sb.append(", ");
sb.append("setField:");
if (this.setField == null) {
sb.append("null");
} else {
sb.append(this.setField);
}
first = false;
if (!first)
sb.append(", ");
sb.append("enumField:");
if (this.enumField == null) {
sb.append("null");
} else {
sb.append(this.enumField);
}
first = false;
if (!first)
sb.append(", ");
sb.append("structField:");
if (this.structField == null) {
sb.append("null");
} else {
sb.append(this.structField);
}
first = false;
if (!first)
sb.append(", ");
sb.append("fooOrBar:");
if (this.fooOrBar == null) {
sb.append("null");
} else {
sb.append(this.fooOrBar);
}
first = false;
sb.append(")");
return sb.toString();
}
/**
 * Structural validation invoked before write and after read. This struct has
 * no required fields to check; it only cascades validation into the nested
 * struct field when present.
 */
public void validate() throws org.apache.thrift.TException {
// check for required fields
// check for sub-struct validity
if (structField != null) {
structField.validate();
}
}
/** Java serialization hook: delegates to Thrift compact-protocol encoding, wrapping TException in IOException. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Java deserialization hook: resets the isset bitfield (the default constructor is not called) then decodes via compact protocol. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is
// wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/** Factory registered with the Thrift runtime to produce the standard (field-tagged) serialization scheme. */
private static class TestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public TestStandardScheme getScheme() {
return new TestStandardScheme();
}
}
/**
 * Standard (field-header) wire scheme for Test. read() loops over field
 * headers until STOP, dispatching on field id and skipping any field whose
 * declared wire type does not match; write() emits fields in a fixed order,
 * writing optional fields only when set and nullable fields only when
 * non-null.
 */
private static class TestStandardScheme extends org.apache.thrift.scheme.StandardScheme<Test> {
public void read(org.apache.thrift.protocol.TProtocol iprot, Test struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // BOOL_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
struct.boolField = iprot.readBool();
struct.setBoolFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // BYTE_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
struct.byteField = iprot.readByte();
struct.setByteFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 16: // BYTE_OPTIONAL_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.BYTE) {
struct.byteOptionalField = iprot.readByte();
struct.setByteOptionalFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // I16_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
struct.i16Field = iprot.readI16();
struct.setI16FieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 15: // I16_OPTIONAL_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.I16) {
struct.i16OptionalField = iprot.readI16();
struct.setI16OptionalFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // I32_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.i32Field = iprot.readI32();
struct.setI32FieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // I64_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.i64Field = iprot.readI64();
struct.setI64FieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // DOUBLE_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) {
struct.doubleField = iprot.readDouble();
struct.setDoubleFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 7: // STRING_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.stringField = iprot.readString();
struct.setStringFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 8: // BINARY_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.binaryField = iprot.readBinary();
struct.setBinaryFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 9: // MAP_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
org.apache.thrift.protocol.TMap _map0 = iprot.readMapBegin();
// presize for the declared entry count (2x for HashMap load factor)
struct.mapField = new java.util.HashMap<java.lang.String, java.lang.Integer>(2 * _map0.size);
@org.apache.thrift.annotation.Nullable
java.lang.String _key1;
int _val2;
for (int _i3 = 0; _i3 < _map0.size; ++_i3) {
_key1 = iprot.readString();
_val2 = iprot.readI32();
struct.mapField.put(_key1, _val2);
}
iprot.readMapEnd();
}
struct.setMapFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 10: // LIST_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list4 = iprot.readListBegin();
struct.listField = new java.util.ArrayList<java.lang.Integer>(_list4.size);
int _elem5;
for (int _i6 = 0; _i6 < _list4.size; ++_i6) {
_elem5 = iprot.readI32();
struct.listField.add(_elem5);
}
iprot.readListEnd();
}
struct.setListFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 11: // SET_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.SET) {
{
org.apache.thrift.protocol.TSet _set7 = iprot.readSetBegin();
struct.setField = new java.util.HashSet<java.lang.Integer>(2 * _set7.size);
int _elem8;
for (int _i9 = 0; _i9 < _set7.size; ++_i9) {
_elem8 = iprot.readI32();
struct.setField.add(_elem8);
}
iprot.readSetEnd();
}
struct.setSetFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 12: // ENUM_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
// enums travel as i32; findByValue yields null for unknown ordinals
struct.enumField = org.apache.avro.thrift.test.E.findByValue(iprot.readI32());
struct.setEnumFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 13: // STRUCT_FIELD
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.structField = new Nested();
struct.structField.read(iprot);
struct.setStructFieldIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 14: // FOO_OR_BAR
if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.fooOrBar = new FooOrBar();
struct.fooOrBar.read(iprot);
struct.setFooOrBarIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// validate after a fully-successful read
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, Test struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(BOOL_FIELD_FIELD_DESC);
oprot.writeBool(struct.boolField);
oprot.writeFieldEnd();
oprot.writeFieldBegin(BYTE_FIELD_FIELD_DESC);
oprot.writeByte(struct.byteField);
oprot.writeFieldEnd();
oprot.writeFieldBegin(I16_FIELD_FIELD_DESC);
oprot.writeI16(struct.i16Field);
oprot.writeFieldEnd();
if (struct.isSetI32Field()) {
oprot.writeFieldBegin(I32_FIELD_FIELD_DESC);
oprot.writeI32(struct.i32Field);
oprot.writeFieldEnd();
}
oprot.writeFieldBegin(I64_FIELD_FIELD_DESC);
oprot.writeI64(struct.i64Field);
oprot.writeFieldEnd();
oprot.writeFieldBegin(DOUBLE_FIELD_FIELD_DESC);
oprot.writeDouble(struct.doubleField);
oprot.writeFieldEnd();
if (struct.stringField != null) {
oprot.writeFieldBegin(STRING_FIELD_FIELD_DESC);
oprot.writeString(struct.stringField);
oprot.writeFieldEnd();
}
// binaryField: generated double guard (null check plus isSet) — equivalent
// to a single null check since isSetBinaryField() is nullness-based
if (struct.binaryField != null) {
if (struct.isSetBinaryField()) {
oprot.writeFieldBegin(BINARY_FIELD_FIELD_DESC);
oprot.writeBinary(struct.binaryField);
oprot.writeFieldEnd();
}
}
if (struct.mapField != null) {
oprot.writeFieldBegin(MAP_FIELD_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING,
org.apache.thrift.protocol.TType.I32, struct.mapField.size()));
for (java.util.Map.Entry<java.lang.String, java.lang.Integer> _iter10 : struct.mapField.entrySet()) {
oprot.writeString(_iter10.getKey());
oprot.writeI32(_iter10.getValue());
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
if (struct.listField != null) {
oprot.writeFieldBegin(LIST_FIELD_FIELD_DESC);
{
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.listField.size()));
for (int _iter11 : struct.listField) {
oprot.writeI32(_iter11);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (struct.setField != null) {
oprot.writeFieldBegin(SET_FIELD_FIELD_DESC);
{
oprot.writeSetBegin(
new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I32, struct.setField.size()));
for (int _iter12 : struct.setField) {
oprot.writeI32(_iter12);
}
oprot.writeSetEnd();
}
oprot.writeFieldEnd();
}
if (struct.enumField != null) {
oprot.writeFieldBegin(ENUM_FIELD_FIELD_DESC);
oprot.writeI32(struct.enumField.getValue());
oprot.writeFieldEnd();
}
if (struct.structField != null) {
oprot.writeFieldBegin(STRUCT_FIELD_FIELD_DESC);
struct.structField.write(oprot);
oprot.writeFieldEnd();
}
if (struct.fooOrBar != null) {
oprot.writeFieldBegin(FOO_OR_BAR_FIELD_DESC);
struct.fooOrBar.write(oprot);
oprot.writeFieldEnd();
}
// optional primitives (ids 15/16) come last, mirroring IDL declaration order
if (struct.isSetI16OptionalField()) {
oprot.writeFieldBegin(I16_OPTIONAL_FIELD_FIELD_DESC);
oprot.writeI16(struct.i16OptionalField);
oprot.writeFieldEnd();
}
if (struct.isSetByteOptionalField()) {
oprot.writeFieldBegin(BYTE_OPTIONAL_FIELD_FIELD_DESC);
oprot.writeByte(struct.byteOptionalField);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
// Factory registered for the tuple protocol; scheme() below uses it to obtain
// the tuple-scheme serializer when the protocol in use is not a StandardScheme.
private static class TestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
  public TestTupleScheme getScheme() {
    return new TestTupleScheme();
  }
}
/**
 * Tuple-protocol (de)serializer for {@code Test}. The tuple scheme is the
 * compact alternative to the standard scheme: instead of per-field headers it
 * writes one bit set flagging which fields are present, followed by the
 * present field values in a fixed order. The bit positions (0..15) and the
 * value order are part of the wire format, so {@code read} must mirror
 * {@code write} exactly — do not reorder any of the blocks below.
 */
private static class TestTupleScheme extends org.apache.thrift.scheme.TupleScheme<Test> {
  @Override
  public void write(org.apache.thrift.protocol.TProtocol prot, Test struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Bit i is set iff the i-th field (in declaration order) is present.
    java.util.BitSet optionals = new java.util.BitSet();
    if (struct.isSetBoolField()) {
      optionals.set(0);
    }
    if (struct.isSetByteField()) {
      optionals.set(1);
    }
    if (struct.isSetByteOptionalField()) {
      optionals.set(2);
    }
    if (struct.isSetI16Field()) {
      optionals.set(3);
    }
    if (struct.isSetI16OptionalField()) {
      optionals.set(4);
    }
    if (struct.isSetI32Field()) {
      optionals.set(5);
    }
    if (struct.isSetI64Field()) {
      optionals.set(6);
    }
    if (struct.isSetDoubleField()) {
      optionals.set(7);
    }
    if (struct.isSetStringField()) {
      optionals.set(8);
    }
    if (struct.isSetBinaryField()) {
      optionals.set(9);
    }
    if (struct.isSetMapField()) {
      optionals.set(10);
    }
    if (struct.isSetListField()) {
      optionals.set(11);
    }
    if (struct.isSetSetField()) {
      optionals.set(12);
    }
    if (struct.isSetEnumField()) {
      optionals.set(13);
    }
    if (struct.isSetStructField()) {
      optionals.set(14);
    }
    if (struct.isSetFooOrBar()) {
      optionals.set(15);
    }
    oprot.writeBitSet(optionals, 16);
    // Present field values follow, in the same order as the bits above.
    if (struct.isSetBoolField()) {
      oprot.writeBool(struct.boolField);
    }
    if (struct.isSetByteField()) {
      oprot.writeByte(struct.byteField);
    }
    if (struct.isSetByteOptionalField()) {
      oprot.writeByte(struct.byteOptionalField);
    }
    if (struct.isSetI16Field()) {
      oprot.writeI16(struct.i16Field);
    }
    if (struct.isSetI16OptionalField()) {
      oprot.writeI16(struct.i16OptionalField);
    }
    if (struct.isSetI32Field()) {
      oprot.writeI32(struct.i32Field);
    }
    if (struct.isSetI64Field()) {
      oprot.writeI64(struct.i64Field);
    }
    if (struct.isSetDoubleField()) {
      oprot.writeDouble(struct.doubleField);
    }
    if (struct.isSetStringField()) {
      oprot.writeString(struct.stringField);
    }
    if (struct.isSetBinaryField()) {
      oprot.writeBinary(struct.binaryField);
    }
    // Containers are written as a size followed by raw elements — no
    // per-element type headers here, unlike the standard scheme.
    if (struct.isSetMapField()) {
      {
        oprot.writeI32(struct.mapField.size());
        for (java.util.Map.Entry<java.lang.String, java.lang.Integer> _iter13 : struct.mapField.entrySet()) {
          oprot.writeString(_iter13.getKey());
          oprot.writeI32(_iter13.getValue());
        }
      }
    }
    if (struct.isSetListField()) {
      {
        oprot.writeI32(struct.listField.size());
        for (int _iter14 : struct.listField) {
          oprot.writeI32(_iter14);
        }
      }
    }
    if (struct.isSetSetField()) {
      {
        oprot.writeI32(struct.setField.size());
        for (int _iter15 : struct.setField) {
          oprot.writeI32(_iter15);
        }
      }
    }
    // Enums travel as their i32 value; nested structs/unions serialize
    // themselves recursively.
    if (struct.isSetEnumField()) {
      oprot.writeI32(struct.enumField.getValue());
    }
    if (struct.isSetStructField()) {
      struct.structField.write(oprot);
    }
    if (struct.isSetFooOrBar()) {
      struct.fooOrBar.write(oprot);
    }
  }
  @Override
  public void read(org.apache.thrift.protocol.TProtocol prot, Test struct) throws org.apache.thrift.TException {
    org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
    // Mirror of write(): read the presence bit set, then each flagged field
    // in the identical order, marking it set on the target struct.
    java.util.BitSet incoming = iprot.readBitSet(16);
    if (incoming.get(0)) {
      struct.boolField = iprot.readBool();
      struct.setBoolFieldIsSet(true);
    }
    if (incoming.get(1)) {
      struct.byteField = iprot.readByte();
      struct.setByteFieldIsSet(true);
    }
    if (incoming.get(2)) {
      struct.byteOptionalField = iprot.readByte();
      struct.setByteOptionalFieldIsSet(true);
    }
    if (incoming.get(3)) {
      struct.i16Field = iprot.readI16();
      struct.setI16FieldIsSet(true);
    }
    if (incoming.get(4)) {
      struct.i16OptionalField = iprot.readI16();
      struct.setI16OptionalFieldIsSet(true);
    }
    if (incoming.get(5)) {
      struct.i32Field = iprot.readI32();
      struct.setI32FieldIsSet(true);
    }
    if (incoming.get(6)) {
      struct.i64Field = iprot.readI64();
      struct.setI64FieldIsSet(true);
    }
    if (incoming.get(7)) {
      struct.doubleField = iprot.readDouble();
      struct.setDoubleFieldIsSet(true);
    }
    if (incoming.get(8)) {
      struct.stringField = iprot.readString();
      struct.setStringFieldIsSet(true);
    }
    if (incoming.get(9)) {
      struct.binaryField = iprot.readBinary();
      struct.setBinaryFieldIsSet(true);
    }
    if (incoming.get(10)) {
      {
        org.apache.thrift.protocol.TMap _map16 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING,
            org.apache.thrift.protocol.TType.I32);
        // Capacity 2 * size keeps the HashMap below its default load factor,
        // avoiding a rehash while filling.
        struct.mapField = new java.util.HashMap<java.lang.String, java.lang.Integer>(2 * _map16.size);
        @org.apache.thrift.annotation.Nullable
        java.lang.String _key17;
        int _val18;
        for (int _i19 = 0; _i19 < _map16.size; ++_i19) {
          _key17 = iprot.readString();
          _val18 = iprot.readI32();
          struct.mapField.put(_key17, _val18);
        }
      }
      struct.setMapFieldIsSet(true);
    }
    if (incoming.get(11)) {
      {
        org.apache.thrift.protocol.TList _list20 = iprot.readListBegin(org.apache.thrift.protocol.TType.I32);
        struct.listField = new java.util.ArrayList<java.lang.Integer>(_list20.size);
        int _elem21;
        for (int _i22 = 0; _i22 < _list20.size; ++_i22) {
          _elem21 = iprot.readI32();
          struct.listField.add(_elem21);
        }
      }
      struct.setListFieldIsSet(true);
    }
    if (incoming.get(12)) {
      {
        org.apache.thrift.protocol.TSet _set23 = iprot.readSetBegin(org.apache.thrift.protocol.TType.I32);
        struct.setField = new java.util.HashSet<java.lang.Integer>(2 * _set23.size);
        int _elem24;
        for (int _i25 = 0; _i25 < _set23.size; ++_i25) {
          _elem24 = iprot.readI32();
          struct.setField.add(_elem24);
        }
      }
      struct.setSetFieldIsSet(true);
    }
    if (incoming.get(13)) {
      // NOTE(review): findByValue presumably returns null for enum values this
      // build does not know — confirm callers tolerate a null enumField.
      struct.enumField = org.apache.avro.thrift.test.E.findByValue(iprot.readI32());
      struct.setEnumFieldIsSet(true);
    }
    if (incoming.get(14)) {
      struct.structField = new Nested();
      struct.structField.read(iprot);
      struct.setStructFieldIsSet(true);
    }
    if (incoming.get(15)) {
      struct.fooOrBar = new FooOrBar();
      struct.fooOrBar.read(iprot);
      struct.setFooOrBarIsSet(true);
    }
  }
}
// Picks the serializer matching the protocol: standard scheme for
// field-header protocols, tuple scheme otherwise.
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
      : TUPLE_SCHEME_FACTORY).getScheme();
}
}
| 7,102 |
0 | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Nested.java | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.avro.thrift.test;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-03-18")
/**
 * Thrift-generated struct with a single required-by-default i32 field {@code x}.
 * Because {@code x} is a primitive, presence is tracked out-of-band in
 * {@code __isset_bitfield} rather than by nullness. Serialization is delegated
 * to a standard or tuple scheme chosen from the active protocol.
 */
public class Nested
    implements org.apache.thrift.TBase<Nested, Nested._Fields>, java.io.Serializable, Cloneable, Comparable<Nested> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
      "Nested");
  // Wire descriptor for field x: type i32, thrift field id 1.
  private static final org.apache.thrift.protocol.TField X_FIELD_DESC = new org.apache.thrift.protocol.TField("x",
      org.apache.thrift.protocol.TType.I32, (short) 1);
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new NestedStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new NestedTupleSchemeFactory();
  private int x; // required
  /**
   * The set of fields this struct contains, along with convenience methods for
   * finding and manipulating them.
   */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    X((short) 1, "x");
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch (fieldId) {
      case 1: // X
        return X;
      default:
        return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception if it
     * is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null)
        throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Bit index within __isset_bitfield used to track whether x was assigned.
  private static final int __X_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
        _Fields.class);
    tmpMap.put(_Fields.X,
        new org.apache.thrift.meta_data.FieldMetaData("x", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Nested.class, metaDataMap);
  }
  public Nested() {
  }
  public Nested(int x) {
    this();
    this.x = x;
    setXIsSet(true);
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public Nested(Nested other) {
    __isset_bitfield = other.__isset_bitfield;
    this.x = other.x;
  }
  public Nested deepCopy() {
    return new Nested(this);
  }
  @Override
  public void clear() {
    setXIsSet(false);
    this.x = 0;
  }
  public int getX() {
    return this.x;
  }
  public void setX(int x) {
    this.x = x;
    setXIsSet(true);
  }
  public void unsetX() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __X_ISSET_ID);
  }
  /**
   * Returns true if field x is set (has been assigned a value) and false
   * otherwise
   */
  public boolean isSetX() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __X_ISSET_ID);
  }
  public void setXIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __X_ISSET_ID, value);
  }
  // Generic, reflection-style field access used by TBase consumers.
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case X:
      if (value == null) {
        unsetX();
      } else {
        setX((java.lang.Integer) value);
      }
      break;
    }
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case X:
      return getX();
    }
    throw new java.lang.IllegalStateException();
  }
  /**
   * Returns true if field corresponding to fieldID is set (has been assigned a
   * value) and false otherwise
   */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case X:
      return isSetX();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof Nested)
      return this.equals((Nested) that);
    return false;
  }
  public boolean equals(Nested that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    // x is always "present" (primitive with DEFAULT requirement), so equality
    // reduces to a value comparison.
    boolean this_present_x = true;
    boolean that_present_x = true;
    if (this_present_x || that_present_x) {
      if (!(this_present_x && that_present_x))
        return false;
      if (this.x != that.x)
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + x;
    return hashCode;
  }
  @Override
  public int compareTo(Nested other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order first by presence of x, then by its value.
    lastComparison = java.lang.Boolean.compare(isSetX(), other.isSetX());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetX()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.x, other.x);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("Nested(");
    boolean first = true;
    sb.append("x:");
    sb.append(this.x);
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is
      // wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class NestedStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public NestedStandardScheme getScheme() {
      return new NestedStandardScheme();
    }
  }
  private static class NestedStandardScheme extends org.apache.thrift.scheme.StandardScheme<Nested> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, Nested struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Consume field headers until STOP; unknown ids/types are skipped so
      // old readers tolerate newer writers.
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
        case 1: // X
          if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
            struct.x = iprot.readI32();
            struct.setXIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, Nested struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(X_FIELD_DESC);
      oprot.writeI32(struct.x);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class NestedTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public NestedTupleScheme getScheme() {
      return new NestedTupleScheme();
    }
  }
  private static class NestedTupleScheme extends org.apache.thrift.scheme.TupleScheme<Nested> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // One presence bit, then the value if present; read() mirrors this.
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetX()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetX()) {
        oprot.writeI32(struct.x);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.x = iprot.readI32();
        struct.setXIsSet(true);
      }
    }
  }
  // Picks the serializer matching the protocol in use.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
        : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| 7,103 |
0 | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift | Create_ds/avro/lang/java/thrift/src/test/java/org/apache/avro/thrift/test/Error.java | /**
* Autogenerated by Thrift Compiler (0.14.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.avro.thrift.test;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked", "unused" })
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.14.1)", date = "2021-03-18")
/**
 * Thrift-generated exception struct with a single string field
 * {@code message}. Extends TException so it can be thrown/declared by Thrift
 * service methods. Because the field is a reference type, presence is simply
 * non-nullness — no isset bitfield is needed (note readObject below does not
 * reset one, unlike structs with primitive fields).
 */
public class Error extends org.apache.thrift.TException
    implements org.apache.thrift.TBase<Error, Error._Fields>, java.io.Serializable, Cloneable, Comparable<Error> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Error");
  // Wire descriptor for field message: type string, thrift field id 1.
  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField(
      "message", org.apache.thrift.protocol.TType.STRING, (short) 1);
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ErrorStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ErrorTupleSchemeFactory();
  private @org.apache.thrift.annotation.Nullable java.lang.String message; // required
  /**
   * The set of fields this struct contains, along with convenience methods for
   * finding and manipulating them.
   */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    MESSAGE((short) 1, "message");
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch (fieldId) {
      case 1: // MESSAGE
        return MESSAGE;
      default:
        return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception if it
     * is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null)
        throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
        _Fields.class);
    tmpMap.put(_Fields.MESSAGE,
        new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.DEFAULT,
            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Error.class, metaDataMap);
  }
  public Error() {
  }
  public Error(java.lang.String message) {
    this();
    this.message = message;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public Error(Error other) {
    if (other.isSetMessage()) {
      this.message = other.message;
    }
  }
  public Error deepCopy() {
    return new Error(this);
  }
  @Override
  public void clear() {
    this.message = null;
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getMessage() {
    return this.message;
  }
  public void setMessage(@org.apache.thrift.annotation.Nullable java.lang.String message) {
    this.message = message;
  }
  public void unsetMessage() {
    this.message = null;
  }
  /**
   * Returns true if field message is set (has been assigned a value) and false
   * otherwise
   */
  public boolean isSetMessage() {
    return this.message != null;
  }
  public void setMessageIsSet(boolean value) {
    if (!value) {
      this.message = null;
    }
  }
  // Generic, reflection-style field access used by TBase consumers.
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case MESSAGE:
      if (value == null) {
        unsetMessage();
      } else {
        setMessage((java.lang.String) value);
      }
      break;
    }
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case MESSAGE:
      return getMessage();
    }
    throw new java.lang.IllegalStateException();
  }
  /**
   * Returns true if field corresponding to fieldID is set (has been assigned a
   * value) and false otherwise
   */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case MESSAGE:
      return isSetMessage();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof Error)
      return this.equals((Error) that);
    return false;
  }
  public boolean equals(Error that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    // Two Errors are equal when message presence and content agree.
    boolean this_present_message = true && this.isSetMessage();
    boolean that_present_message = true && that.isSetMessage();
    if (this_present_message || that_present_message) {
      if (!(this_present_message && that_present_message))
        return false;
      if (!this.message.equals(that.message))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetMessage()) ? 131071 : 524287);
    if (isSetMessage())
      hashCode = hashCode * 8191 + message.hashCode();
    return hashCode;
  }
  @Override
  public int compareTo(Error other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    // Order first by presence of message, then lexicographically.
    lastComparison = java.lang.Boolean.compare(isSetMessage(), other.isSetMessage());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMessage()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("Error(");
    boolean first = true;
    sb.append("message:");
    if (this.message == null) {
      sb.append("null");
    } else {
      sb.append(this.message);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is bridged through the Thrift compact protocol.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class ErrorStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ErrorStandardScheme getScheme() {
      return new ErrorStandardScheme();
    }
  }
  private static class ErrorStandardScheme extends org.apache.thrift.scheme.StandardScheme<Error> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, Error struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Consume field headers until STOP; unknown ids/types are skipped so
      // old readers tolerate newer writers.
      while (true) {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
        case 1: // MESSAGE
          if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
            struct.message = iprot.readString();
            struct.setMessageIsSet(true);
          } else {
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
          }
          break;
        default:
          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, Error struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // Null message is simply omitted from the wire.
      if (struct.message != null) {
        oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
        oprot.writeString(struct.message);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class ErrorTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ErrorTupleScheme getScheme() {
      return new ErrorTupleScheme();
    }
  }
  private static class ErrorTupleScheme extends org.apache.thrift.scheme.TupleScheme<Error> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, Error struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // One presence bit, then the value if present; read() mirrors this.
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetMessage()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetMessage()) {
        oprot.writeString(struct.message);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, Error struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.message = iprot.readString();
        struct.setMessageIsSet(true);
      }
    }
  }
  // Picks the serializer matching the protocol in use.
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY
        : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| 7,104 |
0 | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.thrift;
import java.util.List;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.nio.ByteBuffer;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.util.ClassUtils;
import org.apache.thrift.TBase;
import org.apache.thrift.TEnum;
import org.apache.thrift.TFieldIdEnum;
import org.apache.thrift.TFieldRequirementType;
import org.apache.thrift.TUnion;
import org.apache.thrift.protocol.TType;
import org.apache.thrift.meta_data.FieldMetaData;
import org.apache.thrift.meta_data.FieldValueMetaData;
import org.apache.thrift.meta_data.EnumMetaData;
import org.apache.thrift.meta_data.ListMetaData;
import org.apache.thrift.meta_data.SetMetaData;
import org.apache.thrift.meta_data.MapMetaData;
import org.apache.thrift.meta_data.StructMetaData;
/** Utilities for serializing Thrift data in Avro format. */
public class ThriftData extends GenericData {
static final String THRIFT_TYPE = "thrift";
static final String THRIFT_PROP = "thrift";
private static final ThriftData INSTANCE = new ThriftData();
protected ThriftData() {
}
/** Return the singleton instance. */
public static ThriftData get() {
return INSTANCE;
}
@Override
public DatumReader createDatumReader(Schema schema) {
return new ThriftDatumReader(schema, schema, this);
}
@Override
public DatumWriter createDatumWriter(Schema schema) {
return new ThriftDatumWriter(schema, this);
}
@Override
public void setField(Object r, String n, int pos, Object value) {
setField(r, n, pos, value, getRecordState(r, getSchema(r.getClass())));
}
@Override
public Object getField(Object r, String name, int pos) {
return getField(r, name, pos, getRecordState(r, getSchema(r.getClass())));
}
@Override
protected void setField(Object record, String name, int position, Object value, Object state) {
if (value == null && record instanceof TUnion)
return;
((TBase) record).setFieldValue(((TFieldIdEnum[]) state)[position], value);
}
@Override
protected Object getField(Object record, String name, int pos, Object state) {
TFieldIdEnum f = ((TFieldIdEnum[]) state)[pos];
TBase struct = (TBase) record;
if (struct.isSet(f))
return struct.getFieldValue(f);
return null;
}
private final Map<Schema, TFieldIdEnum[]> fieldCache = new ConcurrentHashMap<>();
@Override
@SuppressWarnings("unchecked")
protected Object getRecordState(Object r, Schema s) {
TFieldIdEnum[] fields = fieldCache.get(s);
if (fields == null) { // cache miss
fields = new TFieldIdEnum[s.getFields().size()];
Class c = r.getClass();
for (TFieldIdEnum f : FieldMetaData.getStructMetaDataMap((Class<? extends TBase>) c).keySet())
fields[s.getField(f.getFieldName()).pos()] = f;
fieldCache.put(s, fields); // update cache
}
return fields;
}
@Override
protected String getSchemaName(Object datum) {
// support implicit conversion from thrift's i16
// to avro INT for thrift's optional fields
if (datum instanceof Short)
return Schema.Type.INT.getName();
// support implicit conversion from thrift's byte
// to avro INT for thrift's optional fields
if (datum instanceof Byte)
return Schema.Type.INT.getName();
return super.getSchemaName(datum);
}
@Override
protected boolean isRecord(Object datum) {
return datum instanceof TBase;
}
@Override
protected boolean isEnum(Object datum) {
return datum instanceof TEnum;
}
@Override
protected Schema getEnumSchema(Object datum) {
return getSchema(datum.getClass());
}
@Override
// setFieldValue takes ByteBuffer but getFieldValue returns byte[]
protected boolean isBytes(Object datum) {
if (datum instanceof ByteBuffer)
return true;
if (datum == null)
return false;
Class c = datum.getClass();
return c.isArray() && c.getComponentType() == Byte.TYPE;
}
@Override
public Object newRecord(Object old, Schema schema) {
try {
Class c = ClassUtils.forName(SpecificData.getClassName(schema));
if (c == null)
return super.newRecord(old, schema); // punt to generic
if (c.isInstance(old))
return old; // reuse instance
return c.newInstance(); // create new instance
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
protected Schema getRecordSchema(Object record) {
return getSchema(record.getClass());
}
private final Map<Class, Schema> schemaCache = new ConcurrentHashMap<>();

/** Return a record schema given a thrift generated class. */
// NOTE: the explicit get/put (rather than computeIfAbsent) is deliberate:
// the struct branch below recurses back into getSchema() for nested field
// types, and a ConcurrentHashMap mapping function must not re-enter the map.
@SuppressWarnings("unchecked")
public Schema getSchema(Class c) {
Schema schema = schemaCache.get(c);
if (schema == null) { // cache miss
try {
if (TEnum.class.isAssignableFrom(c)) { // enum
// enum symbols mirror the Java enum constant names
List<String> symbols = new ArrayList<>();
for (Enum e : ((Class<? extends Enum>) c).getEnumConstants())
symbols.add(e.name());
schema = Schema.createEnum(c.getName(), null, null, symbols);
} else if (TBase.class.isAssignableFrom(c)) { // struct
// Thrift exceptions (Throwable subclasses) become Avro error records
schema = Schema.createRecord(c.getName(), null, null, Throwable.class.isAssignableFrom(c));
List<Field> fields = new ArrayList<>();
for (FieldMetaData f : FieldMetaData.getStructMetaDataMap((Class<? extends TBase>) c).values()) {
Schema s = getSchema(f.valueMetaData);
// optional fields become unions with null, unless already a union
if (f.requirementType == TFieldRequirementType.OPTIONAL && (s.getType() != Schema.Type.UNION))
s = nullable(s);
fields.add(new Field(f.fieldName, s, null, null));
}
schema.setFields(fields);
} else {
throw new RuntimeException("Not a Thrift-generated class: " + c);
}
} catch (Exception e) {
throw new RuntimeException(e);
}
schemaCache.put(c, schema); // update cache
}
return schema;
}
private static final Schema NULL = Schema.create(Schema.Type.NULL);

/**
 * Maps a Thrift field-type descriptor to an Avro schema. Types whose values
 * may be unset in Thrift (enums, containers, strings, structs) are wrapped
 * in a union with null via nullable().
 */
private Schema getSchema(FieldValueMetaData f) {
switch (f.type) {
case TType.BOOL:
return Schema.create(Schema.Type.BOOLEAN);
case TType.BYTE:
// Avro has no byte type; tag the int so readers can narrow it back
Schema b = Schema.create(Schema.Type.INT);
b.addProp(THRIFT_PROP, "byte");
return b;
case TType.I16:
// Avro has no short type; tag the int so readers can narrow it back
Schema s = Schema.create(Schema.Type.INT);
s.addProp(THRIFT_PROP, "short");
return s;
case TType.I32:
return Schema.create(Schema.Type.INT);
case TType.I64:
return Schema.create(Schema.Type.LONG);
case TType.DOUBLE:
return Schema.create(Schema.Type.DOUBLE);
case TType.ENUM:
EnumMetaData enumMeta = (EnumMetaData) f;
return nullable(getSchema(enumMeta.enumClass));
case TType.LIST:
ListMetaData listMeta = (ListMetaData) f;
return nullable(Schema.createArray(getSchema(listMeta.elemMetaData)));
case TType.MAP:
MapMetaData mapMeta = (MapMetaData) f;
// Avro map keys are always strings, so only string-keyed maps convert
if (mapMeta.keyMetaData.type != TType.STRING)
throw new AvroRuntimeException("Map keys must be strings: " + f);
Schema map = Schema.createMap(getSchema(mapMeta.valueMetaData));
GenericData.setStringType(map, GenericData.StringType.String);
return nullable(map);
case TType.SET:
// Thrift sets are encoded as Avro arrays tagged with the "set" property
SetMetaData setMeta = (SetMetaData) f;
Schema set = Schema.createArray(getSchema(setMeta.elemMetaData));
set.addProp(THRIFT_PROP, "set");
return nullable(set);
case TType.STRING:
if (f.isBinary())
return nullable(Schema.create(Schema.Type.BYTES));
Schema string = Schema.create(Schema.Type.STRING);
GenericData.setStringType(string, GenericData.StringType.String);
return nullable(string);
case TType.STRUCT:
StructMetaData structMeta = (StructMetaData) f;
Schema record = getSchema(structMeta.structClass);
return nullable(record);
case TType.VOID:
return NULL;
default:
throw new RuntimeException("Unexpected type in field: " + f);
}
}
/** Wraps a schema in a union with null, making the value optional. */
private Schema nullable(Schema schema) {
  List<Schema> branches = Arrays.asList(NULL, schema);
  return Schema.createUnion(branches);
}
}
| 7,105 |
0 | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.thrift;
import java.io.IOException;
import java.util.Set;
import java.util.HashSet;
import org.apache.avro.Schema;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.io.Decoder;
import org.apache.avro.util.ClassUtils;
/**
* {@link org.apache.avro.io.DatumReader DatumReader} for generated Thrift
* classes.
*/
public class ThriftDatumReader<T> extends GenericDatumReader<T> {
  public ThriftDatumReader() {
    this(null, null, ThriftData.get());
  }

  /** Construct a reader whose schema is derived from a Thrift-generated class. */
  public ThriftDatumReader(Class<T> c) {
    this(ThriftData.get().getSchema(c));
  }

  /** Construct where the writer's and reader's schemas are the same. */
  public ThriftDatumReader(Schema schema) {
    this(schema, schema, ThriftData.get());
  }

  /** Construct given writer's and reader's schema. */
  public ThriftDatumReader(Schema writer, Schema reader) {
    this(writer, reader, ThriftData.get());
  }

  protected ThriftDatumReader(Schema writer, Schema reader, ThriftData data) {
    super(writer, reader, data);
  }

  /**
   * Resolves an enum symbol to the Thrift-generated Java enum constant when
   * the generated class is on the classpath; otherwise falls back to the
   * generic representation.
   */
  @Override
  @SuppressWarnings("unchecked")
  protected Object createEnum(String symbol, Schema schema) {
    try {
      Class c = ClassUtils.forName(SpecificData.getClassName(schema));
      if (c == null)
        return super.createEnum(symbol, schema); // punt to generic
      return Enum.valueOf(c, symbol);
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
  }

  /**
   * Reads an Avro int, narrowing it back to Thrift's {@code byte} or
   * {@code short} when the schema carries the corresponding thrift property
   * (set by {@code ThriftData} when the schema was derived).
   */
  @Override
  protected Object readInt(Object old, Schema s, Decoder in) throws IOException {
    String type = s.getProp(ThriftData.THRIFT_PROP);
    int value = in.readInt();
    if (type != null) {
      if ("byte".equals(type))
        return (byte) value;
      if ("short".equals(type))
        return (short) value;
    }
    return value;
  }

  /**
   * Creates the container for an Avro array; Thrift sets (arrays tagged with
   * the {@code "set"} thrift property) are materialized as a {@link HashSet},
   * reusing and clearing {@code old} when possible.
   */
  @Override
  @SuppressWarnings("rawtypes")
  protected Object newArray(Object old, int size, Schema schema) {
    if ("set".equals(schema.getProp(ThriftData.THRIFT_PROP))) {
      if (old instanceof Set) {
        ((Set) old).clear();
        return old;
      }
      // presize from the block-size hint to avoid rehashing during the read
      return new HashSet(Math.max((int) (size / 0.75f) + 1, 16));
    } else {
      return super.newArray(old, size, schema);
    }
  }
}
| 7,106 |
0 | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro | Create_ds/avro/lang/java/thrift/src/main/java/org/apache/avro/thrift/ThriftDatumWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.thrift;
import org.apache.avro.Schema;
import org.apache.avro.io.Encoder;
import org.apache.avro.generic.GenericDatumWriter;
import java.nio.ByteBuffer;
import java.io.IOException;
/**
* {@link org.apache.avro.io.DatumWriter DatumWriter} for generated thrift
* classes.
*/
public class ThriftDatumWriter<T> extends GenericDatumWriter<T> {

  /** Construct with no root schema; one must be set before writing. */
  public ThriftDatumWriter() {
    super(ThriftData.get());
  }

  /** Construct with the schema derived from a Thrift-generated class. */
  public ThriftDatumWriter(Class<T> c) {
    super(ThriftData.get().getSchema(c), ThriftData.get());
  }

  /** Construct with an explicit root schema. */
  public ThriftDatumWriter(Schema schema) {
    super(schema, ThriftData.get());
  }

  protected ThriftDatumWriter(Schema root, ThriftData thriftData) {
    super(root, thriftData);
  }

  protected ThriftDatumWriter(ThriftData thriftData) {
    super(thriftData);
  }

  /**
   * Writes a bytes value. Thrift is asymmetric here: the generated setter
   * accepts a {@code ByteBuffer} while the getter returns {@code byte[]}, so
   * the datum arrives as a raw array and must be wrapped before encoding.
   */
  @Override
  protected void writeBytes(Object datum, Encoder out) throws IOException {
    byte[] raw = (byte[]) datum;
    out.writeBytes(ByteBuffer.wrap(raw));
  }
}
| 7,107 |
0 | Create_ds/avro/lang/java/avro/src/test | Create_ds/avro/lang/java/avro/src/test/java/NoPackage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** I am a class without a package. */
// NOTE(review): presumably exists so tests can exercise handling of types
// with no package qualifier — confirm against its usages.
public interface NoPackage {
}
| 7,108 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDecimalConversion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericFixed;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import static java.math.RoundingMode.HALF_EVEN;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests for {@code Conversions.DecimalConversion}: round-trips through both
 * the bytes and fixed representations, precision/scale overflow errors, and
 * the importance of the logical type's scale when decoding unscaled values.
 */
public class TestDecimalConversion {
private static final Conversion<BigDecimal> CONVERSION = new Conversions.DecimalConversion();
// 3-byte fixed carrying decimal(precision=5, scale=2)
private Schema smallerSchema;
private LogicalType smallerLogicalType;
// 12-byte fixed carrying decimal(precision=28, scale=15)
private Schema largerSchema;
private LogicalType largerLogicalType;
@BeforeEach
public void setup() {
smallerSchema = Schema.createFixed("smallFixed", null, null, 3);
smallerSchema.addProp("logicalType", "decimal");
smallerSchema.addProp("precision", 5);
smallerSchema.addProp("scale", 2);
smallerLogicalType = LogicalTypes.fromSchema(smallerSchema);
largerSchema = Schema.createFixed("largeFixed", null, null, 12);
largerSchema.addProp("logicalType", "decimal");
largerSchema.addProp("precision", 28);
largerSchema.addProp("scale", 15);
largerLogicalType = LogicalTypes.fromSchema(largerSchema);
}
// --- bytes representation ---
@Test
void toFromBytes() {
final BigDecimal value = BigDecimal.valueOf(10.99).setScale(15, HALF_EVEN);
final ByteBuffer byteBuffer = CONVERSION.toBytes(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromBytes(byteBuffer, largerSchema, largerLogicalType);
assertEquals(value, result);
}
@Test
void toFromBytesMaxPrecision() {
final BigDecimal value = new BigDecimal("4567335489766.99834").setScale(15, HALF_EVEN);
final ByteBuffer byteBuffer = CONVERSION.toBytes(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromBytes(byteBuffer, largerSchema, largerLogicalType);
assertEquals(value, result);
}
@Test
void toBytesPrecisionError() {
// precision 34 exceeds the declared maximum of 28
final BigDecimal value = new BigDecimal("1.07046455859736525E+18").setScale(15, HALF_EVEN);
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toBytes(value, largerSchema, largerLogicalType));
assertEquals("Cannot encode decimal with precision 34 as max precision 28", avroTypeException.getMessage());
}
@Test
void toBytesFixedSmallerScale() {
// a value with smaller scale is widened to the schema's scale of 15
final BigDecimal value = new BigDecimal("99892.1234").setScale(10, HALF_EVEN);
final ByteBuffer byteBuffer = CONVERSION.toBytes(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromBytes(byteBuffer, largerSchema, largerLogicalType);
assertEquals(new BigDecimal("99892.123400000000000"), result);
}
@Test
void toBytesScaleError() {
// scale 16 cannot be reduced to 15 without rounding
final BigDecimal value = new BigDecimal("4567335489766.989989998435899453").setScale(16, HALF_EVEN);
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toBytes(value, largerSchema, largerLogicalType));
assertEquals("Cannot encode decimal with scale 16 as scale 15 without rounding", avroTypeException.getMessage());
}
// --- fixed representation ---
@Test
void toFromFixed() {
final BigDecimal value = new BigDecimal("3").setScale(15, HALF_EVEN);
final GenericFixed fixed = CONVERSION.toFixed(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromFixed(fixed, largerSchema, largerLogicalType);
assertEquals(value, result);
}
@Test
void toFromFixedMaxPrecision() {
final BigDecimal value = new BigDecimal("4567335489766.99834").setScale(15, HALF_EVEN);
final GenericFixed fixed = CONVERSION.toFixed(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromFixed(fixed, largerSchema, largerLogicalType);
assertEquals(value, result);
}
@Test
void toFixedPrecisionError() {
final BigDecimal value = new BigDecimal("1.07046455859736525E+18").setScale(15, HALF_EVEN);
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toFixed(value, largerSchema, largerLogicalType));
assertEquals("Cannot encode decimal with precision 34 as max precision 28", avroTypeException.getMessage());
}
@Test
void toFromFixedSmallerScale() {
final BigDecimal value = new BigDecimal("99892.1234").setScale(10, HALF_EVEN);
final GenericFixed fixed = CONVERSION.toFixed(value, largerSchema, largerLogicalType);
final BigDecimal result = CONVERSION.fromFixed(fixed, largerSchema, largerLogicalType);
assertEquals(new BigDecimal("99892.123400000000000"), result);
}
@Test
void toFixedScaleError() {
final BigDecimal value = new BigDecimal("4567335489766.3453453453453453453453").setScale(16, HALF_EVEN);
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toFixed(value, largerSchema, largerLogicalType));
assertEquals("Cannot encode decimal with scale 16 as scale 15 without rounding", avroTypeException.getMessage());
}
@Test
void toFromFixedMatchScaleAndPrecision() {
final BigDecimal value = new BigDecimal("123.45");
final GenericFixed fixed = CONVERSION.toFixed(value, smallerSchema, smallerLogicalType);
final BigDecimal result = CONVERSION.fromFixed(fixed, smallerSchema, smallerLogicalType);
assertEquals(value, result);
}
@Test
void toFromFixedRepresentedInLogicalTypeAllowRoundUnneccesary() {
// trailing zeros beyond scale 2 are dropped without error
final BigDecimal value = new BigDecimal("123.4500");
final GenericFixed fixed = CONVERSION.toFixed(value, smallerSchema, smallerLogicalType);
final BigDecimal result = CONVERSION.fromFixed(fixed, smallerSchema, smallerLogicalType);
assertEquals(new BigDecimal("123.45"), result);
}
@Test
void toFromFixedPrecisionErrorAfterAdjustingScale() {
final BigDecimal value = new BigDecimal("1234.560");
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toFixed(value, smallerSchema, smallerLogicalType));
assertEquals(
"Cannot encode decimal with precision 6 as max precision 5. This is after safely adjusting scale from 3 to required 2",
avroTypeException.getMessage());
}
@Test
void toFixedRepresentedInLogicalTypeErrorIfRoundingRequired() {
final BigDecimal value = new BigDecimal("123.456");
AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
() -> CONVERSION.toFixed(value, smallerSchema, smallerLogicalType));
assertEquals("Cannot encode decimal with scale 3 as scale 2 without rounding", avroTypeException.getMessage());
}
// --- decoding always applies the logical type's scale to the unscaled value ---
@Test
void importanceOfEnsuringCorrectScaleWhenConvertingFixed() {
LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) smallerLogicalType;
final BigDecimal bigDecimal = new BigDecimal("1234.5");
assertEquals(decimal.getPrecision(), bigDecimal.precision());
assertTrue(decimal.getScale() >= bigDecimal.scale());
// the raw unscaled bytes (12345) decode with the schema scale of 2 -> 123.45
final byte[] bytes = bigDecimal.unscaledValue().toByteArray();
final BigDecimal fromFixed = CONVERSION.fromFixed(new GenericData.Fixed(smallerSchema, bytes), smallerSchema,
decimal);
assertNotEquals(0, bigDecimal.compareTo(fromFixed));
assertNotEquals(bigDecimal, fromFixed);
assertEquals(new BigDecimal("123.45"), fromFixed);
}
@Test
void importanceOfEnsuringCorrectScaleWhenConvertingBytes() {
LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) smallerLogicalType;
final BigDecimal bigDecimal = new BigDecimal("1234.5");
assertEquals(decimal.getPrecision(), bigDecimal.precision());
assertTrue(decimal.getScale() >= bigDecimal.scale());
// same as above but through the bytes representation
final byte[] bytes = bigDecimal.unscaledValue().toByteArray();
final BigDecimal fromBytes = CONVERSION.fromBytes(ByteBuffer.wrap(bytes), smallerSchema, decimal);
assertNotEquals(0, bigDecimal.compareTo(fromBytes));
assertNotEquals(bigDecimal, fromBytes);
assertEquals(new BigDecimal("123.45"), fromBytes);
}
}
| 7,109 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaNormalization.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.List;
import java.util.Locale;
import org.apache.avro.util.CaseFinder;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
/**
 * Data-driven tests for {@code SchemaNormalization}: canonical parsing form
 * and 64-bit parsing fingerprints, with cases loaded from
 * share/test/data/schema-tests.txt. The expected fingerprint is recomputed
 * here with an independent bit-serial implementation ({@code altFingerprint}).
 */
public class TestSchemaNormalization {
@Nested
public static class TestCanonical {
public static List<Object[]> cases() throws IOException {
return CaseFinder.find(data(), "canonical", new ArrayList<>());
}
@ParameterizedTest
@MethodSource("cases")
void canonicalization(String input, String expectedOutput) {
assertEquals(SchemaNormalization.toParsingForm(new Schema.Parser().parse(input)), expectedOutput);
}
}
@Nested
public static class TestFingerprint {
public static List<Object[]> cases() throws IOException {
return CaseFinder.find(data(), "fingerprint", new ArrayList<>());
}
@ParameterizedTest
@MethodSource("cases")
void canonicalization(String input, String expectedOutput) {
Schema s = new Schema.Parser().parse(input);
long carefulFP = altFingerprint(SchemaNormalization.toParsingForm(s));
assertEquals(carefulFP, Long.parseLong(expectedOutput));
assertEqHex(carefulFP, SchemaNormalization.parsingFingerprint64(s));
}
}
// see AVRO-1493
@Nested
public static class TestFingerprintInternationalization {
// Runs the fingerprint cases under the Turkish locale, whose case-folding
// rules differ, then restores the original default locale.
public static List<Object[]> cases() throws IOException {
return CaseFinder.find(data(), "fingerprint", new ArrayList<>());
}
@ParameterizedTest
@MethodSource("cases")
void canonicalization(String input, String expectedOutput) {
Locale originalDefaultLocale = Locale.getDefault();
Locale.setDefault(Locale.forLanguageTag("tr"));
Schema s = new Schema.Parser().parse(input);
long carefulFP = altFingerprint(SchemaNormalization.toParsingForm(s));
assertEquals(carefulFP, Long.parseLong(expectedOutput));
assertEqHex(carefulFP, SchemaNormalization.parsingFingerprint64(s));
Locale.setDefault(originalDefaultLocale);
}
}
private static String DATA_FILE = (System.getProperty("share.dir", "../../../share") + "/test/data/schema-tests.txt");
private static BufferedReader data() throws IOException {
return Files.newBufferedReader(Paths.get(DATA_FILE), UTF_8);
}
/**
 * Compute the fingerprint of <i>bytes[s,s+l)</i> using a slow algorithm that's
 * an alternative to that implemented in {@link SchemaNormalization}. Algo from
 * Broder93 ("Some applications of Rabin's fingerprinting method").
 */
public static long altFingerprint(String s) {
// In our algorithm, we multiply all inputs by x^64 (which is
// equivalent to prepending it with a single "1" bit followed
// by 64 zero bits). This both deals with the fact that
// CRCs ignore leading zeros, and also ensures some degree of
// randomness for small inputs
long tmp = altExtend(SchemaNormalization.EMPTY64, 64, ONE, s.getBytes(UTF_8));
return altExtend(SchemaNormalization.EMPTY64, 64, tmp, POSTFIX);
}
// Bit-serial fingerprint update: each byte is processed one bit at a time
// (j walks the 8 bit positions of b1, least-significant first).
private static long altExtend(long poly, int degree, long fp, byte[] b) {
final long overflowBit = 1L << (64 - degree);
for (byte b1 : b) {
for (int j = 1; j < 129; j = j << 1) {
boolean overflow = (0 != (fp & overflowBit)); // bit about to be shifted out
fp >>>= 1;
if (0 != (j & b1))
fp |= ONE; // shift in the input bit
if (overflow) {
fp ^= poly; // hi-order coeff of poly kills overflow bit
}
}
}
return fp;
}
// Single high bit: the position where altExtend shifts input bits in.
private static final long ONE = 0x8000000000000000L;
// Eight zero bytes, appended so every input is multiplied by x^64
// (see the comment in altFingerprint above).
private static final byte[] POSTFIX = { 0, 0, 0, 0, 0, 0, 0, 0 };
private static void assertEqHex(long expected, long actual) {
assertEquals(expected, actual, () -> format("0x%016x != 0x%016x", expected, actual));
}
private static String format(String f, Object... args) {
return (new Formatter()).format(f, args).toString();
}
}
| 7,110 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityReaderFieldMissingDefaultValue.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.A_INT_B_DINT_RECORD1;
import static org.apache.avro.TestSchemas.A_INT_RECORD1;
import static org.apache.avro.TestSchemas.EMPTY_RECORD1;
/**
 * Parameterized check that each reader/writer schema pair below is reported
 * as incompatible with {@code READER_FIELD_MISSING_DEFAULT_VALUE}, at the
 * given field name and schema location.
 */
public class TestSchemaCompatibilityReaderFieldMissingDefaultValue {
// Each case: reader schema, writer schema, offending field name, location path.
public static Stream<Arguments> data() {
return Stream.of(Arguments.of(A_INT_RECORD1, EMPTY_RECORD1, "a", "/fields/0"),
Arguments.of(A_INT_B_DINT_RECORD1, EMPTY_RECORD1, "a", "/fields/0"));
}
@ParameterizedTest
@MethodSource("data")
public void testReaderFieldMissingDefaultValueSchemas(Schema reader, Schema writer, String details, String location) {
validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.READER_FIELD_MISSING_DEFAULT_VALUE, details,
location);
}
}
| 7,111 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/FooRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
 * Simple test value class; equality, hashing, and the string form are all
 * defined solely by {@code fooCount}.
 */
public class FooRecord {
  private int fooCount;

  // no-arg constructor — presumably needed for reflective instantiation
  // in tests; confirm against callers
  public FooRecord() {
  }

  public FooRecord(int fooCount) {
    this.fooCount = fooCount;
  }

  @Override
  public boolean equals(Object that) {
    if (!(that instanceof FooRecord)) {
      return false;
    }
    FooRecord other = (FooRecord) that;
    return other.fooCount == this.fooCount;
  }

  @Override
  public int hashCode() {
    return fooCount;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder(FooRecord.class.getSimpleName());
    sb.append("{count=").append(fooCount).append('}');
    return sb.toString();
  }
}
| 7,112 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.stream.Stream;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableFileInput;
import org.apache.avro.file.Syncable;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.util.RandomData;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestDataFile {
private static final Logger LOG = LoggerFactory.getLogger(TestDataFile.class);
@TempDir
public File DIR;
/**
 * Supplies the codec configurations exercised by the parameterized tests:
 * absent (null), explicit null codec, deflate and xz at several levels,
 * snappy, and zstandard with varying level/checksum/buffer-pool settings.
 */
public static Stream<Arguments> codecs() {
List<Object[]> r = new ArrayList<>();
r.add(new Object[] { null });
r.add(new Object[] { CodecFactory.deflateCodec(0) });
r.add(new Object[] { CodecFactory.deflateCodec(1) });
r.add(new Object[] { CodecFactory.deflateCodec(9) });
r.add(new Object[] { CodecFactory.nullCodec() });
r.add(new Object[] { CodecFactory.snappyCodec() });
r.add(new Object[] { CodecFactory.xzCodec(0) });
r.add(new Object[] { CodecFactory.xzCodec(1) });
r.add(new Object[] { CodecFactory.xzCodec(6) });
r.add(new Object[] { CodecFactory.zstandardCodec(-5) });
r.add(new Object[] { CodecFactory.zstandardCodec(0, true) });
r.add(new Object[] { CodecFactory.zstandardCodec(5, false) });
r.add(new Object[] { CodecFactory.zstandardCodec(18, true) });
r.add(new Object[] { CodecFactory.zstandardCodec(0, false, false) });
r.add(new Object[] { CodecFactory.zstandardCodec(0, false, true) });
return r.stream().map(Arguments::of);
}
// Number of random records written per test run; overridable via -Dtest.count.
private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "200"));
// When true (the default), read-back values are compared against regenerated data.
private static final boolean VALIDATE = !"false".equals(System.getProperty("test.validate", "true"));
// Shared seed so RandomData regenerates the exact records that were written.
private static final long SEED = System.currentTimeMillis();
private static final String SCHEMA_JSON = "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
+ "{\"name\":\"stringField\", \"type\":\"string\"}," + "{\"name\":\"longField\", \"type\":\"long\"}]}";
private static final Schema SCHEMA = new Schema.Parser().parse(SCHEMA_JSON);
/** Returns the per-codec data file under the temp directory. */
private File makeFile(CodecFactory codec) {
return new File(DIR, "test-" + codec + ".avro");
}
/**
 * Runs the sub-tests sequentially for one codec. The order matters: the
 * write test creates the file that the subsequent read/split/sync/append
 * tests consume.
 */
@ParameterizedTest
@MethodSource("codecs")
public void runTestsInOrder(CodecFactory codec) throws Exception {
LOG.info("Running with codec: " + codec);
testGenericWrite(codec);
testGenericRead(codec);
testSplits(codec);
testSyncDiscovery(codec);
testGenericAppend(codec);
testReadWithHeader(codec);
testFSync(codec, false);
testFSync(codec, true);
}
/**
 * Writes COUNT random records with the given codec, forcing explicit sync
 * markers mid-file, and verifies that appending an invalid record (null
 * value in the long field) throws AppendWriteException without aborting the
 * rest of the write. Also checks that a second close() is harmless (AVRO-1249).
 */
private void testGenericWrite(CodecFactory codec) throws IOException {
DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(100);
if (codec != null) {
writer.setCodec(codec);
}
writer.create(SCHEMA, makeFile(codec));
try {
int count = 0;
for (Object datum : new RandomData(SCHEMA, COUNT, SEED)) {
writer.append(datum);
if (++count % (COUNT / 3) == 0)
writer.sync(); // force some syncs mid-file
if (count == 5) {
// force a write of an invalid record
boolean threwProperly = false;
try {
GenericData.Record record = (GenericData.Record) datum;
record.put(1, null);
// set true just before the append and reset just after, so the flag
// stays true only if append threw as expected
threwProperly = true;
writer.append(record);
threwProperly = false;
} catch (DataFileWriter.AppendWriteException e) {
System.out.println("Ignoring: " + e);
}
assertTrue(threwProperly, "failed to throw when expected");
}
}
} finally {
writer.close();
}
// Ensure that a second call to close doesn't raise an exception. (AVRO-1249)
Exception doubleCloseEx = null;
try {
writer.close();
} catch (Exception e) {
doubleCloseEx = e;
}
assertNull(doubleCloseEx, "Double close() threw an unexpected exception");
}
/**
 * Reads back the file produced by the write test; when validation is
 * enabled, each record is compared against the regenerated random data
 * (same schema, count, and seed).
 */
private void testGenericRead(CodecFactory codec) throws IOException {
  File file = makeFile(codec);
  try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
    Object datum = null;
    if (!VALIDATE) {
      // just exercise decoding without comparing values
      for (int i = 0; i < COUNT; i++)
        datum = reader.next(datum);
      return;
    }
    for (Object expected : new RandomData(SCHEMA, COUNT, SEED)) {
      datum = reader.next(datum);
      assertEquals(expected, datum);
    }
  }
}
/**
 * Simulates split-based reading: walks the file backwards in random-sized
 * splits, syncing to each split's start and counting records up to its end.
 * Every record must be counted exactly once, so the total equals COUNT
 * regardless of where the split boundaries fall.
 */
private void testSplits(CodecFactory codec) throws IOException {
File file = makeFile(codec);
try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
Random rand = new Random(SEED);
int splits = 10; // number of splits
int length = (int) file.length(); // length of file
int end = length; // end of split
int remaining = end; // bytes remaining
int count = 0; // count of entries
while (remaining > 0) {
int start = Math.max(0, end - rand.nextInt(2 * length / splits));
reader.sync(start); // count entries in split
while (!reader.pastSync(end)) {
reader.next();
count++;
}
remaining -= end - start;
end = start;
}
assertEquals(COUNT, count);
}
}
/**
 * Walks the file recording every distinct sync point observed while reading,
 * then verifies that sync(0) lands on the first recorded point and that
 * seeking to each recorded point yields a readable record.
 *
 * @param codec compression codec the file was written with, or null
 * @throws IOException on file I/O failure
 */
private void testSyncDiscovery(CodecFactory codec) throws IOException {
  File file = makeFile(codec);
  try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
    // Collect each new sync marker position as we stream through the file.
    ArrayList<Long> syncPoints = new ArrayList<>();
    long lastSeen = -1L;
    while (reader.hasNext()) {
      long current = reader.previousSync();
      if (current != lastSeen) {
        lastSeen = current;
        syncPoints.add(current);
      }
      reader.next();
    }
    // sync(0) must position the reader at the very first sync point.
    reader.sync(0);
    assertEquals(reader.previousSync(), (long) syncPoints.get(0));
    // Every recorded point must be a valid place to resume reading from.
    for (long point : syncPoints) {
      reader.seek(point);
      assertNotNull(reader.next());
    }
  }
}
/**
 * Appends a second batch of records (seed SEED + 1) to an existing data file
 * and verifies the appended portion can be read back starting from the
 * original end-of-file offset.
 *
 * @param codec compression codec the file was written with, or null
 * @throws IOException on file I/O failure
 */
private void testGenericAppend(CodecFactory codec) throws IOException {
  File file = makeFile(codec);
  long appendOffset = file.length();
  // Append a fresh batch generated from a different seed than the base file.
  try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>()).appendTo(file)) {
    for (Object record : new RandomData(SCHEMA, COUNT, SEED + 1)) {
      writer.append(record);
    }
  }
  try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
    // Skip straight to where the appended data begins.
    reader.seek(appendOffset);
    Object record = null;
    if (!VALIDATE) {
      for (int i = 0; i < COUNT; i++) {
        record = reader.next(record);
      }
    } else {
      for (Object expected : new RandomData(SCHEMA, COUNT, SEED + 1)) {
        record = reader.next(record);
        assertEquals(expected, record);
      }
    }
  }
}
/**
 * Verifies DataFileReader.openReader with a pre-read Header: reopening at an
 * arbitrary mid-file offset with sync == true must be able to skip forward to
 * a valid sync point, while reopening at a known sync point with sync == false
 * must not move the stream position.
 *
 * @param codec compression codec the file was written with, or null
 * @throws IOException on file I/O failure
 */
private void testReadWithHeader(CodecFactory codec) throws IOException {
  File file = makeFile(codec);
  try (DataFileReader<Object> reader = new DataFileReader<>(file, new GenericDatumReader<>())) {
    // get a header for this file
    DataFileStream.Header header = reader.getHeader();
    // re-open to an arbitrary position near the middle, with sync == true
    SeekableFileInput sin = new SeekableFileInput(file);
    sin.seek(sin.length() / 2);
    try (DataFileReader<Object> readerTrue = DataFileReader.openReader(sin, new GenericDatumReader<>(), header,
        true);) {
      assertNotNull(readerTrue.next(), "Should be able to reopen from arbitrary point");
      long validPos = readerTrue.previousSync();
      // post sync, we know of a valid sync point: re-open with seek (sync == false)
      sin.seek(validPos);
      try (DataFileReader<Object> readerFalse = DataFileReader.openReader(sin, new GenericDatumReader<>(), header,
          false)) {
        assertEquals(validPos, sin.tell(), "Should not move from sync point on reopen");
        assertNotNull(readerFalse.next(), "Should be able to reopen at sync point");
      }
    }
  }
}
/**
 * Reads a fixture file whose metadata presumably contains bytes resembling a
 * sync marker (suggested by the file name syncInMeta.avro) and checks that
 * sync(0) followed by a full scan still yields only non-null records.
 */
@Test
public void syncInHeader() throws IOException {
  File fixture = new File("../../../share/test/data/syncInMeta.avro");
  try (DataFileReader<Object> reader = new DataFileReader<>(fixture, new GenericDatumReader<>())) {
    reader.sync(0);
    for (Object record : reader) {
      assertNotNull(record);
    }
  }
}
/**
 * Reads a legacy fixture (name suggests an Avro 1.2-era file — presumably a
 * backward-compatibility check) and asserts every record is non-null.
 */
@Test
public void test12() throws IOException {
  File legacyFile = new File("../../../share/test/data/test.avro12");
  readFile(legacyFile, new GenericDatumReader<>());
}
/**
 * With flush-on-every-block disabled, verifies that writer.sync() alone does
 * not flush the underlying stream, while every explicit writer.flush() does:
 * the observed flush count must be strictly below the record count and at
 * least the number of explicit flush() calls.
 */
@Test
public void flushCount() throws IOException {
  DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>());
  writer.setFlushOnEveryBlock(false);
  TestingByteArrayOutputStream out = new TestingByteArrayOutputStream();
  writer.create(SCHEMA, out);
  int written = 0;
  int explicitFlushes = 0;
  try {
    for (Object record : new RandomData(SCHEMA, COUNT, SEED + 1)) {
      written++;
      writer.append(record);
      writer.sync(); // sync() alone must not force a flush to the stream
      boolean timeToFlush = (written % 10 == 0);
      if (timeToFlush) {
        explicitFlushes++;
        writer.flush();
      }
    }
  } finally {
    writer.close();
  }
  System.out.println("Total number of flushes: " + out.flushCount);
  // The buffered stream may flush on its own when its buffer fills, so the
  // only reliable bounds are: strictly fewer flushes than syncs, and at
  // least one flush per explicit flush() call.
  assertTrue(out.flushCount < written && out.flushCount >= explicitFlushes);
}
/**
 * Verifies that writer.fSync() propagates to the underlying Syncable stream:
 * every fSync() call must produce exactly one sync() on the output, with
 * flush-on-every-block disabled.
 *
 * @param codec   compression codec, or null
 * @param useFile when true, the writer is opened in append mode from an
 *                existing file's stream; otherwise a fresh file is created
 * @throws IOException on file I/O failure
 */
private void testFSync(CodecFactory codec, boolean useFile) throws IOException {
  try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
    writer.setFlushOnEveryBlock(false);
    TestingByteArrayOutputStream out = new TestingByteArrayOutputStream();
    if (useFile) {
      // Reuse an existing file's header/metadata, but write to the counting stream.
      File f = makeFile(codec);
      try (SeekableFileInput in = new SeekableFileInput(f)) {
        writer.appendTo(in, out);
      }
    } else {
      writer.create(SCHEMA, out);
    }
    int currentCount = 0;
    int syncCounter = 0;
    for (Object datum : new RandomData(SCHEMA, COUNT, SEED + 1)) {
      currentCount++;
      writer.append(datum);
      if (currentCount % 10 == 0) {
        writer.fSync();
        syncCounter++;
      }
    }
    System.out.println("Total number of syncs: " + out.syncCount);
    // Exactly one stream sync per fSync() call — no more, no fewer.
    assertEquals(syncCounter, out.syncCount);
  }
}
/**
 * Reads every record in {@code f} with the supplied datum reader, asserting
 * that none of them is null.
 *
 * @param f           Avro data file to read
 * @param datumReader reader used to decode each record
 * @throws IOException on file I/O failure
 */
static void readFile(File f, DatumReader<? extends Object> datumReader) throws IOException {
  try (FileReader<? extends Object> reader = DataFileReader.openReader(f, datumReader)) {
    java.util.Iterator<? extends Object> it = reader.iterator();
    while (it.hasNext()) {
      assertNotNull(it.next());
    }
  }
}
/**
 * Ad-hoc benchmark entry point: reads the data file given as args[0] five
 * times — one initial pass (presumably a warm-up) plus four timed passes —
 * optionally projecting with the reader schema file in args[1], and prints
 * the elapsed wall-clock time.
 */
public static void main(String[] args) throws Exception {
  File input = new File(args[0]);
  Schema projection = (args.length > 1) ? new Schema.Parser().parse(new File(args[1])) : null;
  // Initial untimed pass.
  TestDataFile.readFile(input, new GenericDatumReader<>(null, projection));
  long start = System.currentTimeMillis();
  int timedPasses = 4;
  for (int pass = 0; pass < timedPasses; pass++) {
    TestDataFile.readFile(input, new GenericDatumReader<>(null, projection));
  }
  System.out.println("Time: " + (System.currentTimeMillis() - start));
}
/**
 * ByteArrayOutputStream that counts calls to flush() and sync() so tests can
 * assert how often the writer flushed or synced the underlying stream.
 */
private static class TestingByteArrayOutputStream extends ByteArrayOutputStream implements Syncable {
  // Number of times flush() has been invoked on this stream.
  private int flushCount = 0;
  // Number of times sync() has been invoked on this stream.
  private int syncCount = 0;

  @Override
  public void flush() throws IOException {
    super.flush();
    flushCount++;
  }

  @Override
  public void sync() throws IOException {
    syncCount++;
  }
}
}
| 7,113 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityTypeMismatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.A_INT_RECORD1;
import static org.apache.avro.TestSchemas.BOOLEAN_SCHEMA;
import static org.apache.avro.TestSchemas.BYTES_SCHEMA;
import static org.apache.avro.TestSchemas.DOUBLE_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM2_AB_SCHEMA;
import static org.apache.avro.TestSchemas.FIXED_4_BYTES;
import static org.apache.avro.TestSchemas.FLOAT_SCHEMA;
import static org.apache.avro.TestSchemas.INT_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.INT_FLOAT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_LIST_RECORD;
import static org.apache.avro.TestSchemas.INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_MAP_SCHEMA;
import static org.apache.avro.TestSchemas.INT_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_LIST_RECORD;
import static org.apache.avro.TestSchemas.LONG_MAP_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_SCHEMA;
import static org.apache.avro.TestSchemas.NULL_SCHEMA;
import static org.apache.avro.TestSchemas.STRING_SCHEMA;
/**
 * Parameterized tests asserting that each reader/writer schema pair below is
 * reported as a {@code TYPE_MISMATCH} incompatibility, with the expected
 * detail message and location in the schema tree.
 */
public class TestSchemaCompatibilityTypeMismatch {
  /**
   * Rows: reader schema, writer schema, expected detail message, and the
   * path-style location of the mismatch.
   */
  public static Stream<Arguments> data() {
    return Stream.of(
        Arguments.of(NULL_SCHEMA, INT_SCHEMA, "reader type: NULL not compatible with writer type: INT", "/"),
        Arguments.of(NULL_SCHEMA, LONG_SCHEMA, "reader type: NULL not compatible with writer type: LONG", "/"),
        Arguments.of(BOOLEAN_SCHEMA, INT_SCHEMA, "reader type: BOOLEAN not compatible with writer type: INT", "/"),
        Arguments.of(INT_SCHEMA, NULL_SCHEMA, "reader type: INT not compatible with writer type: NULL", "/"),
        Arguments.of(INT_SCHEMA, BOOLEAN_SCHEMA, "reader type: INT not compatible with writer type: BOOLEAN", "/"),
        Arguments.of(INT_SCHEMA, LONG_SCHEMA, "reader type: INT not compatible with writer type: LONG", "/"),
        Arguments.of(INT_SCHEMA, FLOAT_SCHEMA, "reader type: INT not compatible with writer type: FLOAT", "/"),
        Arguments.of(INT_SCHEMA, DOUBLE_SCHEMA, "reader type: INT not compatible with writer type: DOUBLE", "/"),
        Arguments.of(LONG_SCHEMA, FLOAT_SCHEMA, "reader type: LONG not compatible with writer type: FLOAT", "/"),
        Arguments.of(LONG_SCHEMA, DOUBLE_SCHEMA, "reader type: LONG not compatible with writer type: DOUBLE", "/"),
        Arguments.of(FLOAT_SCHEMA, DOUBLE_SCHEMA, "reader type: FLOAT not compatible with writer type: DOUBLE", "/"),
        Arguments.of(DOUBLE_SCHEMA, STRING_SCHEMA, "reader type: DOUBLE not compatible with writer type: STRING", "/"),
        Arguments.of(FIXED_4_BYTES, STRING_SCHEMA, "reader type: FIXED not compatible with writer type: STRING", "/"),
        Arguments.of(STRING_SCHEMA, BOOLEAN_SCHEMA, "reader type: STRING not compatible with writer type: BOOLEAN",
            "/"),
        Arguments.of(STRING_SCHEMA, INT_SCHEMA, "reader type: STRING not compatible with writer type: INT", "/"),
        Arguments.of(BYTES_SCHEMA, NULL_SCHEMA, "reader type: BYTES not compatible with writer type: NULL", "/"),
        Arguments.of(BYTES_SCHEMA, INT_SCHEMA, "reader type: BYTES not compatible with writer type: INT", "/"),
        Arguments.of(A_INT_RECORD1, INT_SCHEMA, "reader type: RECORD not compatible with writer type: INT", "/"),
        // Container types: the mismatch is located at the element/value schema.
        Arguments.of(INT_ARRAY_SCHEMA, LONG_ARRAY_SCHEMA, "reader type: INT not compatible with writer type: LONG",
            "/items"),
        Arguments.of(INT_MAP_SCHEMA, INT_ARRAY_SCHEMA, "reader type: MAP not compatible with writer type: ARRAY", "/"),
        Arguments.of(INT_ARRAY_SCHEMA, INT_MAP_SCHEMA, "reader type: ARRAY not compatible with writer type: MAP", "/"),
        Arguments.of(INT_MAP_SCHEMA, LONG_MAP_SCHEMA, "reader type: INT not compatible with writer type: LONG",
            "/values"),
        Arguments.of(INT_SCHEMA, ENUM2_AB_SCHEMA, "reader type: INT not compatible with writer type: ENUM", "/"),
        Arguments.of(ENUM2_AB_SCHEMA, INT_SCHEMA, "reader type: ENUM not compatible with writer type: INT", "/"),
        // Unions: the location indexes the offending writer branch.
        Arguments.of(FLOAT_SCHEMA, INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA,
            "reader type: FLOAT not compatible with writer type: DOUBLE", "/3"),
        Arguments.of(LONG_SCHEMA, INT_FLOAT_UNION_SCHEMA, "reader type: LONG not compatible with writer type: FLOAT",
            "/1"),
        Arguments.of(INT_SCHEMA, INT_FLOAT_UNION_SCHEMA, "reader type: INT not compatible with writer type: FLOAT",
            "/1"),
        Arguments.of(INT_LIST_RECORD, LONG_LIST_RECORD, "reader type: INT not compatible with writer type: LONG",
            "/fields/0/type"),
        // NOTE(review): duplicate of the first case above — possibly unintentional.
        Arguments.of(NULL_SCHEMA, INT_SCHEMA, "reader type: NULL not compatible with writer type: INT", "/"));
  }

  @ParameterizedTest
  @MethodSource("data")
  public void testTypeMismatchSchemas(Schema reader, Schema writer, String details, String location) throws Exception {
    validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.TYPE_MISMATCH, details, location);
  }
}
| 7,114 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityNameMismatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.A_DINT_B_DENUM_1_RECORD1;
import static org.apache.avro.TestSchemas.A_DINT_B_DENUM_2_RECORD1;
import static org.apache.avro.TestSchemas.EMPTY_RECORD1;
import static org.apache.avro.TestSchemas.EMPTY_RECORD2;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM2_AB_SCHEMA;
import static org.apache.avro.TestSchemas.FIXED_4_BYTES;
/**
 * Parameterized tests asserting that a named type (enum, record, fixed) whose
 * full name differs between writer and reader is reported as a
 * {@code NAME_MISMATCH} incompatibility at the expected location.
 */
public class TestSchemaCompatibilityNameMismatch {
  // Same shape as FIXED_4_BYTES (size 4) but with a different name.
  private static final Schema FIXED_4_ANOTHER_NAME = Schema.createFixed("AnotherName", null, null, 4);

  /** Rows: reader schema, writer schema, expected detail message, location. */
  public static Stream<Arguments> data() {
    return Stream.of(Arguments.of(ENUM1_AB_SCHEMA, ENUM2_AB_SCHEMA, "expected: Enum2", "/name"),
        Arguments.of(EMPTY_RECORD2, EMPTY_RECORD1, "expected: Record1", "/name"),
        Arguments.of(FIXED_4_BYTES, FIXED_4_ANOTHER_NAME, "expected: AnotherName", "/name"),
        Arguments.of(A_DINT_B_DENUM_1_RECORD1, A_DINT_B_DENUM_2_RECORD1, "expected: Enum2", "/fields/1/type/name"));
  }

  @ParameterizedTest
  @MethodSource("data")
  public void testNameMismatchSchemas(Schema reader, Schema writer, String details, String location) throws Exception {
    validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.NAME_MISMATCH, details, location);
  }
}
| 7,115 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMultiple.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.api.Test;
/**
 * Builds one reader and one writer schema that disagree in many places at
 * once, and asserts that ALL incompatibilities are reported together, in
 * order. The numbered comments (0, 1, 2, 3.0, ...) label corresponding fields
 * in the two schemas so the deliberate differences can be compared
 * side-by-side.
 */
public class TestSchemaCompatibilityMultiple {
  @Test
  void multipleIncompatibilities() throws Exception {
    Schema reader = SchemaBuilder.record("base").fields()
        // 0
        .name("check_enum_symbols_field").type().enumeration("check_enum_symbols_type").symbols("A", "C").noDefault()
        // 1
        .name("check_enum_name_field").type().enumeration("check_enum_name_type").symbols("A", "B", "C", "D")
        .noDefault()
        // 2
        .name("type_mismatch_field").type().stringType().noDefault()
        // 3
        .name("sub_record").type().record("sub_record_type").fields()
        // 3.0
        .name("identical_1_field").type().longType().longDefault(42L)
        // 3.1
        .name("extra_no_default_field").type().longType().noDefault()
        // 3.2
        .name("fixed_length_mismatch_field").type().fixed("fixed_length_mismatch_type").size(4).noDefault()
        // 3.3
        .name("union_missing_branches_field").type().unionOf().booleanType().endUnion().noDefault()
        // 3.4
        .name("reader_union_does_not_support_type_field").type().unionOf().booleanType().endUnion().noDefault()
        // 3.5
        .name("record_fqn_mismatch_field").type().record("recordA").namespace("not_nsA").fields()
        // 3.5.0
        .name("A_field_0").type().booleanType().booleanDefault(true)
        // 3.5.1
        .name("array_type_mismatch_field").type().array().items().stringType().noDefault()
        // EOR
        .endRecord().noDefault()
        // EOR
        .endRecord().noDefault()
        // EOR
        .endRecord();
    Schema writer = SchemaBuilder.record("base").fields()
        // 0
        .name("check_enum_symbols_field").type().enumeration("check_enum_symbols_type").symbols("A", "B", "C", "D")
        .noDefault()
        // 1
        .name("check_enum_name_field").type().enumeration("check_enum_name_type_ERR").symbols("A", "B", "C", "D")
        .noDefault()
        // 2
        .name("type_mismatch_field").type().longType().noDefault()
        // 3
        .name("sub_record").type().record("sub_record_type").fields()
        // 3.0
        .name("identical_1_field").type().longType().longDefault(42L)
        // 3.1
        // MISSING FIELD
        // 3.2
        .name("fixed_length_mismatch_field").type().fixed("fixed_length_mismatch_type").size(8).noDefault()
        // 3.3
        .name("union_missing_branches_field").type().unionOf().booleanType().and().doubleType().and().stringType()
        .endUnion().noDefault()
        // 3.4
        .name("reader_union_does_not_support_type_field").type().longType().noDefault()
        // 3.5
        .name("record_fqn_mismatch_field").type().record("recordA").namespace("nsA").fields()
        // 3.5.0
        .name("A_field_0").type().booleanType().booleanDefault(true)
        // 3.5.1
        .name("array_type_mismatch_field").type().array().items().booleanType().noDefault()
        // EOR
        .endRecord().noDefault()
        // EOR
        .endRecord().noDefault()
        // EOR
        .endRecord();
    // The three lists below are parallel: element i of each describes the
    // i-th expected incompatibility (kind, detail message, location).
    List<SchemaIncompatibilityType> types = Arrays.asList(SchemaIncompatibilityType.MISSING_ENUM_SYMBOLS,
        SchemaIncompatibilityType.NAME_MISMATCH, SchemaIncompatibilityType.TYPE_MISMATCH,
        SchemaIncompatibilityType.READER_FIELD_MISSING_DEFAULT_VALUE, SchemaIncompatibilityType.FIXED_SIZE_MISMATCH,
        SchemaIncompatibilityType.MISSING_UNION_BRANCH, SchemaIncompatibilityType.MISSING_UNION_BRANCH,
        SchemaIncompatibilityType.MISSING_UNION_BRANCH, SchemaIncompatibilityType.TYPE_MISMATCH);
    List<String> details = Arrays.asList("[B, D]", "expected: check_enum_name_type_ERR",
        "reader type: STRING not compatible with writer type: LONG", "extra_no_default_field", "expected: 8, found: 4",
        "reader union lacking writer type: DOUBLE", "reader union lacking writer type: STRING",
        "reader union lacking writer type: LONG", "reader type: STRING not compatible with writer type: BOOLEAN");
    List<String> location = Arrays.asList("/fields/0/type/symbols", "/fields/1/type/name", "/fields/2/type",
        "/fields/3/type/fields/1", "/fields/3/type/fields/2/type/size", "/fields/3/type/fields/3/type/1",
        "/fields/3/type/fields/3/type/2", "/fields/3/type/fields/4/type",
        "/fields/3/type/fields/5/type/fields/1/type/items");
    validateIncompatibleSchemas(reader, writer, types, details, location);
  }
}
| 7,116 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TypeEnum.java | /**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
 * Generated Avro enum fixture (see the file header: DO NOT EDIT DIRECTLY)
 * with symbols a, b, c, used by tests in this package.
 */
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public enum TypeEnum implements org.apache.avro.generic.GenericEnumSymbol<TypeEnum> {
  a, b, c;

  // Schema of this enum, parsed once at class-load time.
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
      "{\"type\":\"enum\",\"name\":\"TypeEnum\",\"namespace\":\"org.apache.avro\",\"symbols\":[\"a\",\"b\",\"c\"]}");

  /** Returns the enum's schema without requiring an instance. */
  public static org.apache.avro.Schema getClassSchema() {
    return SCHEMA$;
  }

  /** Returns the enum's schema (GenericEnumSymbol contract). */
  public org.apache.avro.Schema getSchema() {
    return SCHEMA$;
  }
}
| 7,117 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemas.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import org.apache.avro.Schema.Field;
/**
* Schemas used by other tests in this package. Therefore mostly package
* protected.
*/
public class TestSchemas {
static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL);
static final Schema BOOLEAN_SCHEMA = Schema.create(Schema.Type.BOOLEAN);
static final Schema INT_SCHEMA = Schema.create(Schema.Type.INT);
static final Schema LONG_SCHEMA = Schema.create(Schema.Type.LONG);
static final Schema FLOAT_SCHEMA = Schema.create(Schema.Type.FLOAT);
static final Schema DOUBLE_SCHEMA = Schema.create(Schema.Type.DOUBLE);
static final Schema STRING_SCHEMA = Schema.create(Schema.Type.STRING);
static final Schema BYTES_SCHEMA = Schema.create(Schema.Type.BYTES);
static final Schema INT_ARRAY_SCHEMA = Schema.createArray(INT_SCHEMA);
static final Schema LONG_ARRAY_SCHEMA = Schema.createArray(LONG_SCHEMA);
static final Schema STRING_ARRAY_SCHEMA = Schema.createArray(STRING_SCHEMA);
static final Schema INT_MAP_SCHEMA = Schema.createMap(INT_SCHEMA);
static final Schema LONG_MAP_SCHEMA = Schema.createMap(LONG_SCHEMA);
static final Schema STRING_MAP_SCHEMA = Schema.createMap(STRING_SCHEMA);
static final Schema ENUM1_AB_SCHEMA = Schema.createEnum("Enum1", null, null, list("A", "B"));
static final Schema ENUM1_AB_SCHEMA_DEFAULT = Schema.createEnum("Enum1", null, null, list("A", "B"), "A");
public static final Schema ENUM1_AB_SCHEMA_NAMESPACE_1 = Schema.createEnum("Enum1", null, "namespace1",
list("A", "B"));
public static final Schema ENUM1_AB_SCHEMA_NAMESPACE_2 = Schema.createEnum("Enum1", null, "namespace2",
list("A", "B"));
static final Schema ENUM1_ABC_SCHEMA = Schema.createEnum("Enum1", null, null, list("A", "B", "C"));
static final Schema ENUM1_BC_SCHEMA = Schema.createEnum("Enum1", null, null, list("B", "C"));
static final Schema ENUM2_AB_SCHEMA = Schema.createEnum("Enum2", null, null, list("A", "B"));
static final Schema ENUM_ABC_ENUM_DEFAULT_A_SCHEMA = Schema.createEnum("Enum", null, null, list("A", "B", "C"), "A");
static final Schema ENUM_AB_ENUM_DEFAULT_A_SCHEMA = Schema.createEnum("Enum", null, null, list("A", "B"), "A");
static final Schema ENUM_ABC_ENUM_DEFAULT_A_RECORD = Schema.createRecord("Record", null, null, false);
static final Schema ENUM_AB_ENUM_DEFAULT_A_RECORD = Schema.createRecord("Record", null, null, false);
static final Schema ENUM_ABC_FIELD_DEFAULT_B_ENUM_DEFAULT_A_RECORD = Schema.createRecord("Record", null, null, false);
static final Schema ENUM_AB_FIELD_DEFAULT_A_ENUM_DEFAULT_B_RECORD = Schema.createRecord("Record", null, null, false);
static {
ENUM_ABC_ENUM_DEFAULT_A_RECORD.setFields(
list(new Schema.Field("Field", Schema.createEnum("Schema", null, null, list("A", "B", "C"), "A"), null, null)));
ENUM_AB_ENUM_DEFAULT_A_RECORD.setFields(
list(new Schema.Field("Field", Schema.createEnum("Schema", null, null, list("A", "B"), "A"), null, null)));
ENUM_ABC_FIELD_DEFAULT_B_ENUM_DEFAULT_A_RECORD.setFields(
list(new Schema.Field("Field", Schema.createEnum("Schema", null, null, list("A", "B", "C"), "A"), null, "B")));
ENUM_AB_FIELD_DEFAULT_A_ENUM_DEFAULT_B_RECORD.setFields(
list(new Schema.Field("Field", Schema.createEnum("Schema", null, null, list("A", "B"), "B"), null, "A")));
}
static final Schema EMPTY_UNION_SCHEMA = Schema.createUnion(new ArrayList<>());
static final Schema NULL_UNION_SCHEMA = Schema.createUnion(list(NULL_SCHEMA));
static final Schema INT_UNION_SCHEMA = Schema.createUnion(list(INT_SCHEMA));
static final Schema LONG_UNION_SCHEMA = Schema.createUnion(list(LONG_SCHEMA));
static final Schema FLOAT_UNION_SCHEMA = Schema.createUnion(list(FLOAT_SCHEMA));
static final Schema DOUBLE_UNION_SCHEMA = Schema.createUnion(list(DOUBLE_SCHEMA));
static final Schema STRING_UNION_SCHEMA = Schema.createUnion(list(STRING_SCHEMA));
static final Schema BYTES_UNION_SCHEMA = Schema.createUnion(list(BYTES_SCHEMA));
static final Schema INT_STRING_UNION_SCHEMA = Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA));
static final Schema STRING_INT_UNION_SCHEMA = Schema.createUnion(list(STRING_SCHEMA, INT_SCHEMA));
static final Schema INT_FLOAT_UNION_SCHEMA = Schema.createUnion(list(INT_SCHEMA, FLOAT_SCHEMA));
static final Schema INT_LONG_UNION_SCHEMA = Schema.createUnion(list(INT_SCHEMA, LONG_SCHEMA));
static final Schema INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA = Schema
.createUnion(list(INT_SCHEMA, LONG_SCHEMA, FLOAT_SCHEMA, DOUBLE_SCHEMA));
static final Schema NULL_INT_ARRAY_UNION_SCHEMA = Schema.createUnion(list(NULL_SCHEMA, INT_ARRAY_SCHEMA));
static final Schema NULL_INT_MAP_UNION_SCHEMA = Schema.createUnion(list(NULL_SCHEMA, INT_MAP_SCHEMA));
// Non recursive records:
static final Schema EMPTY_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema EMPTY_RECORD2 = Schema.createRecord("Record2", null, null, false);
static final Schema A_INT_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_LONG_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_INT_B_INT_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_RECORD1 = // DTYPE means TYPE with default value
Schema.createRecord("Record1", null, null, false);
static final Schema A_INT_B_DINT_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DINT_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DFIXED_4_BYTES_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DFIXED_8_BYTES_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DINT_STRING_UNION_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DINT_UNION_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DENUM_1_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema A_DINT_B_DENUM_2_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema FIXED_4_BYTES = Schema.createFixed("Fixed", null, null, 4);
static final Schema FIXED_8_BYTES = Schema.createFixed("Fixed", null, null, 8);
static final Schema NS_RECORD1 = Schema.createRecord("Record1", null, null, false);
static final Schema NS_RECORD2 = Schema.createRecord("Record1", null, null, false);
static final Schema NS_INNER_RECORD1 = Schema.createRecord("InnerRecord1", null, "ns1", false);
static final Schema NS_INNER_RECORD2 = Schema.createRecord("InnerRecord1", null, "ns2", false);
static final Schema WITHOUT_NS = Schema.createRecord("Record", null, null, false);
static final Schema WITH_NS = Schema.createRecord("ns.Record", null, null, false);
static {
EMPTY_RECORD1.setFields(Collections.emptyList());
EMPTY_RECORD2.setFields(Collections.emptyList());
A_INT_RECORD1.setFields(list(new Field("a", INT_SCHEMA, null, null)));
A_LONG_RECORD1.setFields(list(new Field("a", LONG_SCHEMA, null, null)));
A_INT_B_INT_RECORD1.setFields(list(new Field("a", INT_SCHEMA, null, null), new Field("b", INT_SCHEMA, null, null)));
A_DINT_RECORD1.setFields(list(new Field("a", INT_SCHEMA, null, 0)));
A_INT_B_DINT_RECORD1.setFields(list(new Field("a", INT_SCHEMA, null, null), new Field("b", INT_SCHEMA, null, 0)));
A_DINT_B_DINT_RECORD1.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", INT_SCHEMA, null, 0)));
A_DINT_B_DFIXED_4_BYTES_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", FIXED_4_BYTES, null, null)));
A_DINT_B_DFIXED_8_BYTES_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", FIXED_8_BYTES, null, null)));
A_DINT_B_DINT_STRING_UNION_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", INT_STRING_UNION_SCHEMA, null, 0)));
A_DINT_B_DINT_UNION_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", INT_UNION_SCHEMA, null, 0)));
A_DINT_B_DENUM_1_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", ENUM1_AB_SCHEMA, null, null)));
A_DINT_B_DENUM_2_RECORD1
.setFields(list(new Field("a", INT_SCHEMA, null, 0), new Field("b", ENUM2_AB_SCHEMA, null, null)));
NS_INNER_RECORD1.setFields(list(new Schema.Field("a", INT_SCHEMA)));
NS_INNER_RECORD2.setFields(list(new Schema.Field("a", INT_SCHEMA)));
NS_RECORD1
.setFields(list(new Schema.Field("f1", Schema.createUnion(NULL_SCHEMA, Schema.createArray(NS_INNER_RECORD1)))));
NS_RECORD2
.setFields(list(new Schema.Field("f1", Schema.createUnion(NULL_SCHEMA, Schema.createArray(NS_INNER_RECORD2)))));
WITH_NS.setFields(list(new Field("f1", INT_SCHEMA, null, null)));
WITHOUT_NS.setFields(list(new Field("f1", INT_SCHEMA, null, null)));
}
// Recursive records
static final Schema INT_LIST_RECORD = Schema.createRecord("List", null, null, false);
static final Schema LONG_LIST_RECORD = Schema.createRecord("List", null, null, false);
static {
INT_LIST_RECORD
.setFields(list(new Field("head", INT_SCHEMA, null, null), new Field("tail", INT_LIST_RECORD, null, null)));
LONG_LIST_RECORD
.setFields(list(new Field("head", LONG_SCHEMA, null, null), new Field("tail", LONG_LIST_RECORD, null, null)));
}
// -----------------------------------------------------------------------------------------------
/** Reader/writer schema pair. */
static final class ReaderWriter {
private final Schema mReader;
private final Schema mWriter;
public ReaderWriter(final Schema reader, final Schema writer) {
mReader = reader;
mWriter = writer;
}
public Schema getReader() {
return mReader;
}
public Schema getWriter() {
return mWriter;
}
}
/** Borrowed from the Guava library. */
static <E> ArrayList<E> list(E... elements) {
final ArrayList<E> list = new ArrayList<>();
Collections.addAll(list, elements);
return list;
}
/**
 * Asserts that the compact JSON rendering of {@code schemaSubset} appears
 * verbatim inside the compact JSON rendering of {@code original}.
 */
static void assertSchemaContains(Schema schemaSubset, Schema original) {
  final String needle = schemaSubset.toString(false);
  final String haystack = original.toString(false);
  assertTrue(haystack.contains(needle), String.format("Subset '%s' not found in '%s'", needle, haystack));
}
}
| 7,118 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityFixedSizeMismatch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.A_DINT_B_DFIXED_4_BYTES_RECORD1;
import static org.apache.avro.TestSchemas.A_DINT_B_DFIXED_8_BYTES_RECORD1;
import static org.apache.avro.TestSchemas.FIXED_4_BYTES;
import static org.apache.avro.TestSchemas.FIXED_8_BYTES;
/**
 * Verifies that reader/writer {@code fixed} schemas whose sizes differ are
 * reported as {@code FIXED_SIZE_MISMATCH}, with the expected detail message
 * and error location (both bare fixed types and fixed types nested in a
 * record field).
 */
public class TestSchemaCompatibilityFixedSizeMismatch {

  /** Each row: reader schema, writer schema, expected details, expected location. */
  public static Stream<Arguments> data() {
    final Arguments[] cases = { Arguments.of(FIXED_4_BYTES, FIXED_8_BYTES, "expected: 8, found: 4", "/size"),
        Arguments.of(FIXED_8_BYTES, FIXED_4_BYTES, "expected: 4, found: 8", "/size"),
        Arguments.of(A_DINT_B_DFIXED_8_BYTES_RECORD1, A_DINT_B_DFIXED_4_BYTES_RECORD1, "expected: 4, found: 8",
            "/fields/1/type/size"),
        Arguments.of(A_DINT_B_DFIXED_4_BYTES_RECORD1, A_DINT_B_DFIXED_8_BYTES_RECORD1, "expected: 8, found: 4",
            "/fields/1/type/size") };
    return Stream.of(cases);
  }

  @ParameterizedTest
  @MethodSource("data")
  void fixedSizeMismatchSchemas(Schema reader, Schema writer, String details, String location) {
    validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.FIXED_SIZE_MISMATCH, details, location);
  }
}
| 7,119 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestNestedRecords.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.JsonDecoder;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
/**
* This test demonstrates the fix for a complex nested schema type.
*/
/**
 * This test demonstrates the fix for a complex nested schema type: a record
 * field whose type is itself a record, decoded from JSON.
 */
public class TestNestedRecords {

  /** Builds the Child record schema with a single required string field. */
  private static Schema childSchema() {
    return SchemaBuilder.record("Child").namespace("org.apache.avro.nested").fields().requiredString("childField")
        .endRecord();
  }

  /** Builds the Parent record schema embedding {@code child} between two string fields. */
  private static Schema parentSchema(Schema child) {
    return SchemaBuilder.record("Parent").namespace("org.apache.avro.nested").fields().requiredString("parentField1")
        .name("child1").type(child).noDefault().requiredString("parentField2").endRecord();
  }

  /** JSON-decodes {@code json} against {@code schema} and returns the resulting record. */
  private static GenericData.Record decode(Schema schema, String json) throws IOException {
    final ByteArrayInputStream inputStream = new ByteArrayInputStream(json.getBytes(UTF_8));
    final JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, inputStream);
    final DatumReader<Object> reader = new GenericDatumReader<>(schema);
    return (GenericData.Record) reader.read(null, decoder);
  }

  /** Asserts the parent fields and the nested child field were decoded intact. */
  private static void assertDecodedValues(GenericData.Record decoded) {
    assertThat(decoded.get("parentField1").toString(), equalTo("parentValue1"));
    assertThat(decoded.get("parentField2").toString(), equalTo("parentValue2"));
    assertThat(((GenericData.Record) decoded.get("child1")).get("childField").toString(), equalTo("childValue1"));
  }

  @Test
  void singleSubRecord() throws IOException {
    final String inputAsExpected = "{\n" + " \"parentField1\": \"parentValue1\",\n" + " \"child1\":{\n"
        + " \"childField\":\"childValue1\"\n" + " },\n" + " \"parentField2\":\"parentValue2\"\n" + "}";
    assertDecodedValues(decode(parentSchema(childSchema()), inputAsExpected));
  }

  @Test
  void singleSubRecordExtraField() throws IOException {
    final String inputAsExpected = "{\n" + " \"parentField1\": \"parentValue1\",\n" + " \"child1\":{\n"
        + " \"childField\":\"childValue1\",\n" +
        // this field should be safely ignored
        " \"extraField\":\"extraValue\"\n" + " },\n" + " \"parentField2\":\"parentValue2\"\n" + "}";
    assertDecodedValues(decode(parentSchema(childSchema()), inputAsExpected));
  }
}
| 7,120 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestLogicalType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.hamcrest.collection.IsMapContaining;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.concurrent.Callable;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for Avro logical types: construction from schemas, validation of
 * decimal precision/scale bounds, JSON round-trips, equality semantics, and
 * registration of custom {@link LogicalTypes.LogicalTypeFactory} instances.
 */
public class TestLogicalType {

  @Test
  void decimalFromSchema() {
    Schema schema = Schema.createFixed("aFixed", null, null, 4);
    schema.addProp("logicalType", "decimal");
    schema.addProp("precision", 9);
    schema.addProp("scale", 2);
    LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
    assertTrue(logicalType instanceof LogicalTypes.Decimal, "Should be a Decimal");
    LogicalTypes.Decimal decimal = (LogicalTypes.Decimal) logicalType;
    assertEquals(9, decimal.getPrecision(), "Should have correct precision");
    assertEquals(2, decimal.getScale(), "Should have correct scale");
  }

  @Test
  void invalidLogicalTypeIgnored() {
    // fixed(2) cannot hold 9 decimal digits, so the logical type is invalid.
    final Schema schema = Schema.createFixed("aFixed", null, null, 2);
    schema.addProp("logicalType", "decimal");
    schema.addProp("precision", 9);
    schema.addProp("scale", 2);
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Should ignore invalid logical type");
  }

  @Test
  void decimalWithNonByteArrayTypes() {
    final LogicalType decimal = LogicalTypes.decimal(5, 2);
    // every schema type other than bytes/fixed must be rejected
    Schema[] nonBytes = new Schema[] { Schema.createRecord("Record", null, null, false),
        Schema.createArray(Schema.create(Schema.Type.BYTES)), Schema.createMap(Schema.create(Schema.Type.BYTES)),
        Schema.createEnum("Enum", null, null, Arrays.asList("a", "b")),
        Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.BYTES), Schema.createFixed("fixed", null, null, 4))),
        Schema.create(Schema.Type.BOOLEAN), Schema.create(Schema.Type.INT), Schema.create(Schema.Type.LONG),
        Schema.create(Schema.Type.FLOAT), Schema.create(Schema.Type.DOUBLE), Schema.create(Schema.Type.NULL),
        Schema.create(Schema.Type.STRING) };
    for (final Schema schema : nonBytes) {
      assertThrows("Should reject type: " + schema.getType(), IllegalArgumentException.class,
          "Logical type decimal must be backed by fixed or bytes", () -> {
            decimal.addToSchema(schema);
            return null;
          });
    }
  }

  @Test
  void unknownFromJsonNode() {
    Schema schema = Schema.create(Schema.Type.STRING);
    schema.addProp("logicalType", "unknown");
    schema.addProp("someProperty", 34);
    LogicalType logicalType = LogicalTypes.fromSchemaIgnoreInvalid(schema);
    assertNull(logicalType, "Should not return a LogicalType instance");
  }

  @Test
  void decimalBytesHasNoPrecisionLimit() {
    Schema schema = Schema.create(Schema.Type.BYTES);
    // precision is not limited for bytes
    LogicalTypes.decimal(Integer.MAX_VALUE).addToSchema(schema);
    assertEquals(Integer.MAX_VALUE,
        ((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getPrecision(),
        "Precision should be an Integer.MAX_VALUE");
  }

  @Test
  void decimalFixedPrecisionLimit() {
    // 4 bytes can hold up to 9 digits of precision
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    assertThrows("Should reject precision", IllegalArgumentException.class, "fixed(4) cannot store 10 digits (max 9)",
        () -> {
          LogicalTypes.decimal(10).addToSchema(schema);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
    // 129 bytes can hold up to 310 digits of precision
    final Schema schema129 = Schema.createFixed("aDecimal", null, null, 129);
    assertThrows("Should reject precision", IllegalArgumentException.class,
        "fixed(129) cannot store 311 digits (max 310)", () -> {
          LogicalTypes.decimal(311).addToSchema(schema129);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema129), "Invalid logical type should not be set on schema");
  }

  @Test
  void decimalFailsWithZeroPrecision() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    assertThrows("Should reject precision", IllegalArgumentException.class,
        "Invalid decimal precision: 0 (must be positive)", () -> {
          LogicalTypes.decimal(0).addToSchema(schema);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
  }

  @Test
  void decimalFailsWithNegativePrecision() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    assertThrows("Should reject precision", IllegalArgumentException.class,
        "Invalid decimal precision: -9 (must be positive)", () -> {
          LogicalTypes.decimal(-9).addToSchema(schema);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
  }

  @Test
  void decimalScaleBoundedByPrecision() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    assertThrows("Should reject precision", IllegalArgumentException.class,
        "Invalid decimal scale: 10 (greater than precision: 9)", () -> {
          LogicalTypes.decimal(9, 10).addToSchema(schema);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
  }

  @Test
  void decimalFailsWithNegativeScale() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    assertThrows("Should reject precision", IllegalArgumentException.class,
        "Invalid decimal scale: -2 (must be positive)", () -> {
          LogicalTypes.decimal(9, -2).addToSchema(schema);
          return null;
        });
    assertNull(LogicalTypes.fromSchemaIgnoreInvalid(schema), "Invalid logical type should not be set on schema");
  }

  @Test
  void schemaRejectsSecondLogicalType() {
    final Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    LogicalTypes.decimal(9).addToSchema(schema);
    assertThrows("Should reject second logical type", AvroRuntimeException.class, "Can't overwrite property: scale",
        () -> {
          LogicalTypes.decimal(9, 2).addToSchema(schema);
          return null;
        });
    assertEquals(LogicalTypes.decimal(9), LogicalTypes.fromSchemaIgnoreInvalid(schema),
        "First logical type should still be set on schema");
  }

  @Test
  void decimalDefaultScale() {
    Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    // 4 bytes can hold up to 9 digits of precision
    LogicalTypes.decimal(9).addToSchema(schema);
    assertEquals(0, ((LogicalTypes.Decimal) LogicalTypes.fromSchemaIgnoreInvalid(schema)).getScale(),
        "Scale should be a 0");
  }

  @Test
  void fixedDecimalToFromJson() {
    Schema schema = Schema.createFixed("aDecimal", null, null, 4);
    LogicalTypes.decimal(9, 2).addToSchema(schema);
    Schema parsed = new Schema.Parser().parse(schema.toString(true));
    assertEquals(schema, parsed, "Constructed and parsed schemas should match");
  }

  @Test
  void bytesDecimalToFromJson() {
    Schema schema = Schema.create(Schema.Type.BYTES);
    LogicalTypes.decimal(9, 2).addToSchema(schema);
    Schema parsed = new Schema.Parser().parse(schema.toString(true));
    assertEquals(schema, parsed, "Constructed and parsed schemas should match");
  }

  @Test
  void uuidExtendsString() {
    Schema uuidSchema = LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.STRING));
    assertEquals(LogicalTypes.uuid(), uuidSchema.getLogicalType());
    assertThrows("UUID requires a string", IllegalArgumentException.class,
        "Uuid can only be used with an underlying string type",
        () -> LogicalTypes.uuid().addToSchema(Schema.create(Schema.Type.INT)));
  }

  @Test
  void durationExtendsFixed12() {
    Schema durationSchema = LogicalTypes.duration().addToSchema(Schema.createFixed("f", null, null, 12));
    assertEquals(LogicalTypes.duration(), durationSchema.getLogicalType());
    assertThrows("Duration requires a fixed(12)", IllegalArgumentException.class,
        "Duration can only be used with an underlying fixed type of size 12.",
        () -> LogicalTypes.duration().addToSchema(Schema.create(Schema.Type.INT)));
    assertThrows("Duration requires a fixed(12)", IllegalArgumentException.class,
        "Duration can only be used with an underlying fixed type of size 12.",
        () -> LogicalTypes.duration().addToSchema(Schema.createFixed("wrong", null, null, 42)));
  }

  @Test
  void logicalTypeEquals() {
    LogicalTypes.Decimal decimal90 = LogicalTypes.decimal(9);
    LogicalTypes.Decimal decimal80 = LogicalTypes.decimal(8);
    LogicalTypes.Decimal decimal92 = LogicalTypes.decimal(9, 2);
    assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 0), decimal90);
    assertEqualsTrue("Same decimal", LogicalTypes.decimal(8, 0), decimal80);
    assertEqualsTrue("Same decimal", LogicalTypes.decimal(9, 2), decimal92);
    assertEqualsFalse("Different logical type", LogicalTypes.uuid(), decimal90);
    assertEqualsFalse("Different precision", decimal90, decimal80);
    assertEqualsFalse("Different scale", decimal90, decimal92);
  }

  @Test
  void logicalTypeInSchemaEquals() {
    Schema schema1 = Schema.createFixed("aDecimal", null, null, 4);
    Schema schema2 = Schema.createFixed("aDecimal", null, null, 4);
    Schema schema3 = Schema.createFixed("aDecimal", null, null, 4);
    assertNotSame(schema1, schema2);
    assertNotSame(schema1, schema3);
    assertEqualsTrue("No logical types", schema1, schema2);
    assertEqualsTrue("No logical types", schema1, schema3);
    LogicalTypes.decimal(9).addToSchema(schema1);
    assertEqualsFalse("Two has no logical type", schema1, schema2);
    LogicalTypes.decimal(9).addToSchema(schema2);
    assertEqualsTrue("Same logical types", schema1, schema2);
    LogicalTypes.decimal(9, 2).addToSchema(schema3);
    assertEqualsFalse("Different logical type", schema1, schema3);
  }

  @Test
  void registerLogicalTypeThrowsIfTypeNameNotProvided() {
    assertThrows("Should error if type name was not provided", UnsupportedOperationException.class,
        "LogicalTypeFactory TypeName has not been provided", () -> {
          LogicalTypes.register(schema -> LogicalTypes.date());
          return null;
        });
  }

  // NOTE(review): the register* tests below mutate the global custom-type
  // registry; entries persist for the remainder of the JVM.
  @Test
  void registerLogicalTypeWithName() {
    final LogicalTypes.LogicalTypeFactory factory = new LogicalTypes.LogicalTypeFactory() {
      @Override
      public LogicalType fromSchema(Schema schema) {
        return LogicalTypes.date();
      }

      @Override
      public String getTypeName() {
        return "typename";
      }
    };
    // explicit name takes precedence over the factory's own type name
    LogicalTypes.register("registered", factory);
    assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("registered", factory));
  }

  @Test
  void registerLogicalTypeWithFactoryName() {
    final LogicalTypes.LogicalTypeFactory factory = new LogicalTypes.LogicalTypeFactory() {
      @Override
      public LogicalType fromSchema(Schema schema) {
        return LogicalTypes.date();
      }

      @Override
      public String getTypeName() {
        return "factory";
      }
    };
    LogicalTypes.register(factory);
    assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("factory", factory));
  }

  @Test
  void registerLogicalTypeWithFactoryNameNotProvided() {
    final LogicalTypes.LogicalTypeFactory factory = schema -> LogicalTypes.date();
    LogicalTypes.register("logicalTypeName", factory);
    assertThat(LogicalTypes.getCustomRegisteredTypes(), IsMapContaining.hasEntry("logicalTypeName", factory));
  }

  @Test
  public void testRegisterLogicalTypeFactoryByServiceLoader() {
    assertThat(LogicalTypes.getCustomRegisteredTypes(),
        IsMapContaining.hasEntry(equalTo("custom"), instanceOf(LogicalTypes.LogicalTypeFactory.class)));
  }

  /** Asserts symmetric equality of {@code o1} and {@code o2}. */
  public static void assertEqualsTrue(String message, Object o1, Object o2) {
    assertEquals(o1, o2, "Should be equal (forward): " + message);
    assertEquals(o2, o1, "Should be equal (reverse): " + message);
  }

  /** Asserts symmetric inequality of {@code o1} and {@code o2}. */
  public static void assertEqualsFalse(String message, Object o1, Object o2) {
    // Fixed: failure messages previously said "Should be equal", contradicting
    // the inequality being asserted.
    assertNotEquals(o1, o2, "Should not be equal (forward): " + message);
    assertNotEquals(o2, o1, "Should not be equal (reverse): " + message);
  }

  /**
   * A convenience method to avoid a large number of @Test(expected=...) tests
   *
   * @param message A String message to describe this assertion
   * @param expected An Exception class that the Runnable should throw
   * @param containedInMessage A String that should be contained by the thrown
   *          exception's message
   * @param callable A Callable that is expected to throw the exception
   */
  public static void assertThrows(String message, Class<? extends Exception> expected, String containedInMessage,
      Callable<?> callable) {
    try {
      callable.call();
      fail("No exception was thrown (" + message + "), expected: " + expected.getName());
    } catch (Exception actual) {
      assertEquals(expected, actual.getClass(), message);
      assertTrue(actual.getMessage().contains(containedInMessage),
          "Expected exception message (" + containedInMessage + ") missing: " + actual.getMessage());
    }
  }
}
| 7,121 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileConcat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.RandomData;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
// Exercises DataFileWriter.appendTo/appendAllFrom: two files are written with
// (possibly different) codecs, the second is appended onto the first, and the
// combined contents are read back and validated.
public class TestDataFileConcat {
  private static final Logger LOG = LoggerFactory.getLogger(TestDataFileConcat.class);

  // Per-test temporary directory for the generated .avro files.
  @TempDir
  public File DIR;

  // Each argument set: codec for file A, codec for file B, and whether
  // appendAllFrom should recompress blocks instead of copying them raw.
  public static Stream<Arguments> codecs() {
    return Stream.of(Arguments.of(null, null, false), Arguments.of(null, null, true),
        Arguments.of(CodecFactory.deflateCodec(1), CodecFactory.deflateCodec(6), false),
        Arguments.of(CodecFactory.deflateCodec(1), CodecFactory.deflateCodec(6), true),
        Arguments.of(CodecFactory.deflateCodec(3), CodecFactory.nullCodec(), false),
        Arguments.of(CodecFactory.nullCodec(), CodecFactory.deflateCodec(6), false),
        Arguments.of(CodecFactory.xzCodec(1), CodecFactory.xzCodec(2), false),
        Arguments.of(CodecFactory.xzCodec(1), CodecFactory.xzCodec(2), true),
        Arguments.of(CodecFactory.xzCodec(2), CodecFactory.nullCodec(), false),
        Arguments.of(CodecFactory.nullCodec(), CodecFactory.xzCodec(2), false));
  }

  // Record count and validation toggle are overridable via system properties.
  private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "200"));
  private static final boolean VALIDATE = !"false".equals(System.getProperty("test.validate", "true"));
  // Seed is printed so a failing run can be reproduced.
  private static final long SEED = System.currentTimeMillis();
  private static final String SCHEMA_JSON = "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
      + "{\"name\":\"stringField\", \"type\":\"string\"}" + "," + "{\"name\":\"longField\", \"type\":\"long\"}" + "]}";
  private static final Schema SCHEMA = new Schema.Parser().parse(SCHEMA_JSON);

  // Returns a file path inside the temp dir for the given test name.
  private File makeFile(String name) {
    return new File(DIR, "test-" + name + ".avro");
  }

  @ParameterizedTest
  @MethodSource("codecs")
  void concatenateFiles(CodecFactory codec, CodecFactory codec2, boolean recompress) throws IOException {
    System.out.println("SEED = " + SEED);
    System.out.println("COUNT = " + COUNT);
    // Vary the sync interval slightly across iterations to exercise
    // different block boundaries.
    for (int k = 0; k < 5; k++) {
      int syncInterval = 460 + k;
      RandomData data1 = new RandomData(SCHEMA, COUNT, SEED);
      RandomData data2 = new RandomData(SCHEMA, COUNT, SEED + 1);
      File file1 = makeFile((codec == null ? "null" : codec.toString()) + "-A");
      File file2 = makeFile((codec2 == null ? "null" : codec2.toString()) + "-B");
      // Write data1 to file1 with the first codec.
      DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(syncInterval);
      if (codec != null) {
        writer.setCodec(codec);
      }
      writer.create(SCHEMA, file1);
      try {
        for (Object datum : data1) {
          writer.append(datum);
        }
      } finally {
        writer.close();
      }
      // Write data2 to file2 with the second codec.
      DataFileWriter<Object> writer2 = new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(syncInterval);
      if (codec2 != null) {
        writer2.setCodec(codec2);
      }
      writer2.create(SCHEMA, file2);
      try {
        for (Object datum : data2) {
          writer2.append(datum);
        }
      } finally {
        writer2.close();
      }
      // Reopen file1 for appending and copy all blocks from file2 into it;
      // recompress controls whether blocks are re-encoded or copied raw.
      DataFileWriter<Object> concatinto = new DataFileWriter<>(new GenericDatumWriter<>())
          .setSyncInterval(syncInterval);
      concatinto.appendTo(file1);
      DataFileReader<Object> concatfrom = new DataFileReader<>(file2, new GenericDatumReader<>());
      concatinto.appendAllFrom(concatfrom, recompress);
      concatinto.close();
      concatfrom.close();
      // Re-read both files: file2 should be untouched, file1 should now
      // contain data1 followed by data2.
      concatfrom = new DataFileReader<>(file2, new GenericDatumReader<>());
      DataFileReader<Object> concat = new DataFileReader<>(file1, new GenericDatumReader<>());
      int count = 0;
      try {
        Object datum = null;
        if (VALIDATE) {
          for (Object expected : data1) {
            datum = concat.next(datum);
            assertEquals("at " + count++, expected, datum);
          }
          for (Object expected : data2) {
            datum = concatfrom.next(datum);
            assertEquals("at " + count++, expected, datum);
          }
          for (Object expected : data2) {
            datum = concat.next(datum);
            assertEquals("at " + count++, expected, datum);
          }
        } else {
          // Smoke read only; count is intentionally not incremented here.
          for (int i = 0; i < COUNT * 2; i++) {
            datum = concat.next(datum);
          }
        }
      } finally {
        // Diagnostic only: in the non-VALIDATE branch count stays 0, so this
        // always prints there — NOTE(review): presumably intended, confirm.
        if (count != 3 * COUNT) {
          System.out.println(count + " " + k);
        }
        concat.close();
        concatfrom.close();
      }
    }
  }
}
| 7,122 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/SchemaNameValidatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
/**
 * Parameterized checks of {@link Schema.NameValidator}: each row pairs a
 * validator (UTF or strict) with a candidate name and the expected validity.
 */
class SchemaNameValidatorTest {

  /** One row per (validator, candidate name, expected validity). */
  static Stream<Arguments> data() {
    return Stream.of(Arguments.of(Schema.NameValidator.UTF_VALIDATOR, null, false), // null not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, null, false), // null not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "", false), // empty not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "", false), // empty not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "Hello world", false), // space not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "Hello world", false), // space not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "H&", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "H&", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "H=", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "H=", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "H]", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "H]", false), // non letter or digit not accepted
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "Hello_world", true),
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "Hello_world", true),
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "éàçô", true), // Accept accent
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "éàçô", false), // Not Accept accent
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "5éàçô", false), // can't start with number
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "5éàçô", false), // can't start with number
        Arguments.of(Schema.NameValidator.UTF_VALIDATOR, "_Hello_world", true),
        Arguments.of(Schema.NameValidator.STRICT_VALIDATOR, "_Hello_world", true));
  }

  @ParameterizedTest
  @MethodSource("data")
  void validator(Schema.NameValidator validator, String input, boolean expectedResult) {
    // On failure, surface the validator's own error messages for diagnosis.
    final Schema.NameValidator.Result result = validator.validate(input);
    Assertions.assertEquals(expectedResult, result.isOK(), result.getErrors());
  }
}
| 7,123 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestUnionError.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Verifies that resolving a union member absent from the reader's union
 * produces a clear AvroTypeException naming the found type and the expected
 * union members.
 */
public class TestUnionError {

  @Test
  void unionErrorMessage() throws IOException {
    // Writer: record C whose field "c" is a union of records A and B.
    String writerSchemaJson = " {\n" + " \"type\" : \"record\",\n"
        + " \"name\" : \"C\",\n" + " \"fields\" : [ {\n"
        + " \"name\" : \"c\",\n" + " \"type\" : [ {\n"
        + " \"type\" : \"record\",\n" + " \"name\" : \"A\",\n"
        + " \"fields\" : [ {\n" + " \"name\" : \"amount\",\n"
        + " \"type\" : \"int\"\n" + " } ]\n" + " }, {\n"
        + " \"type\" : \"record\",\n" + " \"name\" : \"B\",\n"
        + " \"fields\" : [ {\n" + " \"name\" : \"amount1\",\n"
        + " \"type\" : \"int\"\n" + " } ]\n" + " } ]\n"
        + " } ]\n" + " }";
    Schema writerSchema = new Schema.Parser().parse(writerSchemaJson);

    // Reader: same shape, but its union holds A and float — no B.
    String readerSchemaJson = " {\n" + " \"type\" : \"record\",\n" + " \"name\" : \"C1\",\n"
        + " \"fields\" : [ {\n" + " \"name\" : \"c\",\n"
        + " \"type\" : [ {\n" + " \"type\" : \"record\",\n"
        + " \"name\" : \"A\",\n" + " \"fields\" : [ {\n"
        + " \"name\" : \"amount\",\n" + " \"type\" : \"int\"\n"
        + " } ]\n" + " }, \"float\" ]\n" + " } ]\n" + " }";
    Schema readerSchema = new Schema.Parser().parse(readerSchemaJson);

    // Build a C record carrying a B value (the branch the reader lacks).
    List<Schema> unionBranches = writerSchema.getField("c").schema().getTypes();
    GenericRecord outer = new GenericData.Record(writerSchema);
    GenericRecord bValue = new GenericData.Record(unionBranches.get(1));
    bValue.put("amount1", 12);
    outer.put("c", bValue);

    // Serialize with the writer schema.
    ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
    GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(writerSchema);
    BinaryEncoder binaryEncoder = EncoderFactory.get().binaryEncoder(bytesOut, null);
    datumWriter.write(outer, binaryEncoder);
    binaryEncoder.flush();

    // Deserializing with the reader schema must fail with a descriptive message.
    InputStream bytesIn = new ByteArrayInputStream(bytesOut.toByteArray());
    BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(bytesIn, null);
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(writerSchema, readerSchema);
    AvroTypeException avroException = assertThrows(AvroTypeException.class,
        () -> datumReader.read(null, binaryDecoder));
    assertEquals("Found B, expecting union[A, float]", avroException.getMessage());
  }
}
| 7,124 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileReflect.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.SeekableFileInput;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
public class TestDataFileReflect {

  @TempDir
  public File DIR;

  /*
   * Test that using multiple schemas in a file works doing a union before writing
   * any records.
   */
  @Test
  void multiReflectWithUnionBeforeWriting() throws IOException {
    File file = new File(DIR.getPath(), "testMultiReflectWithUnionBeforeWriting.avro");
    CheckList<Object> check = new CheckList<>();
    try (FileOutputStream fos = new FileOutputStream(file)) {
      ReflectData reflectData = ReflectData.get();
      List<Schema> schemas = Arrays.asList(reflectData.getSchema(FooRecord.class),
          reflectData.getSchema(BarRecord.class));
      Schema union = Schema.createUnion(schemas);
      try (DataFileWriter<Object> writer = new DataFileWriter<>(new ReflectDatumWriter<>(union))) {
        writer.create(union, fos);
        // test writing to a file
        write(writer, new BarRecord("One beer please"), check);
        write(writer, new FooRecord(10), check);
        write(writer, new BarRecord("Two beers please"), check);
        write(writer, new FooRecord(20), check);
      }
    }

    ReflectDatumReader<Object> din = new ReflectDatumReader<>();
    // Manage the input with try-with-resources (consistent with the other tests)
    // so it is released even if the DataFileReader constructor throws.
    try (SeekableFileInput sin = new SeekableFileInput(file);
        DataFileReader<Object> reader = new DataFileReader<>(sin, din)) {
      int count = 0;
      for (Object datum : reader) {
        check.assertEquals(datum, count++);
      }
      assertEquals(count, check.size());
    }
  }

  /*
   * Test that writing a record with a field that is null.
   */
  @Test
  void testNull() throws IOException {
    File file = new File(DIR.getPath(), "testNull.avro");
    CheckList<BarRecord> check = new CheckList<>();
    try (FileOutputStream fos = new FileOutputStream(file)) {
      ReflectData reflectData = ReflectData.AllowNull.get();
      Schema schema = reflectData.getSchema(BarRecord.class);
      try (DataFileWriter<BarRecord> writer = new DataFileWriter<>(
          new ReflectDatumWriter<>(BarRecord.class, reflectData))) {
        writer.create(schema, fos);
        // test writing to a file
        write(writer, new BarRecord("One beer please"), check);
        // null record here, fails when using the default reflectData instance
        write(writer, new BarRecord(), check);
        write(writer, new BarRecord("Two beers please"), check);
      }
    }

    ReflectDatumReader<BarRecord> din = new ReflectDatumReader<>();
    try (SeekableFileInput sin = new SeekableFileInput(file)) {
      try (DataFileReader<BarRecord> reader = new DataFileReader<>(sin, din)) {
        int count = 0;
        for (BarRecord datum : reader) {
          check.assertEquals(datum, count++);
        }
        assertEquals(count, check.size());
      }
    }
  }

  /*
   * Round-trip a record holding a direct ByteBuffer and an enum through the
   * binary encoder/decoder (no container file involved).
   */
  @Test
  void testNew() throws IOException {
    ByteBuffer payload = ByteBuffer.allocateDirect(8 * 1024);
    for (int i = 0; i < 500; i++) {
      payload.putInt(1);
    }
    payload.flip();

    ByteBufferRecord bbr = new ByteBufferRecord();
    bbr.setPayload(payload);
    bbr.setTp(TypeEnum.b);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    ReflectDatumWriter<ByteBufferRecord> writer = new ReflectDatumWriter<>(ByteBufferRecord.class);
    BinaryEncoder avroEncoder = EncoderFactory.get().blockingBinaryEncoder(outputStream, null);
    writer.write(bbr, avroEncoder);
    avroEncoder.flush();

    byte[] bytes = outputStream.toByteArray();
    ByteArrayInputStream inputStream = new ByteArrayInputStream(bytes);
    ReflectDatumReader<ByteBufferRecord> datumReader = new ReflectDatumReader<>(ByteBufferRecord.class);
    BinaryDecoder avroDecoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    ByteBufferRecord deserialized = datumReader.read(null, avroDecoder);
    assertEquals(bbr, deserialized);
  }

  /*
   * Test that writing out and reading in a nested class works
   */
  @Test
  void nestedClass() throws IOException {
    // Give this test its own file name (previously reused "testNull.avro").
    File file = new File(DIR.getPath(), "testNestedClass.avro");

    CheckList<BazRecord> check = new CheckList<>();
    try (FileOutputStream fos = new FileOutputStream(file)) {
      Schema schema = ReflectData.get().getSchema(BazRecord.class);
      try (DataFileWriter<BazRecord> writer = new DataFileWriter<>(new ReflectDatumWriter<>(schema))) {
        writer.create(schema, fos);

        // test writing to a file
        write(writer, new BazRecord(10), check);
        write(writer, new BazRecord(20), check);
      }
    }

    ReflectDatumReader<BazRecord> din = new ReflectDatumReader<>();
    try (SeekableFileInput sin = new SeekableFileInput(file)) {
      try (DataFileReader<BazRecord> reader = new DataFileReader<>(sin, din)) {
        int count = 0;
        for (BazRecord datum : reader) {
          check.assertEquals(datum, count++);
        }
        assertEquals(count, check.size());
      }
    }
  }

  /** Appends {@code o} to the writer while also recording it in the check list. */
  private <T> void write(DataFileWriter<T> writer, T o, CheckList<T> l) throws IOException {
    writer.append(l.addAndReturn(o));
  }

  /** List that remembers every written datum so it can be compared on read-back. */
  @SuppressWarnings("serial")
  private static class CheckList<T> extends ArrayList<T> {
    T addAndReturn(T check) {
      add(check);
      return check;
    }

    void assertEquals(Object toCheck, int i) {
      assertNotNull(toCheck);
      Object o = get(i);
      assertNotNull(o);
      Assertions.assertEquals(toCheck, o);
    }
  }

  /** Simple reflect-serializable record used by {@link #nestedClass()}. */
  private static class BazRecord {
    private int nbr;

    @SuppressWarnings("unused")
    public BazRecord() {
    }

    public BazRecord(int nbr) {
      this.nbr = nbr;
    }

    @Override
    public boolean equals(Object that) {
      if (that instanceof BazRecord) {
        return this.nbr == ((BazRecord) that).nbr;
      }
      return false;
    }

    @Override
    public int hashCode() {
      return nbr;
    }

    @Override
    public String toString() {
      return BazRecord.class.getSimpleName() + "{cnt=" + nbr + "}";
    }
  }
}
| 7,125 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestUnionSelfReference.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.slf4j.LoggerFactory;
import org.apache.avro.Schema.Field;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.avro.Schema.Type;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
public class TestUnionSelfReference {
  /** The logger for TestUnionSelfReference */
  @SuppressWarnings("unused")
  private static final Logger LOG = LoggerFactory.getLogger(TestUnionSelfReference.class);

  private static final String SIMPLE_BINARY_TREE = "{" + "  \"namespace\": \"tree\"," + "  \"type\": \"record\","
      + "  \"name\": \"Node\"," + "  \"fields\": [" + "    {" + "      \"name\": \"left\","
      + "      \"type\": [" + "        \"null\"," + "        {" + "          \"type\": \"Node\"" + "        }"
      + "      ]," + "      \"default\": null" + "    }," + "    {" + "      \"name\": \"right\","
      + "      \"type\": [" + "        \"null\"," + "        {" + "          \"type\": \"Node\"" + "        }"
      + "      ]," + "      \"default\": null" + "    }" + "  ]" + "  }";

  private static final String THREE_TYPE_UNION = "{" + "  \"namespace\": \"tree\"," + "  \"type\": \"record\","
      + "  \"name\": \"Node\"," + "  \"fields\": [" + "    {" + "      \"name\": \"left\","
      + "      \"type\": [" + "        \"null\"," + "        \"string\"," + "        {"
      + "          \"type\": \"Node\"" + "        }" + "      ]," + "      \"default\": null" + "    },"
      + "    {" + "      \"name\": \"right\"," + "      \"type\": [" + "        \"null\","
      + "        \"string\"," + "        {" + "          \"type\": \"Node\"" + "        }" + "      ],"
      + "      \"default\": null" + "    }" + "  ]" + "  }";

  @Test
  void selfReferenceInUnion() {
    Schema schema = new Schema.Parser().parse(SIMPLE_BINARY_TREE);
    assertNullDefaultedUnion(schema.getField("left"), "null", "Node");
    assertNullDefaultedUnion(schema.getField("right"), "null", "Node");
  }

  @Test
  void selfReferenceInThreeUnion() {
    Schema schema = new Schema.Parser().parse(THREE_TYPE_UNION);
    assertNullDefaultedUnion(schema.getField("left"), "null", "string", "Node");
    assertNullDefaultedUnion(schema.getField("right"), "null", "string", "Node");
  }

  /**
   * Asserts that {@code field} defaults to null and is a union whose branches,
   * in order, carry exactly {@code expectedBranchNames}.
   */
  private static void assertNullDefaultedUnion(Field field, String... expectedBranchNames) {
    assertEquals(JsonProperties.NULL_VALUE, field.defaultVal());
    final Schema fieldSchema = field.schema();
    assertEquals(Type.UNION, fieldSchema.getType());
    assertEquals(expectedBranchNames.length, fieldSchema.getTypes().size());
    for (int i = 0; i < expectedBranchNames.length; i++) {
      assertEquals(expectedBranchNames[i], fieldSchema.getTypes().get(i).getName());
    }
  }
}
| 7,126 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/GenerateBlockingData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.RandomData;
/**
* Generates file with objects of a specific schema(that doesn't contain nesting
* of arrays and maps) with random data. This is only for testing. Generated
* file contains the count of objects of the specified schema followed by
* objects serialized using BlockingBinaryEncoder. No other metadata is written
* to the file. See interoptests.py for more details(interoptests.py reads the
* file generated here and validates the contents).
*/
public class GenerateBlockingData {
  private static final int SYNC_INTERVAL = 1000;

  private static ByteArrayOutputStream buffer = new ByteArrayOutputStream(2 * SYNC_INTERVAL);
  private static EncoderFactory factory = EncoderFactory.get();
  private static Encoder bufOut = EncoderFactory.get().blockingBinaryEncoder(buffer, null);

  // Number of objects buffered but not yet flushed to the output file.
  private static int blockCount;

  /**
   * Flushes the buffered objects as one block: the object count followed by the
   * block-encoded bytes, then resets the buffer for the next block.
   */
  private static void writeBlock(Encoder vout, FileOutputStream out) throws IOException {
    vout.writeLong(blockCount);
    bufOut.flush();
    buffer.writeTo(out);
    buffer.reset();
    blockCount = 0;
  }

  public static void main(String[] args) throws Exception {
    if (args.length != 3) {
      System.out.println("Usage: GenerateBlockingData <schemafile> <outputfile> <count>");
      System.exit(-1);
    }

    Schema sch = new Schema.Parser().parse(new File(args[0]));
    File outputFile = new File(args[1]);
    int numObjects = Integer.parseInt(args[2]);

    // try-with-resources: previously the stream leaked if writing threw.
    try (FileOutputStream out = new FileOutputStream(outputFile, false)) {
      DatumWriter<Object> dout = new GenericDatumWriter<>();
      dout.setSchema(sch);
      Encoder vout = factory.directBinaryEncoder(out, null);
      vout.writeLong(numObjects); // metadata:the count of objects in the file

      for (Object datum : new RandomData(sch, numObjects)) {
        dout.write(datum, bufOut);
        blockCount++;
        if (buffer.size() >= SYNC_INTERVAL) {
          writeBlock(vout, out);
        }
      }
      if (blockCount > 0) {
        writeBlock(vout, out);
      }
      out.flush();
    }
  }
}
| 7,127 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestCircularReferences.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
public class TestCircularReferences {
  @TempDir
  public File temp;

  /**
   * Logical type marking a record field that refers to another record by its id
   * rather than by embedding it.
   */
  public static class Reference extends LogicalType {
    private static final String REFERENCE = "reference";
    private static final String REF_FIELD_NAME = "ref-field-name";

    private final String refFieldName;

    public Reference(String refFieldName) {
      super(REFERENCE);
      this.refFieldName = refFieldName;
    }

    public Reference(Schema schema) {
      super(REFERENCE);
      this.refFieldName = schema.getProp(REF_FIELD_NAME);
    }

    @Override
    public Schema addToSchema(Schema schema) {
      super.addToSchema(schema);
      schema.addProp(REF_FIELD_NAME, refFieldName);
      return schema;
    }

    @Override
    public String getName() {
      return REFERENCE;
    }

    public String getRefFieldName() {
      return refFieldName;
    }

    @Override
    public void validate(Schema schema) {
      super.validate(schema);
      if (schema.getField(refFieldName) == null) {
        throw new IllegalArgumentException("Invalid field name for reference field: " + refFieldName);
      }
    }
  }

  public static class ReferenceTypeFactory implements LogicalTypes.LogicalTypeFactory {
    @Override
    public LogicalType fromSchema(Schema schema) {
      return new Reference(schema);
    }

    @Override
    public String getTypeName() {
      return Reference.REFERENCE;
    }
  }

  /**
   * Logical type marking a record that can be the target of a {@link Reference};
   * the record's long-typed id field identifies it.
   */
  public static class Referenceable extends LogicalType {
    private static final String REFERENCEABLE = "referenceable";
    private static final String ID_FIELD_NAME = "id-field-name";

    private final String idFieldName;

    public Referenceable(String idFieldName) {
      super(REFERENCEABLE);
      this.idFieldName = idFieldName;
    }

    public Referenceable(Schema schema) {
      super(REFERENCEABLE);
      this.idFieldName = schema.getProp(ID_FIELD_NAME);
    }

    @Override
    public Schema addToSchema(Schema schema) {
      super.addToSchema(schema);
      schema.addProp(ID_FIELD_NAME, idFieldName);
      return schema;
    }

    @Override
    public String getName() {
      return REFERENCEABLE;
    }

    public String getIdFieldName() {
      return idFieldName;
    }

    @Override
    public void validate(Schema schema) {
      super.validate(schema);
      Schema.Field idField = schema.getField(idFieldName);
      if (idField == null || idField.schema().getType() != Schema.Type.LONG) {
        throw new IllegalArgumentException("Invalid ID field: " + idFieldName + ": " + idField);
      }
    }
  }

  public static class ReferenceableTypeFactory implements LogicalTypes.LogicalTypeFactory {
    @Override
    public LogicalType fromSchema(Schema schema) {
      return new Referenceable(schema);
    }

    @Override
    public String getTypeName() {
      return Referenceable.REFERENCEABLE;
    }
  }

  @BeforeAll
  public static void addReferenceTypes() {
    LogicalTypes.register(Referenceable.REFERENCEABLE, new ReferenceableTypeFactory());
    LogicalTypes.register(Reference.REFERENCE, new ReferenceTypeFactory());
  }

  /**
   * Tracks ids of referenceable records on both read and write, resolving
   * forward references via callbacks once the referenced record is available.
   */
  public static class ReferenceManager {
    private interface Callback {
      void set(Object referenceable);
    }

    private final Map<Long, Object> references = new HashMap<>();
    private final Map<Object, Long> ids = new IdentityHashMap<>();
    private final Map<Long, List<Callback>> callbacksById = new HashMap<>();
    private final ReferenceableTracker tracker = new ReferenceableTracker();
    private final ReferenceHandler handler = new ReferenceHandler();

    public ReferenceableTracker getTracker() {
      return tracker;
    }

    public ReferenceHandler getHandler() {
      return handler;
    }

    public class ReferenceableTracker extends Conversion<IndexedRecord> {
      @Override
      @SuppressWarnings("unchecked")
      public Class<IndexedRecord> getConvertedType() {
        return (Class) Record.class;
      }

      @Override
      public String getLogicalTypeName() {
        return Referenceable.REFERENCEABLE;
      }

      @Override
      public IndexedRecord fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
        // read side
        long id = getId(value, schema);

        // keep track of this for later references
        references.put(id, value);

        // call any callbacks waiting to resolve this id; there may be none when
        // nothing referenced this record before it was read (previously this
        // iterated a possibly-null list and threw NullPointerException)
        List<Callback> callbacks = callbacksById.get(id);
        if (callbacks != null) {
          for (Callback callback : callbacks) {
            callback.set(value);
          }
        }

        return value;
      }

      @Override
      public IndexedRecord toRecord(IndexedRecord value, Schema schema, LogicalType type) {
        // write side
        long id = getId(value, schema);

        // keep track of this for later references
        // references.put(id, value);
        ids.put(value, id);

        return value;
      }

      private long getId(IndexedRecord referenceable, Schema schema) {
        Referenceable info = (Referenceable) schema.getLogicalType();
        int idField = schema.getField(info.getIdFieldName()).pos();
        return (Long) referenceable.get(idField);
      }
    }

    public class ReferenceHandler extends Conversion<IndexedRecord> {
      @Override
      @SuppressWarnings("unchecked")
      public Class<IndexedRecord> getConvertedType() {
        return (Class) Record.class;
      }

      @Override
      public String getLogicalTypeName() {
        return Reference.REFERENCE;
      }

      @Override
      public IndexedRecord fromRecord(final IndexedRecord record, Schema schema, LogicalType type) {
        // read side: resolve the record or save a callback
        final Schema.Field refField = schema.getField(((Reference) type).getRefFieldName());

        Long id = (Long) record.get(refField.pos());
        if (id != null) {
          if (references.containsKey(id)) {
            record.put(refField.pos(), references.get(id));
          } else {
            List<Callback> callbacks = callbacksById.computeIfAbsent(id, k -> new ArrayList<>());
            // add a callback to resolve this reference when the id is available
            callbacks.add(referenceable -> record.put(refField.pos(), referenceable));
          }
        }

        return record;
      }

      @Override
      public IndexedRecord toRecord(IndexedRecord record, Schema schema, LogicalType type) {
        // write side: replace a referenced field with its id
        Schema.Field refField = schema.getField(((Reference) type).getRefFieldName());
        IndexedRecord referenced = (IndexedRecord) record.get(refField.pos());
        if (referenced == null) {
          return record;
        }

        // hijack the field to return the id instead of the ref
        return new HijackingIndexedRecord(record, refField.pos(), ids.get(referenced));
      }
    }

    /** Read-only view of a record that substitutes one field's value. */
    private static class HijackingIndexedRecord implements IndexedRecord {
      private final IndexedRecord wrapped;
      private final int index;
      private final Object data;

      public HijackingIndexedRecord(IndexedRecord wrapped, int index, Object data) {
        this.wrapped = wrapped;
        this.index = index;
        this.data = data;
      }

      @Override
      public void put(int i, Object v) {
        throw new RuntimeException("[BUG] This is a read-only class.");
      }

      @Override
      public Object get(int i) {
        if (i == index) {
          return data;
        }
        return wrapped.get(i);
      }

      @Override
      public Schema getSchema() {
        return wrapped.getSchema();
      }
    }
  }

  @Test
  void test() throws IOException {
    ReferenceManager manager = new ReferenceManager();
    GenericData model = new GenericData();
    model.addLogicalTypeConversion(manager.getTracker());
    model.addLogicalTypeConversion(manager.getHandler());

    Schema parentSchema = Schema.createRecord("Parent", null, null, false);

    Schema parentRefSchema = Schema.createUnion(Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.LONG),
        parentSchema);
    Reference parentRef = new Reference("parent");

    List<Schema.Field> childFields = new ArrayList<>();
    childFields.add(new Schema.Field("c", Schema.create(Schema.Type.STRING)));
    childFields.add(new Schema.Field("parent", parentRefSchema));
    Schema childSchema = parentRef.addToSchema(Schema.createRecord("Child", null, null, false, childFields));

    List<Schema.Field> parentFields = new ArrayList<>();
    parentFields.add(new Schema.Field("id", Schema.create(Schema.Type.LONG)));
    parentFields.add(new Schema.Field("p", Schema.create(Schema.Type.STRING)));
    parentFields.add(new Schema.Field("child", childSchema));
    parentSchema.setFields(parentFields);

    Referenceable idRef = new Referenceable("id");
    Schema schema = idRef.addToSchema(parentSchema);

    System.out.println("Schema: " + schema.toString(true));

    Record parent = new Record(schema);
    parent.put("id", 1L);
    parent.put("p", "parent data!");

    Record child = new Record(childSchema);
    child.put("c", "child data!");
    child.put("parent", parent);

    parent.put("child", child);

    // serialization round trip
    File data = write(model, schema, parent);
    List<Record> records = read(model, schema, data);

    Record actual = records.get(0);

    // because the record is a recursive structure, equals won't work
    assertEquals(1L, actual.get("id"), "Should correctly read back the parent id");
    assertEquals(new Utf8("parent data!"), actual.get("p"), "Should correctly read back the parent data");

    Record actualChild = (Record) actual.get("child");
    assertEquals(new Utf8("child data!"), actualChild.get("c"), "Should correctly read back the child data");
    Object childParent = actualChild.get("parent");
    assertTrue(childParent instanceof Record, "Should have a parent Record object");
    Record childParentRecord = (Record) actualChild.get("parent");
    assertEquals(1L, childParentRecord.get("id"), "Should have the right parent id");
    assertEquals(new Utf8("parent data!"), childParentRecord.get("p"), "Should have the right parent data");
  }

  /** Reads all records from {@code file} with a reader created from {@code model}. */
  private <D> List<D> read(GenericData model, Schema schema, File file) throws IOException {
    DatumReader<D> reader = newReader(model, schema);
    List<D> data = new ArrayList<>();

    try (FileReader<D> fileReader = new DataFileReader<>(file, reader)) {
      for (D datum : fileReader) {
        data.add(datum);
      }
    }

    return data;
  }

  @SuppressWarnings("unchecked")
  private <D> DatumReader<D> newReader(GenericData model, Schema schema) {
    return model.createDatumReader(schema);
  }

  /** Writes {@code data} to a fresh temp file and returns that file. */
  @SuppressWarnings("unchecked")
  private <D> File write(GenericData model, Schema schema, D... data) throws IOException {
    File file = File.createTempFile("junit", null, temp);
    DatumWriter<D> writer = model.createDatumWriter(schema);

    try (DataFileWriter<D> fileWriter = new DataFileWriter<>(writer)) {
      fileWriter.create(schema, file);
      for (D datum : data) {
        fileWriter.append(datum);
      }
    }

    return file;
  }
}
| 7,128 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/ByteBufferRecord.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.nio.ByteBuffer;
public class ByteBufferRecord {

  private ByteBuffer payload;
  private TypeEnum tp;

  public ByteBufferRecord() {
  }

  public ByteBuffer getPayload() {
    return payload;
  }

  public void setPayload(ByteBuffer payload) {
    this.payload = payload;
  }

  public TypeEnum getTp() {
    return tp;
  }

  public void setTp(TypeEnum tp) {
    this.tp = tp;
  }

  @Override
  public boolean equals(Object ob) {
    if (this == ob)
      return true;
    if (!(ob instanceof ByteBufferRecord))
      return false;
    ByteBufferRecord that = (ByteBufferRecord) ob;
    if (this.getPayload() == null)
      return that.getPayload() == null;
    if (!this.getPayload().equals(that.getPayload()))
      return false;
    if (this.getTp() == null)
      return that.getTp() == null;
    return this.getTp().equals(that.getTp());
  }

  @Override
  public int hashCode() {
    // Null-safe: equals() allows a null payload, so hashCode() must not throw
    // NullPointerException for it (it previously dereferenced payload directly).
    // Equal records share payload and tp, so including both keeps the contract.
    int result = (payload == null) ? 0 : payload.hashCode();
    return 31 * result + ((tp == null) ? 0 : tp.hashCode());
  }
}
| 7,129 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCommons.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.stream.Stream;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestSchemaCommons {
  private static final Logger LOG = LoggerFactory.getLogger(TestSchemaCommons.class);

  /**
   * For each shared test folder: parses its schema, then copies every record of
   * its data file to verify the schema can both read and re-write the data.
   * Folders missing either file are skipped with a warning.
   */
  @ParameterizedTest
  @MethodSource("sharedFolders")
  void runFolder(final File folder) throws IOException {
    final File schemaSource = new File(folder, "schema.json");
    final File data = new File(folder, "data.avro");
    if (!schemaSource.exists()) {
      LOG.warn("No 'schema.json' file on folder {}", folder.getPath());
      return;
    }
    final Schema schema = new Schema.Parser().parse(schemaSource);
    Assertions.assertNotNull(schema);

    if (!data.exists()) {
      LOG.warn("No 'data.avro' file on folder {}", folder.getPath());
      return;
    }

    // output file; fail with a clear message instead of an NPE if the test
    // classpath root cannot be resolved
    final java.net.URL classpathRoot = Thread.currentThread().getContextClassLoader().getResource(".");
    Assertions.assertNotNull(classpathRoot, "test classpath root not found");
    final File copyData = new File(classpathRoot.getPath(), "copy.avro");

    // Deserialize from disk
    DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);

    try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(data, datumReader);
        DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
      dataFileWriter.create(schema, copyData);

      int counter = 0;
      while (dataFileReader.hasNext()) {
        GenericRecord record = dataFileReader.next();
        counter++;
        Assertions.assertNotNull(record);
        dataFileWriter.append(record);
      }
      Assertions.assertTrue(counter > 0, "no data in file");
    }
  }

  public static Stream<Arguments> sharedFolders() {
    File root = new File("../../../share/test/data/schemas");
    return Arrays.stream(root.listFiles(File::isDirectory)).map(Arguments::of);
  }
}
| 7,130 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileDeflate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema.Type;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
/** Simple test of DataFileWriter and DataFileStream with deflate codec. */
/** Simple test of DataFileWriter and DataFileStream with deflate codec. */
public class TestDataFileDeflate {
  @Test
  void writeAndRead() throws IOException {
    Schema schema = Schema.create(Type.STRING);

    // Write it: try-with-resources so the writer is closed even when an
    // append/sync fails (it was previously closed only on the happy path)
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try (DataFileWriter<Utf8> w = new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
      w.setCodec(CodecFactory.deflateCodec(6));
      w.create(schema, baos);
      w.append(new Utf8("hello world"));
      w.append(new Utf8("hello moon"));
      w.sync();
      w.append(new Utf8("bye bye world"));
      w.append(new Utf8("bye bye moon"));
    }

    // Read it
    try (DataFileStream<Utf8> r = new DataFileStream<>(new ByteArrayInputStream(baos.toByteArray()),
        new GenericDatumReader<>(schema))) {
      assertEquals("hello world", r.next().toString());
      assertEquals("hello moon", r.next().toString());
      assertEquals("bye bye world", r.next().toString());
      assertEquals("bye bye moon", r.next().toString());
      assertFalse(r.hasNext());
    }
  }
}
| 7,131 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.NullNode;
import com.fasterxml.jackson.databind.node.TextNode;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link Schema}: construction, JSON parsing, default-value
 * validation, union/nullability predicates and Java serialization.
 */
public class TestSchema {
  // Parsing one schema string split into two fragments must yield the same
  // schema as parsing it whole.
  @Test
  void splitSchemaBuild() {
    Schema s = SchemaBuilder.record("HandshakeRequest").namespace("org.apache.avro.ipc").fields().name("clientProtocol")
        .type().optional().stringType().name("meta").type().optional().map().values().bytesType().endRecord();
    String schemaString = s.toString();
    int mid = schemaString.length() / 2;
    Schema parsedStringSchema = new org.apache.avro.Schema.Parser().parse(s.toString());
    Schema parsedArrayOfStringSchema = new org.apache.avro.Schema.Parser().parse(schemaString.substring(0, mid),
        schemaString.substring(mid));
    assertNotNull(parsedStringSchema);
    assertNotNull(parsedArrayOfStringSchema);
    assertEquals(parsedStringSchema.toString(), parsedArrayOfStringSchema.toString());
  }
  // Duplicate field names within one record must be rejected.
  @Test
  void defaultRecordWithDuplicateFieldName() {
    String recordName = "name";
    Schema schema = Schema.createRecord(recordName, "doc", "namespace", false);
    List<Field> fields = new ArrayList<>();
    fields.add(new Field("field_name", Schema.create(Type.NULL), null, null));
    fields.add(new Field("field_name", Schema.create(Type.INT), null, null));
    try {
      schema.setFields(fields);
      fail("Should not be able to create a record with duplicate field name.");
    } catch (AvroRuntimeException are) {
      assertTrue(are.getMessage().contains("Duplicate field field_name in record " + recordName));
    }
  }
  // The varargs overload of createUnion must equal the List overload.
  @Test
  void createUnionVarargs() {
    List<Schema> types = new ArrayList<>();
    types.add(Schema.create(Type.NULL));
    types.add(Schema.create(Type.LONG));
    Schema expected = Schema.createUnion(types);
    Schema schema = Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.LONG));
    assertEquals(expected, schema);
  }
  @Test
  void recordWithNullDoc() {
    Schema schema = Schema.createRecord("name", null, "namespace", false);
    String schemaString = schema.toString();
    assertNotNull(schemaString);
  }
  @Test
  void recordWithNullNamespace() {
    Schema schema = Schema.createRecord("name", "doc", null, false);
    String schemaString = schema.toString();
    assertNotNull(schemaString);
  }
  @Test
  void emptyRecordSchema() {
    Schema schema = createDefaultRecord();
    String schemaString = schema.toString();
    assertNotNull(schemaString);
  }
  @Test
  void parseEmptySchema() {
    assertThrows(SchemaParseException.class, () -> {
      new Schema.Parser().parse("");
    });
  }
  @Test
  void schemaWithFields() {
    List<Field> fields = new ArrayList<>();
    fields.add(new Field("field_name1", Schema.create(Type.NULL), null, null));
    fields.add(new Field("field_name2", Schema.create(Type.INT), null, null));
    Schema schema = createDefaultRecord();
    schema.setFields(fields);
    String schemaString = schema.toString();
    assertNotNull(schemaString);
    assertEquals(2, schema.getFields().size());
  }
  @Test
  void schemaWithNullFields() {
    assertThrows(NullPointerException.class, () -> {
      Schema.createRecord("name", "doc", "namespace", false, null);
    });
  }
  // isUnion() must be true only for union schemas, regardless of arity.
  @Test
  void isUnionOnUnionWithMultipleElements() {
    Schema schema = Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.LONG));
    assertTrue(schema.isUnion());
  }
  @Test
  void isUnionOnUnionWithOneElement() {
    Schema schema = Schema.createUnion(Schema.create(Type.LONG));
    assertTrue(schema.isUnion());
  }
  @Test
  void isUnionOnRecord() {
    Schema schema = createDefaultRecord();
    assertFalse(schema.isUnion());
  }
  @Test
  void isUnionOnArray() {
    Schema schema = Schema.createArray(Schema.create(Type.LONG));
    assertFalse(schema.isUnion());
  }
  @Test
  void isUnionOnEnum() {
    Schema schema = Schema.createEnum("name", "doc", "namespace", Collections.singletonList("value"));
    assertFalse(schema.isUnion());
  }
  @Test
  void isUnionOnFixed() {
    Schema schema = Schema.createFixed("name", "doc", "space", 10);
    assertFalse(schema.isUnion());
  }
  @Test
  void isUnionOnMap() {
    Schema schema = Schema.createMap(Schema.create(Type.LONG));
    assertFalse(schema.isUnion());
  }
  // isNullable() is true for NULL schemas and unions containing NULL.
  @Test
  void isNullableOnUnionWithNull() {
    Schema schema = Schema.createUnion(Schema.create(Type.NULL), Schema.create(Type.LONG));
    assertTrue(schema.isNullable());
  }
  @Test
  void isNullableOnUnionWithoutNull() {
    Schema schema = Schema.createUnion(Schema.create(Type.LONG));
    assertFalse(schema.isNullable());
  }
  @Test
  void isNullableOnRecord() {
    Schema schema = createDefaultRecord();
    assertFalse(schema.isNullable());
  }
  // Shared fixture: a record with no fields set yet.
  private Schema createDefaultRecord() {
    return Schema.createRecord("name", "doc", "namespace", false);
  }
  // A Schema must survive a Java-serialization round trip unchanged.
  @Test
  void serialization() throws IOException, ClassNotFoundException {
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutputStream oos = new ObjectOutputStream(bos);
        InputStream jsonSchema = getClass().getResourceAsStream("/SchemaBuilder.avsc")) {
      Schema payload = new Schema.Parser().parse(jsonSchema);
      oos.writeObject(payload);
      try (ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
          ObjectInputStream ois = new ObjectInputStream(bis)) {
        Schema sp = (Schema) ois.readObject();
        assertEquals(payload, sp);
      }
    }
  }
  @Test
  void reconstructSchemaStringWithoutInlinedChildReference() {
    String child = "{\"type\":\"record\"," + "\"name\":\"Child\"," + "\"namespace\":\"org.apache.avro.nested\","
        + "\"fields\":" + "[{\"name\":\"childField\",\"type\":\"string\"}]}";
    String parent = "{\"type\":\"record\"," + "\"name\":\"Parent\"," + "\"namespace\":\"org.apache.avro.nested\","
        + "\"fields\":" + "[{\"name\":\"child\",\"type\":\"Child\"}]}";
    Schema.Parser parser = new Schema.Parser();
    Schema childSchema = parser.parse(child);
    Schema parentSchema = parser.parse(parent);
    String parentWithoutInlinedChildReference = parentSchema.toString(Collections.singleton(childSchema), false);
    // The generated string should be the same as the original parent
    // schema string that did not have the child schema inlined.
    assertEquals(parent, parentWithoutInlinedChildReference);
  }
  // Default-value coercion tests: each numeric Avro type accepts compatible
  // Java numbers (widening only) and rejects lossy conversions.
  @Test
  void intDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", 1);
    assertTrue(field.hasDefaultValue());
    assertEquals(1, field.defaultVal());
    assertEquals(1, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", Integer.MIN_VALUE);
    assertTrue(field.hasDefaultValue());
    assertEquals(Integer.MIN_VALUE, field.defaultVal());
    assertEquals(Integer.MIN_VALUE, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", Integer.MAX_VALUE);
    assertTrue(field.hasDefaultValue());
    assertEquals(Integer.MAX_VALUE, field.defaultVal());
    assertEquals(Integer.MAX_VALUE, GenericData.get().getDefaultValue(field));
  }
  @Test
  void validLongAsIntDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", 1L);
    assertTrue(field.hasDefaultValue());
    assertEquals(1, field.defaultVal());
    assertEquals(1, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", Long.valueOf(Integer.MIN_VALUE));
    assertTrue(field.hasDefaultValue());
    assertEquals(Integer.MIN_VALUE, field.defaultVal());
    assertEquals(Integer.MIN_VALUE, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", Long.valueOf(Integer.MAX_VALUE));
    assertTrue(field.hasDefaultValue());
    assertEquals(Integer.MAX_VALUE, field.defaultVal());
    assertEquals(Integer.MAX_VALUE, GenericData.get().getDefaultValue(field));
  }
  @Test
  void invalidLongAsIntDefaultValue() {
    assertThrows(AvroTypeException.class, () -> {
      new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", Integer.MAX_VALUE + 1L);
    });
  }
  @Test
  void doubleAsIntDefaultValue() {
    assertThrows(AvroTypeException.class, () -> {
      new Schema.Field("myField", Schema.create(Schema.Type.INT), "doc", 1.0);
    });
  }
  @Test
  void longDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", 1L);
    assertTrue(field.hasDefaultValue());
    assertEquals(1L, field.defaultVal());
    assertEquals(1L, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", Long.MIN_VALUE);
    assertTrue(field.hasDefaultValue());
    assertEquals(Long.MIN_VALUE, field.defaultVal());
    assertEquals(Long.MIN_VALUE, GenericData.get().getDefaultValue(field));
    field = new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", Long.MAX_VALUE);
    assertTrue(field.hasDefaultValue());
    assertEquals(Long.MAX_VALUE, field.defaultVal());
    assertEquals(Long.MAX_VALUE, GenericData.get().getDefaultValue(field));
  }
  @Test
  void intAsLongDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", 1);
    assertTrue(field.hasDefaultValue());
    assertEquals(1L, field.defaultVal());
    assertEquals(1L, GenericData.get().getDefaultValue(field));
  }
  @Test
  void doubleAsLongDefaultValue() {
    assertThrows(AvroTypeException.class, () -> {
      new Schema.Field("myField", Schema.create(Schema.Type.LONG), "doc", 1.0);
    });
  }
  @Test
  void doubleDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.DOUBLE), "doc", 1.0);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0d, field.defaultVal());
    assertEquals(1.0d, GenericData.get().getDefaultValue(field));
  }
  @Test
  void intAsDoubleDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.DOUBLE), "doc", 1);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0d, field.defaultVal());
    assertEquals(1.0d, GenericData.get().getDefaultValue(field));
  }
  @Test
  void longAsDoubleDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.DOUBLE), "doc", 1L);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0d, field.defaultVal());
    assertEquals(1.0d, GenericData.get().getDefaultValue(field));
  }
  @Test
  void floatAsDoubleDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.DOUBLE), "doc", 1.0f);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0d, field.defaultVal());
    assertEquals(1.0d, GenericData.get().getDefaultValue(field));
  }
  @Test
  void floatDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1.0f);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0f, field.defaultVal());
    assertEquals(1.0f, GenericData.get().getDefaultValue(field));
  }
  @Test
  void intAsFloatDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0f, field.defaultVal());
    assertEquals(1.0f, GenericData.get().getDefaultValue(field));
  }
  @Test
  void longAsFloatDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1L);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0f, field.defaultVal());
    assertEquals(1.0f, GenericData.get().getDefaultValue(field));
  }
  @Test
  void doubleAsFloatDefaultValue() {
    Schema.Field field = new Schema.Field("myField", Schema.create(Schema.Type.FLOAT), "doc", 1.0d);
    assertTrue(field.hasDefaultValue());
    assertEquals(1.0f, field.defaultVal());
    assertEquals(1.0f, GenericData.get().getDefaultValue(field));
  }
  @Test
  void enumSymbolAsNull() {
    assertThrows(SchemaParseException.class, () -> {
      Schema.createEnum("myField", "doc", "namespace", Collections.singletonList(null));
    });
  }
  @Test
  void schemaFieldWithoutSchema() {
    assertThrows(NullPointerException.class, () -> {
      new Schema.Field("f", null);
    });
  }
  // A record named like a primitive ("ns.int") must round-trip through
  // toString/parse.
  @Test
  void parseRecordWithNameAsType() {
    final String schemaString = "{\n  \"type\" : \"record\",\n  \"name\" : \"ns.int\",\n"
        + "  \"fields\" : [ \n    {\"name\" : \"value\", \"type\" : \"int\"}, \n"
        + "    {\"name\" : \"next\", \"type\" : [ \"null\", \"ns.int\" ]}\n  ]\n}";
    final Schema schema = new Schema.Parser().parse(schemaString);
    String toString = schema.toString(true);
    final Schema schema2 = new Schema.Parser().parse(toString);
    assertEquals(schema, schema2);
  }
  // Names colliding with primitive type names must always be qualified.
  @Test
  void qualifiedName() {
    Arrays.stream(Type.values()).forEach((Type t) -> {
      final Schema.Name name = new Schema.Name(t.getName(), "space");
      assertEquals("space." + t.getName(), name.getQualified("space"));
      assertEquals("space." + t.getName(), name.getQualified("otherdefault"));
    });
    final Schema.Name name = new Schema.Name("name", "space");
    assertEquals("name", name.getQualified("space"));
    assertEquals("space.name", name.getQualified("otherdefault"));
    final Schema.Name nameInt = new Schema.Name("Int", "space");
    assertEquals("Int", nameInt.getQualified("space"));
  }
  // isValidDefault must accept JSON nodes matching the schema (including
  // union branches) and reject mismatches.
  @Test
  void validValue() {
    // Valid null value
    final Schema nullSchema = Schema.create(Type.NULL);
    assertTrue(nullSchema.isValidDefault(JsonNodeFactory.instance.nullNode()));
    // Valid int value
    final Schema intSchema = Schema.create(Type.INT);
    assertTrue(intSchema.isValidDefault(JsonNodeFactory.instance.numberNode(12)));
    // Valid Text value
    final Schema strSchema = Schema.create(Type.STRING);
    assertTrue(strSchema.isValidDefault(new TextNode("textNode")));
    // Valid Array value
    final Schema arraySchema = Schema.createArray(Schema.create(Type.STRING));
    final ArrayNode arrayValue = JsonNodeFactory.instance.arrayNode();
    assertTrue(arraySchema.isValidDefault(arrayValue)); // empty array
    arrayValue.add("Hello");
    arrayValue.add("World");
    assertTrue(arraySchema.isValidDefault(arrayValue));
    arrayValue.add(5);
    assertFalse(arraySchema.isValidDefault(arrayValue));
    // Valid Union type
    final Schema unionSchema = Schema.createUnion(strSchema, intSchema, nullSchema);
    assertTrue(unionSchema.isValidDefault(JsonNodeFactory.instance.textNode("Hello")));
    assertTrue(unionSchema.isValidDefault(new IntNode(23)));
    assertTrue(unionSchema.isValidDefault(JsonNodeFactory.instance.nullNode()));
    assertFalse(unionSchema.isValidDefault(arrayValue));
    // Array of union
    final Schema arrayUnion = Schema.createArray(unionSchema);
    final ArrayNode arrayUnionValue = JsonNodeFactory.instance.arrayNode();
    arrayUnionValue.add("Hello");
    arrayUnionValue.add(NullNode.getInstance());
    assertTrue(arrayUnion.isValidDefault(arrayUnionValue));
    // Union String, bytes
    final Schema unionStrBytes = Schema.createUnion(strSchema, Schema.create(Type.BYTES));
    assertTrue(unionStrBytes.isValidDefault(JsonNodeFactory.instance.textNode("Hello")));
    assertFalse(unionStrBytes.isValidDefault(JsonNodeFactory.instance.numberNode(123)));
  }
  // An enum referenced before its definition must resolve to the same schema
  // object, and toString must emit the definition before any usage.
  @Test
  void enumLateDefine() {
    String schemaString = "{\n" + "    \"type\":\"record\",\n" + "    \"name\": \"Main\",\n" + "    \"fields\":[\n"
        + "        {\n" + "            \"name\":\"f1\",\n" + "            \"type\":\"Sub\"\n" + "        },\n"
        + "        {\n" + "            \"name\":\"f2\",\n" + "            \"type\":{\n"
        + "                \"type\":\"enum\",\n" + "                \"name\":\"Sub\",\n"
        + "                \"symbols\":[\"OPEN\",\"CLOSE\"]\n" + "            }\n" + "        }\n" + "    ]\n" + "}";
    final Schema schema = new Schema.Parser().parse(schemaString);
    Schema f1Schema = schema.getField("f1").schema();
    Schema f2Schema = schema.getField("f2").schema();
    assertSame(f1Schema, f2Schema);
    assertEquals(Type.ENUM, f1Schema.getType());
    String stringSchema = schema.toString();
    int definitionIndex = stringSchema.indexOf("\"symbols\":[\"OPEN\",\"CLOSE\"]");
    int usageIndex = stringSchema.indexOf("\"type\":\"Sub\"");
    assertTrue(definitionIndex < usageIndex, "usage is before definition");
  }
  @Test
  public void testRecordInArray() {
    String schemaString = "{\n" + "  \"type\": \"record\",\n" + "  \"name\": \"TestRecord\",\n" + "  \"fields\": [\n"
        + "    {\n" + "      \"name\": \"value\",\n" + "      \"type\": {\n" + "        \"type\": \"record\",\n"
        + "        \"name\": \"Container\",\n" + "        \"fields\": [\n" + "          {\n"
        + "            \"name\": \"Optional\",\n" + "            \"type\": {\n" + "              \"type\": \"array\",\n"
        + "              \"items\": [\n" + "                {\n" + "                  \"type\": \"record\",\n"
        + "                  \"name\": \"optional_field_0\",\n" + "                  \"namespace\": \"\",\n"
        + "                  \"doc\": \"\",\n" + "                  \"fields\": [\n" + "                    {\n"
        + "                      \"name\": \"optional_field_1\",\n" + "                      \"type\": \"long\",\n"
        + "                      \"doc\": \"\",\n" + "                      \"default\": 0\n"
        + "                    }\n" + "                  ]\n" + "                }\n" + "              ]\n"
        + "            }\n" + "          }\n" + "        ]\n" + "      }\n" + "    }\n" + "  ]\n" + "}";
    final Schema schema = new Schema.Parser().parse(schemaString);
    assertNotNull(schema);
  }
  /*
   * @Test public void testRec() { String schemaString =
   * "[{\"name\":\"employees\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Pair1081149ea1d6eb80\",\"fields\":[{\"name\":\"key\",\"type\":\"int\"},{\"name\":\"value\",\"type\":{\"type\":\"record\",\"name\":\"EmployeeInfo2\",\"fields\":[{\"name\":\"companyMap\",\"type\":[\"null\",{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"PairIntegerString\",\"fields\":[{\"name\":\"key\",\"type\":\"int\"},{\"name\":\"value\",\"type\":\"string\"}]},\"java-class\":\"java.util.HashMap\"}],\"default\":null},{\"name\":\"name\",\"type\":[\"null\",\"string\"],\"default\":null}]}}]},\"java-class\":\"java.util.HashMap\"}],\"default\":null}]";
   * final Schema schema = new Schema.Parser().parse(schemaString);
   * Assert.assertNotNull(schema);
   *
   * }
   */
  @Test
  public void testUnionFieldType() {
    String schemaString = "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": [{\"name\":\"value\", \"type\":[\"null\", \"string\",{\"type\": \"record\", \"name\": \"Cons\", \"fields\": [{\"name\":\"car\", \"type\":\"Lisp\"},{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}";
    final Schema schema = new Schema.Parser().parse(schemaString);
    Field value = schema.getField("value");
    Schema fieldSchema = value.schema();
    Schema subSchema = fieldSchema.getTypes().stream().filter((Schema s) -> s.getType() == Type.RECORD).findFirst()
        .get();
    assertTrue(subSchema.hasFields());
  }
  // "aliases" must be a JSON array of strings; anything else is rejected with
  // a descriptive message.
  @Test
  public void parseAliases() throws JsonProcessingException {
    String s1 = "{ \"aliases\" : [\"a1\", \"b1\"]}";
    ObjectMapper mapper = new ObjectMapper();
    JsonNode j1 = mapper.readTree(s1);
    Set<String> aliases = Schema.parseAliases(j1);
    assertEquals(2, aliases.size());
    assertTrue(aliases.contains("a1"));
    assertTrue(aliases.contains("b1"));
    String s2 = "{ \"aliases\" : {\"a1\": \"b1\"}}";
    JsonNode j2 = mapper.readTree(s2);
    SchemaParseException ex = assertThrows(SchemaParseException.class, () -> Schema.parseAliases(j2));
    assertTrue(ex.getMessage().contains("aliases not an array"));
    String s3 = "{ \"aliases\" : [11, \"b1\"]}";
    JsonNode j3 = mapper.readTree(s3);
    SchemaParseException ex3 = assertThrows(SchemaParseException.class, () -> Schema.parseAliases(j3));
    assertTrue(ex3.getMessage().contains("alias not a string"));
  }
  // Trailing content after a valid schema: rejected for strings and files,
  // tolerated for streams (the parser stops at the end of the schema).
  @Test
  void testContentAfterAvsc() {
    Schema.Parser parser = new Schema.Parser(Schema.NameValidator.UTF_VALIDATOR);
    parser.setValidateDefaults(true);
    assertThrows(SchemaParseException.class, () -> parser.parse("{\"type\": \"string\"}; DROP TABLE STUDENTS"));
  }
  @Test
  void testContentAfterAvscInInputStream() throws Exception {
    Schema.Parser parser = new Schema.Parser(Schema.NameValidator.UTF_VALIDATOR);
    parser.setValidateDefaults(true);
    String avsc = "{\"type\": \"string\"}; DROP TABLE STUDENTS";
    ByteArrayInputStream is = new ByteArrayInputStream(avsc.getBytes(StandardCharsets.UTF_8));
    Schema schema = parser.parse(is);
    assertNotNull(schema);
  }
  @Test
  void testContentAfterAvscInFile() throws Exception {
    File avscFile = Files.createTempFile("testContentAfterAvscInFile", null).toFile();
    try (FileWriter writer = new FileWriter(avscFile)) {
      writer.write("{\"type\": \"string\"}; DROP TABLE STUDENTS");
      writer.flush();
    }
    Schema.Parser parser = new Schema.Parser(Schema.NameValidator.UTF_VALIDATOR);
    parser.setValidateDefaults(true);
    assertThrows(SchemaParseException.class, () -> parser.parse(avscFile));
  }
  // Parsing several .avsc files together must resolve cross-file references.
  @Test
  void testParseMultipleFile() throws IOException {
    URL directory = Thread.currentThread().getContextClassLoader().getResource("multipleFile");
    File f1 = new File(directory.getPath(), "ApplicationEvent.avsc");
    File f2 = new File(directory.getPath(), "DocumentInfo.avsc");
    File f3 = new File(directory.getPath(), "MyResponse.avsc");
    Assertions.assertTrue(f1.exists(), "File not exist for test " + f1.getPath());
    Assertions.assertTrue(f2.exists(), "File not exist for test " + f2.getPath());
    Assertions.assertTrue(f3.exists(), "File not exist for test " + f3.getPath());
    final List<Schema> schemas = new Schema.Parser().parse(Arrays.asList(f1, f2, f3));
    Assertions.assertEquals(3, schemas.size());
    Schema schemaAppEvent = schemas.get(0);
    Schema schemaDocInfo = schemas.get(1);
    Schema schemaResponse = schemas.get(2);
    Assertions.assertNotNull(schemaAppEvent);
    Assertions.assertEquals(3, schemaAppEvent.getFields().size());
    Field documents = schemaAppEvent.getField("documents");
    Schema docSchema = documents.schema().getTypes().get(1).getElementType();
    Assertions.assertEquals(docSchema, schemaDocInfo);
    Assertions.assertNotNull(schemaDocInfo);
    Assertions.assertNotNull(schemaResponse);
  }
  // Pre-registered types via addTypes must be resolvable by name when parsing.
  @Test
  void add_types() {
    String schemaRecord2 = "{\"type\":\"record\", \"name\":\"record2\", \"fields\": ["
        + "  {\"name\":\"f1\", \"type\":\"record1\" }" + "]}";
    // register schema1 in schema.
    Schema schemaRecord1 = Schema.createRecord("record1", "doc", "", false);
    Schema.Parser parser = new Schema.Parser().addTypes(Collections.singleton(schemaRecord1));
    // parse schema for record2 that contains field for schema1.
    final Schema schema = parser.parse(schemaRecord2);
    final Field f1 = schema.getField("f1");
    assertNotNull(f1);
    assertEquals(schemaRecord1, f1.schema());
  }
}
| 7,132 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestBigDecimalConversion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.apache.avro;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
/**
 * Round-trip tests for {@code Conversions.BigDecimalConversion}: every
 * generated BigDecimal must survive toBytes/fromBytes unchanged.
 */
public class TestBigDecimalConversion {
  private final Conversion<BigDecimal> conversion = new Conversions.BigDecimalConversion();
  private final LogicalType bigDecimal = LogicalTypes.bigDecimal();
  private final Schema bytesSchema = conversion.getRecommendedSchema();

  /**
   * Serializes {@code d1} to bytes and back; the result must compare equal
   * (same unscaled value and scale) to the input.
   */
  @ParameterizedTest
  @MethodSource("listBigDecimal")
  void bigdec(BigDecimal d1) {
    ByteBuffer d1bytes = conversion.toBytes(d1, bytesSchema, bigDecimal);
    BigDecimal decimal1 = conversion.fromBytes(d1bytes, bytesSchema, bigDecimal);
    Assertions.assertEquals(decimal1, d1);
  }

  /**
   * Generates 100 BigDecimal test values: 50 grown by a large-magnitude step
   * (-2.7) and 50 grown by a small-magnitude step (-0.71). The
   * {@code BigDecimal(double)} constructor is used on purpose to produce
   * "messy" values with many significant digits.
   */
  static Stream<Arguments> listBigDecimal() {
    Iterator<BigDecimal> iterator = new Iterator<BigDecimal>() {
      int index = 0;
      BigDecimal step = new BigDecimal(-2.7d);
      BigDecimal current = new BigDecimal(1.0d);

      @Override
      public boolean hasNext() {
        // Pure query: all state changes happen in next(), so repeated calls
        // are safe and side-effect free.
        return index < 100;
      }

      @Override
      public BigDecimal next() {
        if (index >= 100) {
          throw new NoSuchElementException();
        }
        if (index == 50) {
          // Halfway through, restart from 1.0 with a smaller step so the
          // second half exercises small bigdecimal values.
          current = new BigDecimal(1.0d);
          step = new BigDecimal(-0.71d);
        }
        index++;
        current = current.multiply(step);
        return current;
      }
    };
    return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED), false)
        .map(Arguments::of);
  }
}
| 7,133 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaValidateDefault.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.generic.GenericData;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Function;
/**
 * Verifies that a field added to the reader schema with a record-typed
 * default value is populated from that default when the writer schema did
 * not contain the field.
 */
public class TestSchemaValidateDefault {

  /**
   * Writes an ExampleRecord with the one-field schema and reads it back with
   * the two-field schema; {@code value2} must be filled from the schema
   * default ({@code value=15}).
   */
  @Test
  public void valueReadWithCorrectDefaultValue() throws IOException {
    ExampleRecord writtenValue = new ExampleRecord(new ComplexValue(42L), new ComplexValue(666L));
    byte[] bytes = getSerializer(ExampleRecord.SCHEMA_WITH_ONE_FIELD).apply(writtenValue);
    ReflectDatumReader<ExampleRecord> reader = new ReflectDatumReader<>(ExampleRecord.SCHEMA_WITH_ONE_FIELD,
        ExampleRecord.SCHEMA_WITH_TWO_FIELDS, ReflectData.get());
    Decoder decoder = DecoderFactory.get().jsonDecoder(ExampleRecord.SCHEMA_WITH_ONE_FIELD,
        new ByteArrayInputStream(bytes));
    ExampleRecord deserializedValue = reader.read(null, decoder);
    Assertions.assertNotNull(deserializedValue.getValue2(), "Null get value2");
    Assertions.assertEquals(15L, deserializedValue.getValue2().getValue());
  }

  /**
   * Builds a function that JSON-encodes an object with {@code writerSchema}
   * via reflection.
   *
   * @param writerSchema schema used for encoding; must not be null
   * @return serializer turning an object into JSON-encoded bytes
   */
  public static Function<Object, byte[]> getSerializer(Schema writerSchema) {
    Objects.requireNonNull(writerSchema, "writerSchema must not be null");
    ReflectDatumWriter<Object> writer = new ReflectDatumWriter<>(writerSchema, new ReflectData());
    return object -> {
      try {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        Encoder encoder = EncoderFactory.get().jsonEncoder(writerSchema, stream);
        writer.write(object, encoder);
        encoder.flush();
        return stream.toByteArray();
      } catch (IOException e) {
        throw new IllegalStateException(String.format("Avro failed to encode %s to schema %s", object, writerSchema),
            e);
      }
    };
  }

  /**
   * Builds a function that JSON-decodes bytes written with
   * {@code writerSchema} into an instance read with {@code readerSchema}.
   *
   * @param readClass    target type, used only in the error message
   * @param readerSchema schema to resolve the data against
   * @param writerSchema schema the bytes were written with
   * @return deserializer turning JSON-encoded bytes into a {@code T}
   */
  public static <T> Function<byte[], T> getDeserializer(Class<T> readClass, Schema readerSchema, Schema writerSchema) {
    Objects.requireNonNull(readClass, "readClass must not be null");
    Objects.requireNonNull(readerSchema, "readerSchema must not be null");
    Objects.requireNonNull(writerSchema, "writerSchema must not be null");
    ReflectDatumReader<T> reader = new ReflectDatumReader<>(writerSchema, readerSchema, new ReflectData());
    return (byte[] bytes) -> {
      try {
        Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, new ByteArrayInputStream(bytes));
        T readValue = reader.read(null, decoder);
        return readValue;
      } catch (IOException e) {
        // Decode with an explicit charset: the JSON encoder emits UTF-8, and
        // relying on the platform default charset could garble the message.
        throw new IllegalStateException(String.format("Avro failed to decode %s to %s",
            new String(bytes, java.nio.charset.StandardCharsets.UTF_8), readClass), e);
      }
    };
  }

  // Schema of ComplexValue: a record with a single optional long "value".
  static final Schema SCHEMA = SchemaBuilder.record("org.apache.avro.TestSchemaValidateDefault.ComplexValue").fields()
      .optionalLong("value").endRecord();

  /** Simple wrapper around a nullable Long, matching {@link #SCHEMA}. */
  public static class ComplexValue {
    private Long value;

    public ComplexValue() {
    }

    public ComplexValue(Long value) {
      this.value = value;
    }

    public Long getValue() {
      return this.value;
    }

    @Override
    public String toString() {
      return "{" + "\"value\": { \"long\": " + this.value + "}}";
    }
  }

  /**
   * Record with two ComplexValue fields; the two-field schema declares a
   * default for value2 so readers can fill it when absent from the data.
   */
  public static class ExampleRecord {
    public static final Schema SCHEMA_WITH_ONE_FIELD;
    public static final Schema SCHEMA_WITH_TWO_FIELDS;
    static {
      SCHEMA_WITH_ONE_FIELD = SchemaBuilder.record("org.apache.avro.TestSchemaValidateDefault.ExampleRecord").fields()
          .name("value1").type(TestSchemaValidateDefault.SCHEMA).noDefault().endRecord();
      GenericData.Record record = new GenericData.Record(TestSchemaValidateDefault.SCHEMA);
      record.put("value", 15L);
      SCHEMA_WITH_TWO_FIELDS = SchemaBuilder.record("org.apache.avro.TestSchemaValidateDefault.ExampleRecord").fields()
          .name("value1").type(TestSchemaValidateDefault.SCHEMA).noDefault().name("value2")
          .type(TestSchemaValidateDefault.SCHEMA).withDefault(record).endRecord();
    }
    private ComplexValue value1;
    private ComplexValue value2;

    public ExampleRecord() {
    }

    public ExampleRecord(ComplexValue value1, ComplexValue value2) {
      this.value1 = value1;
      this.value2 = value2;
    }

    public ComplexValue getValue1() {
      return this.value1;
    }

    public ComplexValue getValue2() {
      return this.value2;
    }
  }
}
| 7,134 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingUnionBranch.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.util.Collections;
import java.util.List;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.A_DINT_B_DINT_STRING_UNION_RECORD1;
import static org.apache.avro.TestSchemas.A_DINT_B_DINT_UNION_RECORD1;
import static org.apache.avro.TestSchemas.BOOLEAN_SCHEMA;
import static org.apache.avro.TestSchemas.BYTES_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.DOUBLE_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA;
import static org.apache.avro.TestSchemas.FIXED_4_BYTES;
import static org.apache.avro.TestSchemas.FLOAT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_MAP_SCHEMA;
import static org.apache.avro.TestSchemas.INT_SCHEMA;
import static org.apache.avro.TestSchemas.INT_STRING_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.NULL_SCHEMA;
import static org.apache.avro.TestSchemas.STRING_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.list;
/**
 * Verifies that schema resolution reports a {@code MISSING_UNION_BRANCH} incompatibility
 * whenever the writer's union contains a branch that the reader's union lacks, and that the
 * reported detail message and JSON-pointer location identify the missing branch.
 */
public class TestSchemaCompatibilityMissingUnionBranch {
  // "Record1" and "Record2" are structurally identical (single int field) but differ in name;
  // union branches of named types must match by full name, so these are not interchangeable.
  private static final Schema RECORD1_WITH_INT = SchemaBuilder.record("Record1").fields() //
      .name("field1").type(INT_SCHEMA).noDefault() //
      .endRecord();
  private static final Schema RECORD2_WITH_INT = SchemaBuilder.record("Record2").fields() //
      .name("field1").type(INT_SCHEMA).noDefault() //
      .endRecord();
  // Two-branch unions pairing int with one other type, used as writer schemas below.
  private static final Schema UNION_INT_RECORD1 = Schema.createUnion(list(INT_SCHEMA, RECORD1_WITH_INT));
  private static final Schema UNION_INT_RECORD2 = Schema.createUnion(list(INT_SCHEMA, RECORD2_WITH_INT));
  private static final Schema UNION_INT_ENUM1_AB = Schema.createUnion(list(INT_SCHEMA, ENUM1_AB_SCHEMA));
  private static final Schema UNION_INT_FIXED_4_BYTES = Schema.createUnion(list(INT_SCHEMA, FIXED_4_BYTES));
  private static final Schema UNION_INT_BOOLEAN = Schema.createUnion(list(INT_SCHEMA, BOOLEAN_SCHEMA));
  private static final Schema UNION_INT_ARRAY_INT = Schema.createUnion(list(INT_SCHEMA, INT_ARRAY_SCHEMA));
  private static final Schema UNION_INT_MAP_INT = Schema.createUnion(list(INT_SCHEMA, INT_MAP_SCHEMA));
  private static final Schema UNION_INT_NULL = Schema.createUnion(list(INT_SCHEMA, NULL_SCHEMA));
  /**
   * Parameter table: (reader schema, writer schema, expected incompatibility messages,
   * expected locations). Locations are JSON pointers into the writer schema; e.g. "/1" is the
   * second union branch, "/fields/1/type/1" is a branch of a union inside a record field.
   */
  public static Stream<Arguments> data() {
    return Stream.of( //
        Arguments.of(INT_UNION_SCHEMA, INT_STRING_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: STRING"), Collections.singletonList("/1")),
        Arguments.of(STRING_UNION_SCHEMA, INT_STRING_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: INT"), Collections.singletonList("/0")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_RECORD1,
            Collections.singletonList("reader union lacking writer type: RECORD"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_RECORD2,
            Collections.singletonList("reader union lacking writer type: RECORD"), Collections.singletonList("/1")),
        // more info in the subset schemas
        Arguments.of(UNION_INT_RECORD1, UNION_INT_RECORD2,
            Collections.singletonList("reader union lacking writer type: RECORD"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_ENUM1_AB,
            Collections.singletonList("reader union lacking writer type: ENUM"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_FIXED_4_BYTES,
            Collections.singletonList("reader union lacking writer type: FIXED"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_BOOLEAN,
            Collections.singletonList("reader union lacking writer type: BOOLEAN"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, LONG_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: LONG"), Collections.singletonList("/0")),
        Arguments.of(INT_UNION_SCHEMA, FLOAT_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: FLOAT"), Collections.singletonList("/0")),
        Arguments.of(INT_UNION_SCHEMA, DOUBLE_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: DOUBLE"), Collections.singletonList("/0")),
        Arguments.of(INT_UNION_SCHEMA, BYTES_UNION_SCHEMA,
            Collections.singletonList("reader union lacking writer type: BYTES"), Collections.singletonList("/0")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_ARRAY_INT,
            Collections.singletonList("reader union lacking writer type: ARRAY"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_MAP_INT,
            Collections.singletonList("reader union lacking writer type: MAP"), Collections.singletonList("/1")),
        Arguments.of(INT_UNION_SCHEMA, UNION_INT_NULL,
            Collections.singletonList("reader union lacking writer type: NULL"), Collections.singletonList("/1")),
        // A writer union may lack several branches at once; each produces its own entry.
        Arguments.of(INT_UNION_SCHEMA, INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA,
            asList("reader union lacking writer type: LONG", "reader union lacking writer type: FLOAT",
                "reader union lacking writer type: DOUBLE"),
            asList("/1", "/2", "/3")),
        Arguments.of(A_DINT_B_DINT_UNION_RECORD1, A_DINT_B_DINT_STRING_UNION_RECORD1,
            Collections.singletonList("reader union lacking writer type: STRING"),
            Collections.singletonList("/fields/1/type/1")));
  }
  /**
   * Asserts each (reader, writer) pair above is incompatible with one MISSING_UNION_BRANCH
   * entry per expected detail/location.
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testMissingUnionBranch(Schema reader, Schema writer, List<String> details, List<String> location)
      throws Exception {
    List<SchemaIncompatibilityType> types = Collections.nCopies(details.size(),
        SchemaIncompatibilityType.MISSING_UNION_BRANCH);
    validateIncompatibleSchemas(reader, writer, types, details, location);
  }
}
| 7,135 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/CustomType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.util.Objects;
/**
 * An immutable value object wrapping a name string; used by tests to exercise custom
 * logical/string type conversions. Equality, hash code and string form are all derived
 * solely from the wrapped name.
 */
public final class CustomType {
  // Never null: the constructor dereferences its argument via toString().
  private final String name;

  /**
   * Creates a custom type holding the string form of the given name.
   *
   * @param name the name to wrap; must not be null
   */
  public CustomType(CharSequence name) {
    this.name = name.toString();
  }

  /** Returns the wrapped name. */
  public String getName() {
    return name;
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(name);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof CustomType)) {
      return false;
    }
    return name.equals(((CustomType) obj).name);
  }

  @Override
  public String toString() {
    return "CustomType{name='" + name + "'}";
  }
}
| 7,136 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityMissingEnumSymbols.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.util.stream.Stream;
import static org.apache.avro.TestSchemaCompatibility.validateIncompatibleSchemas;
import static org.apache.avro.TestSchemas.ENUM1_ABC_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_BC_SCHEMA;
/**
 * Verifies that schema resolution reports a {@code MISSING_ENUM_SYMBOLS} incompatibility when
 * the writer's enum contains symbols absent from the reader's enum, both for bare enums and
 * for enums nested inside a record field.
 */
public class TestSchemaCompatibilityMissingEnumSymbols {
  private static final Schema RECORD1_WITH_ENUM_AB = singleEnumFieldRecord(ENUM1_AB_SCHEMA);
  private static final Schema RECORD1_WITH_ENUM_ABC = singleEnumFieldRecord(ENUM1_ABC_SCHEMA);

  /** Builds a "Record1" schema with one required enum field named "field1". */
  private static Schema singleEnumFieldRecord(Schema enumSchema) {
    return SchemaBuilder.record("Record1") //
        .fields() //
        .name("field1").type(enumSchema).noDefault() //
        .endRecord();
  }

  /**
   * Parameter table: (reader, writer, expected missing-symbol detail, expected location).
   * The location is a JSON pointer into the writer schema.
   */
  public static Stream<Arguments> data() {
    return Stream.of( //
        Arguments.of(ENUM1_AB_SCHEMA, ENUM1_ABC_SCHEMA, "[C]", "/symbols"), //
        Arguments.of(ENUM1_BC_SCHEMA, ENUM1_ABC_SCHEMA, "[A]", "/symbols"), //
        Arguments.of(RECORD1_WITH_ENUM_AB, RECORD1_WITH_ENUM_ABC, "[C]", "/fields/0/type/symbols"));
  }

  /** Asserts each pair above is incompatible with a single MISSING_ENUM_SYMBOLS entry. */
  @ParameterizedTest
  @MethodSource("data")
  public void testTypeMismatchSchemas(Schema reader, Schema writer, String details, String location) {
    validateIncompatibleSchemas(reader, writer, SchemaIncompatibilityType.MISSING_ENUM_SYMBOLS, details, location);
  }
}
| 7,137 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileMeta.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.avro.Schema.Type;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
/**
 * Tests for user metadata handling in Avro data files: reserved-key rejection, round-tripping
 * of custom metadata, the "no metadata after create()" rule, and sync-interval validation.
 */
public class TestDataFileMeta {
  // Temporary directory injected per test by JUnit 5.
  @TempDir
  public File DIR;
  /** Keys in the reserved "avro." namespace must be rejected before the file is created. */
  @Test
  public void useReservedMeta() throws IOException {
    try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      assertThrows(AvroRuntimeException.class, () -> w.setMeta("avro.foo", "bar"));
    }
  }
  /** Custom metadata written before create() must be readable back from the file header. */
  @Test
  public void useMeta() throws IOException {
    File f = new File(DIR, "testDataFileMeta.avro");
    try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      w.setMeta("hello", "bar");
      w.create(Schema.create(Type.NULL), f);
    }
    try (DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>())) {
      assertTrue(r.getMetaKeys().contains("hello"));
      assertEquals("bar", r.getMetaString("hello"));
    }
  }
  /** Setting metadata after create() must fail: the file header has already been written. */
  @Test
  public void useMetaAfterCreate() throws IOException {
    try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      w.create(Schema.create(Type.NULL), new ByteArrayOutputStream());
      assertThrows(AvroRuntimeException.class, () -> w.setMeta("foo", "bar"));
    }
  }
  /**
   * setSyncInterval must reject every value below the minimum. The loop covers i in [-1, 32]:
   * the 33 values -1..31 are invalid; 32 is the single valid one and must not throw.
   */
  @Test
  public void blockSizeSetInvalid() {
    int exceptions = 0;
    for (int i = -1; i < 33; i++) {
      // 33 invalid, one valid
      try {
        new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(i);
      } catch (IllegalArgumentException iae) {
        exceptions++;
      }
    }
    assertEquals(33, exceptions);
  }
}
| 7,138 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestProtocol.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import com.fasterxml.jackson.databind.JsonNode;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link Protocol}: file parsing, mutually recursive type definitions, parsing-form
 * normalization, name/namespace resolution rules, property-sensitive equality, multi-part
 * parsing, and message copying.
 */
public class TestProtocol {
  /** Parses a protocol from a shared test .avpr file and checks its name. */
  @Test
  public void parse() throws IOException {
    File fic = new File("../../../share/test/schemas/namespace.avpr");
    Protocol protocol = Protocol.parse(fic);
    assertNotNull(protocol);
    assertEquals("TestNamespace", protocol.getName());
  }
  /**
   * record type 'User' contains a field of type 'Status', which contains a field
   * of type 'User'.
   */
  @Test
  public void crossProtocol() {
    String userStatus = "{ \"protocol\" : \"p1\", " + "\"types\": ["
        + "{\"name\": \"User\", \"type\": \"record\", \"fields\": [{\"name\": \"current_status\", \"type\": \"Status\"}]},\n"
        + "\n"
        + "{\"name\": \"Status\", \"type\": \"record\", \"fields\": [{\"name\": \"author\", \"type\": \"User\"}]}"
        + "]}";
    Protocol protocol = Protocol.parse(userStatus);
    Schema userSchema = protocol.getType("User");
    Schema statusSchema = protocol.getType("Status");
    // The two record schemas must reference each other directly (same object identity).
    assertSame(statusSchema, userSchema.getField("current_status").schema());
    assertSame(userSchema, statusSchema.getField("author").schema());
    String parsingFormUser = SchemaNormalization.toParsingForm(userSchema);
    assertEquals(
        "{\"name\":\"User\",\"type\":\"record\",\"fields\":[{\"name\":\"current_status\",\"type\":{\"name\":\"Status\",\"type\":\"record\",\"fields\":[{\"name\":\"author\",\"type\":\"User\"}]}}]}",
        parsingFormUser);
    String parsingFormStatus = SchemaNormalization.toParsingForm(statusSchema);
    assertEquals(
        "{\"name\":\"Status\",\"type\":\"record\",\"fields\":[{\"name\":\"author\",\"type\":{\"name\":\"User\",\"type\":\"record\",\"fields\":[{\"name\":\"current_status\",\"type\":\"Status\"}]}}]}",
        parsingFormStatus);
  }
  /**
   * When one schema with a type used before it is defined, test normalization
   * defined schema before it is used.
   */
  @Test
  void normalization() {
    final String schema = "{\n" + "  \"type\":\"record\", \"name\": \"Main\", " + "  \"fields\":[\n"
        + "    { \"name\":\"f1\", \"type\":\"Sub\" },\n" // use Sub
        + "    { \"name\":\"f2\", " + "      \"type\":{\n" + "        \"type\":\"enum\", \"name\":\"Sub\",\n" // define
        // Sub
        + "        \"symbols\":[\"OPEN\",\"CLOSE\"]\n" + "      }\n" + "    }\n" + "  ]\n" + "}";
    Schema s = new Schema.Parser().parse(schema);
    assertNotNull(s);
    // Normalization inlines the full definition of Sub at its first occurrence (f1).
    String parsingForm = SchemaNormalization.toParsingForm(s);
    assertEquals(
        "{\"name\":\"Main\",\"type\":\"record\",\"fields\":[{\"name\":\"f1\",\"type\":{\"name\":\"Sub\",\"type\":\"enum\",\"symbols\":[\"OPEN\",\"CLOSE\"]}},{\"name\":\"f2\",\"type\":\"Sub\"}]}",
        parsingForm);
  }
  /** A namespace given separately, embedded in the name, or both must resolve identically. */
  @Test
  void namespaceAndNameRules() {
    Protocol p1 = new Protocol("P", null, "foo");
    Protocol p2 = new Protocol("foo.P", null, null);
    Protocol p3 = new Protocol("foo.P", null, "bar");
    assertEquals(p1.getName(), p2.getName());
    assertEquals(p1.getNamespace(), p2.getNamespace());
    assertEquals(p1.getName(), p3.getName());
    assertEquals(p1.getNamespace(), p3.getNamespace());
    // The following situation is allowed, even if confusing, because the
    // specification describes this algorithm without specifying that the resulting
    // namespace must be non-empty.
    Protocol invalidName = new Protocol(".P", null, "ignored");
    assertNull(invalidName.getNamespace());
    assertEquals("P", invalidName.getName());
  }
  /** Protocols that differ only in custom property values must not be equal. */
  @Test
  void propEquals() {
    Protocol p1 = new Protocol("P", null, "foo");
    p1.addProp("a", "1");
    Protocol p2 = new Protocol("P", null, "foo");
    p2.addProp("a", "2");
    assertNotEquals(p1, p2);
  }
  /** Parsing a protocol string split into two fragments must match single-string parsing. */
  @Test
  void splitProtocolBuild() {
    Protocol p = new Protocol("P", null, "foo");
    p.addProp("property", "some value");
    String protocolString = p.toString();
    final int mid = protocolString.length() / 2;
    Protocol parsedStringProtocol = org.apache.avro.Protocol.parse(protocolString);
    Protocol parsedArrayOfStringProtocol = org.apache.avro.Protocol.parse(protocolString.substring(0, mid),
        protocolString.substring(mid));
    assertNotNull(parsedStringProtocol);
    assertNotNull(parsedArrayOfStringProtocol);
    assertEquals(parsedStringProtocol.toString(), parsedArrayOfStringProtocol.toString());
  }
  /** createMessage(Message, Schema) must copy name, doc and properties from the template. */
  @Test
  void copyMessage() {
    Protocol p = new Protocol("P", "protocol", "foo");
    Schema req1 = SchemaBuilder.record("foo.req1").fields().endRecord();
    Protocol.Message m1 = p.createMessage("M", "message", singletonMap("foo", "bar"), req1);
    Schema req2 = SchemaBuilder.record("foo.req2").fields().name("test").type().booleanType().noDefault().endRecord();
    Protocol.Message m2 = p.createMessage(m1, req2);
    assertEquals(m1.getName(), m2.getName());
    assertEquals(m1.getDoc(), m2.getDoc());
    assertEquals(m1.getProp("foo"), m2.getProp("foo"));
  }
}
| 7,139 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibilityEnumDefaults.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.apache.avro.TestSchemas.ENUM1_ABC_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM2_AB_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM_ABC_ENUM_DEFAULT_A_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM_AB_ENUM_DEFAULT_A_SCHEMA;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.ByteArrayOutputStream;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
/**
 * Tests Avro enum-default resolution: when a writer's enum symbol is unknown to the reader,
 * the reader's enum {@code default} symbol (if declared) is substituted; a field-level default
 * is not used for unknown symbols, and without an enum default the read fails.
 */
public class TestSchemaCompatibilityEnumDefaults {
  /** The enum default only resolves symbols; it cannot supply a value for a missing field. */
  @Test
  void enumDefaultNotAppliedWhenWriterFieldMissing() throws Exception {
    Schema writerSchema = SchemaBuilder.record("Record1").fields().name("field2").type(ENUM2_AB_SCHEMA).noDefault()
        .endRecord();
    Schema readerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_AB_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    GenericRecord datum = new GenericData.Record(writerSchema);
    datum.put("field2", new GenericData.EnumSymbol(writerSchema, "B"));
    AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
        () -> serializeWithWriterThenDeserializeWithReader(writerSchema, datum, readerSchema));
    assertEquals("Found Record1, expecting Record1, missing required field field1", avroTypeException.getMessage());
  }
  /** Writer symbol "C" is unknown to the reader, so the reader's enum default "A" is used. */
  @Test
  void enumDefaultAppliedWhenNoFieldDefaultDefined() throws Exception {
    Schema writerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_ABC_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    Schema readerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_AB_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    GenericRecord datum = new GenericData.Record(writerSchema);
    datum.put("field1", new GenericData.EnumSymbol(writerSchema, "C"));
    GenericRecord decodedDatum = serializeWithWriterThenDeserializeWithReader(writerSchema, datum, readerSchema);
    // The A is the Enum fallback value.
    assertEquals("A", decodedDatum.get("field1").toString());
  }
  /** A symbol known to both sides ("B") must be read as-is, ignoring the enum default. */
  @Test
  void enumDefaultNotAppliedWhenCompatibleSymbolIsFound() throws Exception {
    Schema writerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_ABC_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    Schema readerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_AB_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    GenericRecord datum = new GenericData.Record(writerSchema);
    datum.put("field1", new GenericData.EnumSymbol(writerSchema, "B"));
    GenericRecord decodedDatum = serializeWithWriterThenDeserializeWithReader(writerSchema, datum, readerSchema);
    assertEquals("B", decodedDatum.get("field1").toString());
  }
  /** The enum default ("A") wins over the field default ("B") for an unknown writer symbol. */
  @Test
  void enumDefaultAppliedWhenFieldDefaultDefined() throws Exception {
    Schema writerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_ABC_ENUM_DEFAULT_A_SCHEMA)
        .noDefault().endRecord();
    Schema readerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM_AB_ENUM_DEFAULT_A_SCHEMA)
        .withDefault("B").endRecord();
    GenericRecord datum = new GenericData.Record(writerSchema);
    datum.put("field1", new GenericData.EnumSymbol(writerSchema, "C"));
    GenericRecord decodedDatum = serializeWithWriterThenDeserializeWithReader(writerSchema, datum, readerSchema);
    // The A is the Enum default, which is assigned since C is not in [A,B].
    assertEquals("A", decodedDatum.get("field1").toString());
  }
  /** Without an enum default, an unknown symbol must fail even if the field has a default. */
  @Test
  void fieldDefaultNotAppliedForUnknownSymbol() throws Exception {
    Schema writerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM1_ABC_SCHEMA).noDefault()
        .endRecord();
    Schema readerSchema = SchemaBuilder.record("Record1").fields().name("field1").type(ENUM1_AB_SCHEMA).withDefault("A")
        .endRecord();
    GenericRecord datum = new GenericData.Record(writerSchema);
    datum.put("field1", new GenericData.EnumSymbol(writerSchema, "C"));
    AvroTypeException avroTypeException = assertThrows(AvroTypeException.class,
        () -> serializeWithWriterThenDeserializeWithReader(writerSchema, datum, readerSchema));
    assertEquals("No match for C", avroTypeException.getMessage());
  }
  /**
   * Serializes the datum with the writer schema (binary encoding), then deserializes the bytes
   * through a resolving decoder using the reader schema, returning the decoded record.
   */
  private GenericRecord serializeWithWriterThenDeserializeWithReader(Schema writerSchema, GenericRecord datum,
      Schema readerSchema) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
    DatumWriter<Object> datumWriter = new GenericDatumWriter<>(writerSchema);
    datumWriter.write(datum, encoder);
    encoder.flush();
    byte[] bytes = baos.toByteArray();
    Decoder decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema,
        DecoderFactory.get().binaryDecoder(bytes, null));
    DatumReader<Object> datumReader = new GenericDatumReader<>(readerSchema);
    return (GenericRecord) datumReader.read(null, decoder);
  }
}
| 7,140 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCustomSync.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.UUID;
import org.apache.avro.Schema.Type;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
/**
 * Tests custom sync-marker handling in Avro data files: invalid marker sizes are rejected,
 * random markers differ between files, and a fixed custom marker yields byte-identical files.
 */
public class TestDataFileCustomSync {
  /**
   * Writes a small string data file with two explicit block boundaries and returns its bytes.
   *
   * @param sync the 16-byte sync marker to use, or null to let the writer pick a random one
   * @return the serialized file contents
   * @throws IOException if the marker is invalid (not 16 bytes) or writing fails
   */
  private byte[] createDataFile(byte[] sync) throws IOException {
    Schema schema = Schema.create(Type.STRING);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // try-with-resources: previously the writer leaked if an append()/sync() call threw.
    try (DataFileWriter<Utf8> w = new DataFileWriter<>(new GenericDatumWriter<>(schema))) {
      w.create(schema, baos, sync);
      w.append(new Utf8("apple"));
      w.append(new Utf8("banana"));
      w.sync(); // force a block boundary so the sync marker appears mid-file
      w.append(new Utf8("celery"));
      w.append(new Utf8("date"));
      w.sync();
      w.append(new Utf8("endive"));
      w.append(new Utf8("fig"));
    }
    return baos.toByteArray();
  }
  /** Produces a 16-byte marker by MD5-hashing a random UUID plus the current time. */
  private static byte[] generateSync() {
    try {
      MessageDigest digester = MessageDigest.getInstance("MD5");
      long time = System.currentTimeMillis();
      digester.update((UUID.randomUUID() + "@" + time).getBytes(UTF_8));
      return digester.digest();
    } catch (NoSuchAlgorithmException e) {
      throw new RuntimeException(e);
    }
  }
  /** A sync marker of the wrong size (8 bytes instead of 16) must be rejected. */
  @Test
  void invalidSync() throws IOException {
    assertThrows(IOException.class, () -> {
      // Invalid size (must be 16):
      byte[] sync = new byte[8];
      createDataFile(sync);
    });
  }
  /** A randomly chosen marker must produce different bytes than a fixed custom marker. */
  @Test
  void randomSync() throws IOException {
    byte[] sync = generateSync();
    byte[] randSyncFile = createDataFile(null);
    byte[] customSyncFile = createDataFile(sync);
    assertFalse(Arrays.equals(randSyncFile, customSyncFile));
  }
  /** The same custom marker must make file output fully deterministic. */
  @Test
  void customSync() throws IOException {
    byte[] sync = generateSync();
    byte[] customSyncFile = createDataFile(sync);
    byte[] sameCustomSyncFile = createDataFile(sync);
    assertArrayEquals(customSyncFile, sameCustomSyncFile);
  }
}
| 7,141 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSystemLimitException.java | /*
* Copyright 2017 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.apache.avro.SystemLimitException.*;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import java.util.function.Function;
public class TestSystemLimitException {
  /** Delegated here for package visibility. */
  public static final int MAX_ARRAY_VM_LIMIT = SystemLimitException.MAX_ARRAY_VM_LIMIT;
  // Expected error message for a negative length prefix.
  public static final String ERROR_NEGATIVE = "Malformed data. Length is negative: -1";
  // Expected error messages when a requested size exceeds the JVM array-size ceiling.
  public static final String ERROR_VM_LIMIT_BYTES = "Cannot read arrays longer than " + MAX_ARRAY_VM_LIMIT
      + " bytes in Java library";
  public static final String ERROR_VM_LIMIT_COLLECTION = "Cannot read collections larger than " + MAX_ARRAY_VM_LIMIT
      + " items in Java library";
  public static final String ERROR_VM_LIMIT_STRING = "Cannot read strings longer than " + MAX_ARRAY_VM_LIMIT + " bytes";
  /** Delegated here for package visibility. Re-reads the limit system properties. */
  public static void resetLimits() {
    SystemLimitException.resetLimits();
  }
  // After each test: clear every limit-related system property and re-read the limits so
  // one test's custom limits cannot leak into the next.
  @AfterEach
  void reset() {
    System.clearProperty(MAX_BYTES_LENGTH_PROPERTY);
    System.clearProperty(MAX_COLLECTION_LENGTH_PROPERTY);
    System.clearProperty(MAX_STRING_LENGTH_PROPERTY);
    resetLimits();
  }
  /**
   * A helper method that tests the consistent limit handling from system
   * properties. First exercises the default (VM-only) limits, then sets a custom
   * limit of 1000 via {@code sysProperty} and verifies both the VM and the
   * custom limit are enforced.
   *
   * @param f                The function to be tested.
   * @param sysProperty      The system property used to control the custom limit.
   * @param errorVmLimit     The error message used when the property would be
   *                         over the VM limit.
   * @param errorCustomLimit The error message used when the property would be
   *                         over the custom limit of 1000.
   */
  void helpCheckSystemLimits(Function<Long, Integer> f, String sysProperty, String errorVmLimit,
      String errorCustomLimit) {
    // Correct values pass through
    assertEquals(0, f.apply(0L));
    assertEquals(1024, f.apply(1024L));
    assertEquals(MAX_ARRAY_VM_LIMIT, f.apply((long) MAX_ARRAY_VM_LIMIT));
    // Values that exceed the default system limits throw exceptions
    Exception ex = assertThrows(UnsupportedOperationException.class, () -> f.apply(Long.MAX_VALUE));
    assertEquals(errorVmLimit, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class, () -> f.apply((long) MAX_ARRAY_VM_LIMIT + 1));
    assertEquals(errorVmLimit, ex.getMessage());
    ex = assertThrows(AvroRuntimeException.class, () -> f.apply(-1L));
    assertEquals(ERROR_NEGATIVE, ex.getMessage());
    // Setting the system property to provide a custom limit.
    System.setProperty(sysProperty, Long.toString(1000L));
    // resetLimits() makes the new property value take effect.
    resetLimits();
    // Correct values pass through
    assertEquals(0, f.apply(0L));
    assertEquals(102, f.apply(102L));
    // Values that exceed the custom system limits throw exceptions
    ex = assertThrows(UnsupportedOperationException.class, () -> f.apply((long) MAX_ARRAY_VM_LIMIT + 1));
    assertEquals(errorVmLimit, ex.getMessage());
    // 1024 is fine for the VM but over the custom limit of 1000.
    ex = assertThrows(SystemLimitException.class, () -> f.apply(1024L));
    assertEquals(errorCustomLimit, ex.getMessage());
    ex = assertThrows(AvroRuntimeException.class, () -> f.apply(-1L));
    assertEquals(ERROR_NEGATIVE, ex.getMessage());
  }
  /** Byte-array length limit honors both the VM ceiling and the custom property. */
  @Test
  void testCheckMaxBytesLength() {
    helpCheckSystemLimits(SystemLimitException::checkMaxBytesLength, MAX_BYTES_LENGTH_PROPERTY, ERROR_VM_LIMIT_BYTES,
        "Bytes length 1024 exceeds maximum allowed");
  }
  /** Collection length limit (starting from an existing size of zero). */
  @Test
  void testCheckMaxCollectionLengthFromZero() {
    helpCheckSystemLimits(l -> checkMaxCollectionLength(0L, l), MAX_COLLECTION_LENGTH_PROPERTY,
        ERROR_VM_LIMIT_COLLECTION, "Collection length 1024 exceeds maximum allowed");
  }
  /** String length limit honors both the VM ceiling and the custom property. */
  @Test
  void testCheckMaxStringLength() {
    helpCheckSystemLimits(SystemLimitException::checkMaxStringLength, MAX_STRING_LENGTH_PROPERTY, ERROR_VM_LIMIT_STRING,
        "String length 1024 exceeds maximum allowed");
  }
  @Test
  void testCheckMaxCollectionLengthFromNonZero() {
    // Verifies checkMaxCollectionLength(current, toAdd) when the collection
    // already holds elements: the sum must respect both the VM array limit and
    // any custom limit, in either argument order, including long overflow.
    // Correct values pass through
    assertEquals(10, checkMaxCollectionLength(10L, 0L));
    assertEquals(MAX_ARRAY_VM_LIMIT, checkMaxCollectionLength(10L, MAX_ARRAY_VM_LIMIT - 10L));
    assertEquals(MAX_ARRAY_VM_LIMIT, checkMaxCollectionLength(MAX_ARRAY_VM_LIMIT - 10L, 10L));
    // Values that exceed the default system limits throw exceptions
    Exception ex = assertThrows(UnsupportedOperationException.class,
        () -> checkMaxCollectionLength(10L, MAX_ARRAY_VM_LIMIT - 9L));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class,
        () -> checkMaxCollectionLength(SystemLimitException.MAX_ARRAY_VM_LIMIT - 9L, 10L));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(10L, Long.MAX_VALUE - 10L));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(Long.MAX_VALUE - 10L, 10L));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    // Overflow that adds to negative
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(10L, Long.MAX_VALUE));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(Long.MAX_VALUE, 10L));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(AvroRuntimeException.class, () -> checkMaxCollectionLength(10L, -1L));
    assertEquals(ERROR_NEGATIVE, ex.getMessage());
    ex = assertThrows(AvroRuntimeException.class, () -> checkMaxCollectionLength(-1L, 10L));
    assertEquals(ERROR_NEGATIVE, ex.getMessage());
    // Setting the system property to provide a custom limit.
    // NOTE(review): the property is not cleared here; presumably a test
    // fixture (e.g. @AfterEach calling resetLimits) restores it — confirm.
    System.setProperty(MAX_COLLECTION_LENGTH_PROPERTY, Long.toString(1000L));
    resetLimits();
    // Correct values pass through
    assertEquals(10, checkMaxCollectionLength(10L, 0L));
    assertEquals(102, checkMaxCollectionLength(10L, 92L));
    assertEquals(102, checkMaxCollectionLength(92L, 10L));
    // Values that exceed the custom system limits throw exceptions
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(MAX_ARRAY_VM_LIMIT, 1));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(UnsupportedOperationException.class, () -> checkMaxCollectionLength(1, MAX_ARRAY_VM_LIMIT));
    assertEquals(ERROR_VM_LIMIT_COLLECTION, ex.getMessage());
    ex = assertThrows(SystemLimitException.class, () -> checkMaxCollectionLength(999, 25));
    assertEquals("Collection length 1024 exceeds maximum allowed", ex.getMessage());
    ex = assertThrows(SystemLimitException.class, () -> checkMaxCollectionLength(25, 999));
    assertEquals("Collection length 1024 exceeds maximum allowed", ex.getMessage());
  }
}
| 7,142 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileCorruption.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.avro.Schema.Type;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
public class TestDataFileCorruption {

  private static final File DIR = new File(System.getProperty("java.io.tmpdir"));

  /** Returns a temp-dir file named {@code test-<name>.avro}. */
  private File makeFile(String name) {
    return new File(DIR, "test-" + name + ".avro");
  }

  /**
   * Writes a three-block data file, corrupts the sync marker of the middle
   * block, and verifies that (a) reading the corrupt block fails with
   * "Invalid sync!" and (b) the reader can recover by seeking past the bad
   * block and reading the final block intact.
   */
  @Test
  void corruptedFile() throws IOException {
    Schema schema = Schema.create(Type.STRING);

    // Write a data file with three blocks (explicit sync() forces boundaries).
    DataFileWriter<Utf8> w = new DataFileWriter<>(new GenericDatumWriter<>(schema));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    w.create(schema, baos);
    w.append(new Utf8("apple"));
    w.append(new Utf8("banana"));
    w.sync();
    w.append(new Utf8("celery"));
    w.append(new Utf8("date"));
    long pos = w.sync();
    w.append(new Utf8("endive"));
    w.append(new Utf8("fig"));
    w.close();

    // Corrupt the input by inserting some zero bytes before the sync marker for
    // the penultimate block
    byte[] original = baos.toByteArray();
    int corruptPosition = (int) pos - DataFileConstants.SYNC_SIZE;
    int corruptedBytes = 3;
    byte[] corrupted = new byte[original.length + corruptedBytes];
    System.arraycopy(original, 0, corrupted, 0, corruptPosition);
    System.arraycopy(original, corruptPosition, corrupted, corruptPosition + corruptedBytes,
        original.length - corruptPosition);
    File file = makeFile("corrupt");
    file.deleteOnExit();
    FileOutputStream out = new FileOutputStream(file);
    out.write(corrupted);
    out.close();

    // Read the data file. BUG FIX: previously the whole read sequence sat in a
    // try whose catch consumed the expected exception, which made the recovery
    // assertions below unreachable dead code. Using assertThrows lets the
    // recovery path actually run.
    try (DataFileReader<Utf8> r = new DataFileReader<>(file, new GenericDatumReader<>(schema))) {
      assertEquals("apple", r.next().toString());
      assertEquals("banana", r.next().toString());
      long prevSync = r.previousSync();
      AvroRuntimeException e = assertThrows(AvroRuntimeException.class, r::next,
          "Corrupt block should throw exception");
      assertEquals("Invalid sync!", e.getCause().getMessage());
      r.sync(prevSync); // go to sync point after previous successful one
      assertEquals("endive", r.next().toString());
      assertEquals("fig", r.next().toString());
      assertFalse(r.hasNext());
    }
  }
}
| 7,143 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestReadingWritingDataInEvolvedSchemas.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Stream;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.EnumSymbol;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
/**
 * Tests schema-resolution behavior when a datum written with one schema is
 * read with an evolved (promoted, narrowed, unioned, aliased, or extended)
 * schema, for both the binary and JSON encodings.
 */
public class TestReadingWritingDataInEvolvedSchemas {

  private static final String RECORD_A = "RecordA";
  private static final String FIELD_A = "fieldA";
  private static final char LATIN_SMALL_LETTER_O_WITH_DIARESIS = '\u00F6';

  // Single-field record schemas used as writer/reader pairs below.
  private static final Schema DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().doubleType().noDefault() //
      .endRecord();
  private static final Schema FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().floatType().noDefault() //
      .endRecord();
  private static final Schema LONG_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().longType().noDefault() //
      .endRecord();
  private static final Schema INT_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().intType().noDefault() //
      .endRecord();
  private static final Schema UNION_INT_LONG_FLOAT_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().doubleType().and().floatType().and().longType().and().intType().endUnion()
      .noDefault() //
      .endRecord();
  private static final Schema STRING_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().stringType().noDefault() //
      .endRecord();
  private static final Schema BYTES_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().bytesType().noDefault() //
      .endRecord();
  private static final Schema UNION_STRING_BYTES_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().stringType().and().bytesType().endUnion().noDefault() //
      .endRecord();
  private static final Schema ENUM_AB = SchemaBuilder.enumeration("Enum1").symbols("A", "B");
  private static final Schema ENUM_AB_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type(ENUM_AB).noDefault() //
      .endRecord();
  private static final Schema ENUM_ABC = SchemaBuilder.enumeration("Enum1").symbols("A", "B", "C");
  private static final Schema ENUM_ABC_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type(ENUM_ABC).noDefault() //
      .endRecord();
  private static final Schema UNION_INT_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().intType().endUnion().noDefault() //
      .endRecord();
  private static final Schema UNION_LONG_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().longType().endUnion().noDefault() //
      .endRecord();
  private static final Schema UNION_FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().floatType().endUnion().noDefault() //
      .endRecord();
  private static final Schema UNION_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().doubleType().endUnion().noDefault() //
      .endRecord();
  private static final Schema UNION_LONG_FLOAT_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().floatType().and().longType().endUnion().noDefault() //
      .endRecord();
  private static final Schema UNION_FLOAT_DOUBLE_RECORD = SchemaBuilder.record(RECORD_A) //
      .fields() //
      .name(FIELD_A).type().unionOf().floatType().and().doubleType().endUnion().noDefault() //
      .endRecord();

  /** Encodings each test is run against via {@code @EnumSource}. */
  enum EncoderType {
    BINARY, JSON
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void doubleWrittenWithUnionSchemaIsConvertedToDoubleSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_LONG_FLOAT_DOUBLE_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42.0);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(DOUBLE_RECORD, writer, encoded, encoderType);
    assertEquals(42.0, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsConvertedToUnionLongFloatSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_LONG_FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42L, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsConvertedToDoubleSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_DOUBLE_RECORD, writer, encoded, encoderType);
    assertEquals(42.0, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void intWrittenWithUnionSchemaIsConvertedToDoubleSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_DOUBLE_RECORD, writer, encoded, encoderType);
    assertEquals(42.0, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void intWrittenWithUnionSchemaIsReadableByFloatSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42.0f, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void intWrittenWithUnionSchemaIsReadableByFloatUnionSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42.0f, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsReadableByFloatSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42.0f, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsReadableByFloatUnionSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42.0f, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsConvertedToLongFloatUnionSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_LONG_FLOAT_RECORD, writer, encoded, encoderType);
    assertEquals(42L, decoded.get(FIELD_A));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsConvertedToFloatDoubleUnionSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_LONG_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(UNION_FLOAT_DOUBLE_RECORD, writer, encoded, encoderType);
    assertEquals(42.0F, decoded.get(FIELD_A));
  }

  // Promotions that would lose precision or range must be rejected.

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void doubleWrittenWithUnionSchemaIsNotConvertedToFloatSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_LONG_FLOAT_DOUBLE_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42.0);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    AvroTypeException exception = Assertions.assertThrows(AvroTypeException.class,
        () -> decodeGenericBlob(FLOAT_RECORD, writer, encoded, encoderType));
    Assertions.assertEquals("Found double, expecting float", exception.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void floatWrittenWithUnionSchemaIsNotConvertedToLongSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_LONG_FLOAT_DOUBLE_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42.0f);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    AvroTypeException exception = Assertions.assertThrows(AvroTypeException.class,
        () -> decodeGenericBlob(LONG_RECORD, writer, encoded, encoderType));
    Assertions.assertEquals("Found float, expecting long", exception.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void longWrittenWithUnionSchemaIsNotConvertedToIntSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_LONG_FLOAT_DOUBLE_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42L);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    AvroTypeException exception = Assertions.assertThrows(AvroTypeException.class,
        () -> decodeGenericBlob(INT_RECORD, writer, encoded, encoderType));
    Assertions.assertEquals("Found long, expecting int", exception.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void intWrittenWithUnionSchemaIsConvertedToAllNumberSchemas(EncoderType encoderType) throws Exception {
    Schema writer = UNION_INT_LONG_FLOAT_DOUBLE_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    assertEquals(42.0, decodeGenericBlob(DOUBLE_RECORD, writer, encoded, encoderType).get(FIELD_A));
    assertEquals(42.0f, decodeGenericBlob(FLOAT_RECORD, writer, encoded, encoderType).get(FIELD_A));
    assertEquals(42L, decodeGenericBlob(LONG_RECORD, writer, encoded, encoderType).get(FIELD_A));
    assertEquals(42, decodeGenericBlob(INT_RECORD, writer, encoded, encoderType).get(FIELD_A));
  }

  // String <-> bytes promotions, in both directions, for ASCII and non-ASCII.

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void asciiStringWrittenWithUnionSchemaIsConvertedToBytesSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_STRING_BYTES_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, "42");
    byte[] encoded = encodeGenericBlob(record, encoderType);
    ByteBuffer actual = (ByteBuffer) decodeGenericBlob(BYTES_RECORD, writer, encoded, encoderType).get(FIELD_A);
    assertArrayEquals("42".getBytes(StandardCharsets.UTF_8), actual.array());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void utf8StringWrittenWithUnionSchemaIsConvertedToBytesSchema(EncoderType encoderType) throws Exception {
    String goeran = String.format("G%sran", LATIN_SMALL_LETTER_O_WITH_DIARESIS);
    Schema writer = UNION_STRING_BYTES_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, goeran);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    ByteBuffer actual = (ByteBuffer) decodeGenericBlob(BYTES_RECORD, writer, encoded, encoderType).get(FIELD_A);
    assertArrayEquals(goeran.getBytes(StandardCharsets.UTF_8), actual.array());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void asciiBytesWrittenWithUnionSchemaIsConvertedToStringSchema(EncoderType encoderType) throws Exception {
    Schema writer = UNION_STRING_BYTES_RECORD;
    ByteBuffer buf = ByteBuffer.wrap("42".getBytes(StandardCharsets.UTF_8));
    Record record = defaultRecordWithSchema(writer, FIELD_A, buf);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    CharSequence read = (CharSequence) decodeGenericBlob(STRING_RECORD, writer, encoded, encoderType).get(FIELD_A);
    assertEquals("42", read.toString());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void utf8BytesWrittenWithUnionSchemaIsConvertedToStringSchema(EncoderType encoderType) throws Exception {
    String goeran = String.format("G%sran", LATIN_SMALL_LETTER_O_WITH_DIARESIS);
    Schema writer = UNION_STRING_BYTES_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, goeran);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    CharSequence read = (CharSequence) decodeGenericBlob(STRING_RECORD, writer, encoded, encoderType).get(FIELD_A);
    assertEquals(goeran, read.toString());
  }

  // Enum evolution: adding/removing symbols.

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void enumRecordCanBeReadWithExtendedEnumSchema(EncoderType encoderType) throws Exception {
    Schema writer = ENUM_AB_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, new EnumSymbol(ENUM_AB, "A"));
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(ENUM_ABC_RECORD, writer, encoded, encoderType);
    assertEquals("A", decoded.get(FIELD_A).toString());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void enumRecordWithExtendedSchemaCanBeReadWithOriginalEnumSchemaIfOnlyOldValues(EncoderType encoderType)
      throws Exception {
    Schema writer = ENUM_ABC_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, new EnumSymbol(ENUM_ABC, "A"));
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(ENUM_AB_RECORD, writer, encoded, encoderType);
    assertEquals("A", decoded.get(FIELD_A).toString());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void enumRecordWithExtendedSchemaCanNotBeReadIfNewValuesAreUsed(EncoderType encoderType) throws Exception {
    Schema writer = ENUM_ABC_RECORD;
    Record record = defaultRecordWithSchema(writer, FIELD_A, new EnumSymbol(ENUM_ABC, "C"));
    byte[] encoded = encodeGenericBlob(record, encoderType);
    AvroTypeException exception = Assertions.assertThrows(AvroTypeException.class,
        () -> decodeGenericBlob(ENUM_AB_RECORD, writer, encoded, encoderType));
    Assertions.assertEquals("No match for C", exception.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void recordWrittenWithExtendedSchemaCanBeReadWithOriginalSchemaButLossOfData(EncoderType encoderType)
      throws Exception {
    Schema writer = SchemaBuilder.record(RECORD_A) //
        .fields() //
        .name("newTopField").type().stringType().noDefault() //
        .name(FIELD_A).type().intType().noDefault() //
        .endRecord();
    Record record = defaultRecordWithSchema(writer, FIELD_A, 42);
    record.put("newTopField", "not decoded");
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(INT_RECORD, writer, encoded, encoderType);
    assertEquals(42, decoded.get(FIELD_A));
    // The writer-only field is dropped by the reader schema, so accessing it
    // must fail. (Rewritten from try/catch + fail() to assertThrows for
    // consistency with the other negative tests in this class.)
    AvroRuntimeException ex = Assertions.assertThrows(AvroRuntimeException.class, () -> decoded.get("newTopField"));
    Assertions.assertEquals("Not a valid schema field: newTopField", ex.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void readerWithoutDefaultValueThrowsException(EncoderType encoderType) throws Exception {
    Schema reader = SchemaBuilder.record(RECORD_A) //
        .fields() //
        .name("newField").type().intType().noDefault() //
        .name(FIELD_A).type().intType().noDefault() //
        .endRecord();
    Record record = defaultRecordWithSchema(INT_RECORD, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    AvroTypeException exception = Assertions.assertThrows(AvroTypeException.class,
        () -> decodeGenericBlob(reader, INT_RECORD, encoded, encoderType));
    Assertions.assertTrue(exception.getMessage().contains("missing required field newField"), exception.getMessage());
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void readerWithDefaultValueIsApplied(EncoderType encoderType) throws Exception {
    Schema reader = SchemaBuilder.record(RECORD_A) //
        .fields() //
        .name("newFieldWithDefault").type().intType().intDefault(314) //
        .name(FIELD_A).type().intType().noDefault() //
        .endRecord();
    Record record = defaultRecordWithSchema(INT_RECORD, FIELD_A, 42);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    Record decoded = decodeGenericBlob(reader, INT_RECORD, encoded, encoderType);
    assertEquals(42, decoded.get(FIELD_A));
    assertEquals(314, decoded.get("newFieldWithDefault"));
  }

  @ParameterizedTest
  @EnumSource(EncoderType.class)
  void aliasesInSchema(EncoderType encoderType) throws Exception {
    Schema writer = new Schema.Parser()
        .parse("{\"namespace\": \"example.avro\", \"type\": \"record\", \"name\": \"User\", \"fields\": ["
            + "{\"name\": \"name\", \"type\": \"int\"}\n" + "]}\n");
    Schema reader = new Schema.Parser()
        .parse("{\"namespace\": \"example.avro\", \"type\": \"record\", \"name\": \"User\", \"fields\": ["
            + "{\"name\": \"fname\", \"type\": \"int\", \"aliases\" : [ \"name\" ]}\n" + "]}\n");
    GenericData.Record record = defaultRecordWithSchema(writer, "name", 1);
    byte[] encoded = encodeGenericBlob(record, encoderType);
    // NOTE(review): decode uses `reader` as both expected and writer schema,
    // so alias resolution against the original writer schema is not actually
    // exercised here — confirm whether (reader, writer) was intended.
    GenericData.Record decoded = decodeGenericBlob(reader, reader, encoded, encoderType);
    assertEquals(1, decoded.get("fname"));
  }

  /** Builds a record of {@code schema} with a single field set to {@code value}. */
  private <T> Record defaultRecordWithSchema(Schema schema, String key, T value) {
    Record data = new GenericData.Record(schema);
    data.put(key, value);
    return data;
  }

  /** Serializes {@code data} with its own schema using the requested encoding. */
  private byte[] encodeGenericBlob(GenericRecord data, EncoderType encoderType) throws IOException {
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(data.getSchema());
    ByteArrayOutputStream outStream = new ByteArrayOutputStream();
    Encoder encoder = encoderType == EncoderType.BINARY ? EncoderFactory.get().binaryEncoder(outStream, null)
        : EncoderFactory.get().jsonEncoder(data.getSchema(), outStream);
    writer.write(data, encoder);
    encoder.flush();
    outStream.close();
    return outStream.toByteArray();
  }

  /**
   * Deserializes {@code blob} (written with {@code schemaOfBlob}) resolving it
   * against {@code expectedSchema}; returns null for a null blob.
   */
  private Record decodeGenericBlob(Schema expectedSchema, Schema schemaOfBlob, byte[] blob, EncoderType encoderType)
      throws IOException {
    if (blob == null) {
      return null;
    }
    GenericDatumReader<Record> reader = new GenericDatumReader<>();
    reader.setExpected(expectedSchema);
    reader.setSchema(schemaOfBlob);
    Decoder decoder = encoderType == EncoderType.BINARY ? DecoderFactory.get().binaryDecoder(blob, null)
        : DecoderFactory.get().jsonDecoder(schemaOfBlob, new ByteArrayInputStream(blob));
    return reader.read(null, decoder);
  }
}
| 7,144 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import com.fasterxml.jackson.databind.node.NullNode;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema.Field.Order;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecordBuilder;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
public class TestSchemaBuilder {
  // Per-test temporary directory, created and cleaned up by JUnit 5.
  @TempDir
  public File DIR;
  /**
   * Builds a record schema with the fluent builder and verifies its name,
   * namespace, aliases and all four field shapes: plain, documented,
   * nullable-with-default, and an explicit null-first union.
   */
  @Test
  void record() {
    Schema schema = SchemaBuilder.record("myrecord").namespace("org.example").aliases("oldrecord").fields().name("f0")
        .aliases("f0alias").type().stringType().noDefault().name("f1").doc("This is f1").type().longType().noDefault()
        .name("f2").type().nullable().booleanType().booleanDefault(true).name("f3").type().unionOf().nullType().and()
        .booleanType().endUnion().nullDefault().endRecord();
    assertEquals("myrecord", schema.getName());
    assertEquals("org.example", schema.getNamespace());
    // Aliases come back fully qualified with the record's namespace.
    assertEquals("org.example.oldrecord", schema.getAliases().iterator().next());
    assertFalse(schema.isError());
    List<Schema.Field> fields = schema.getFields();
    assertEquals(4, fields.size());
    assertEquals(new Schema.Field("f0", Schema.create(Schema.Type.STRING)), fields.get(0));
    assertTrue(fields.get(0).aliases().contains("f0alias"));
    assertEquals(new Schema.Field("f1", Schema.create(Schema.Type.LONG), "This is f1"), fields.get(1));
    // nullable() with a non-null default puts the value type first: [boolean, null].
    List<Schema> types = new ArrayList<>();
    types.add(Schema.create(Schema.Type.BOOLEAN));
    types.add(Schema.create(Schema.Type.NULL));
    Schema optional = Schema.createUnion(types);
    assertEquals(new Schema.Field("f2", optional, null, true), fields.get(2));
    // unionOf().nullType() lists null first: [null, boolean].
    List<Schema> types2 = new ArrayList<>();
    types2.add(Schema.create(Schema.Type.NULL));
    types2.add(Schema.create(Schema.Type.BOOLEAN));
    Schema optional2 = Schema.createUnion(types2);
    // A Java null default is NOT equivalent to the explicit JSON null-default marker.
    assertNotEquals(new Schema.Field("f3", optional2, null, (Object) null), fields.get(3));
    assertEquals(new Schema.Field("f3", optional2, null, Schema.Field.NULL_DEFAULT_VALUE), fields.get(3));
  }
@Test
void doc() {
Schema s = SchemaBuilder.fixed("myfixed").doc("mydoc").size(1);
assertEquals("mydoc", s.getDoc());
}
@Test
void props() {
Schema s = SchemaBuilder.builder().intBuilder().prop("p1", "v1").prop("p2", "v2").prop("p2", "v2real") // overwrite
.endInt();
int size = s.getObjectProps().size();
assertEquals(2, size);
assertEquals("v1", s.getProp("p1"));
assertEquals("v2real", s.getProp("p2"));
}
@Test
void objectProps() {
Schema s = SchemaBuilder.builder().intBuilder().prop("booleanProp", true).prop("intProp", Integer.MAX_VALUE)
.prop("longProp", Long.MAX_VALUE).prop("floatProp", 1.0f).prop("doubleProp", Double.MAX_VALUE)
.prop("byteProp", new byte[] { 0x41, 0x42, 0x43 }).prop("stringProp", "abc").endInt();
// object properties
assertEquals(7, s.getObjectProps().size());
assertTrue(s.getObjectProp("booleanProp") instanceof Boolean);
assertEquals(true, s.getObjectProp("booleanProp"));
assertTrue(s.getObjectProp("intProp") instanceof Integer);
assertEquals(Integer.MAX_VALUE, s.getObjectProp("intProp"));
assertTrue(s.getObjectProp("intProp") instanceof Integer);
assertTrue(s.getObjectProp("longProp") instanceof Long);
assertEquals(Long.MAX_VALUE, s.getObjectProp("longProp"));
assertTrue(s.getObjectProp("floatProp") instanceof Float);
// float converts to double
assertEquals(1.0f, s.getObjectProp("floatProp"));
assertTrue(s.getObjectProp("doubleProp") instanceof Double);
assertEquals(Double.MAX_VALUE, s.getObjectProp("doubleProp"));
// byte[] converts to string
assertTrue(s.getObjectProp("byteProp") instanceof byte[]);
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) s.getObjectProp("byteProp"));
assertTrue(s.getObjectProp("stringProp") instanceof String);
assertEquals("abc", s.getObjectProp("stringProp"));
}
@Test
void fieldObjectProps() {
  // Same as objectProps, but for properties attached to a record field.
  Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("booleanProp", true)
      .prop("intProp", Integer.MAX_VALUE).prop("longProp", Long.MAX_VALUE).prop("floatProp", 1.0f)
      .prop("doubleProp", Double.MAX_VALUE).prop("byteProp", new byte[] { 0x41, 0x42, 0x43 })
      .prop("stringProp", "abc").type().intType().noDefault().endRecord();
  Schema.Field f = s.getField("myField");
  // object properties
  assertEquals(7, f.getObjectProps().size());
  assertTrue(f.getObjectProp("booleanProp") instanceof Boolean);
  assertEquals(true, f.getObjectProp("booleanProp"));
  assertTrue(f.getObjectProp("intProp") instanceof Integer);
  assertEquals(Integer.MAX_VALUE, f.getObjectProp("intProp"));
  assertTrue(f.getObjectProp("longProp") instanceof Long);
  assertEquals(Long.MAX_VALUE, f.getObjectProp("longProp"));
  // float is preserved as Float, not widened to Double
  assertTrue(f.getObjectProp("floatProp") instanceof Float);
  assertEquals(1.0f, f.getObjectProp("floatProp"));
  assertTrue(f.getObjectProp("doubleProp") instanceof Double);
  assertEquals(Double.MAX_VALUE, f.getObjectProp("doubleProp"));
  // byte[] is preserved as byte[], not converted to a String
  assertTrue(f.getObjectProp("byteProp") instanceof byte[]);
  assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) f.getObjectProp("byteProp"));
  assertTrue(f.getObjectProp("stringProp") instanceof String);
  assertEquals("abc", f.getObjectProp("stringProp"));
  assertEquals("abc", f.getObjectProp("stringProp", "default"));
  // an absent key falls back to the supplied default
  assertEquals("default", f.getObjectProp("unknown", "default"));
}
// A Collection-valued property keeps both element order and element types.
@Test
void arrayObjectProp() {
List<Object> values = new ArrayList<>();
values.add(true);
values.add(Integer.MAX_VALUE);
values.add(Long.MAX_VALUE);
values.add(1.0f);
values.add(Double.MAX_VALUE);
values.add(new byte[] { 0x41, 0x42, 0x43 });
values.add("abc");
Schema s = SchemaBuilder.builder().intBuilder().prop("arrayProp", values).endInt();
// object properties
assertEquals(1, s.getObjectProps().size());
assertTrue(s.getObjectProp("arrayProp") instanceof Collection);
@SuppressWarnings("unchecked")
Collection<Object> valueCollection = (Collection<Object>) s.getObjectProp("arrayProp");
// iterate in insertion order and check each element round-tripped unchanged
Iterator<Object> iter = valueCollection.iterator();
assertEquals(7, valueCollection.size());
assertEquals(true, iter.next());
assertEquals(Integer.MAX_VALUE, iter.next());
assertEquals(Long.MAX_VALUE, iter.next());
assertEquals(1.0f, iter.next());
assertEquals(Double.MAX_VALUE, iter.next());
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) iter.next());
assertEquals("abc", iter.next());
}
// Same as arrayObjectProp, but the Collection property lives on a record field.
@Test
void fieldArrayObjectProp() {
List<Object> values = new ArrayList<>();
values.add(true);
values.add(Integer.MAX_VALUE);
values.add(Long.MAX_VALUE);
values.add(1.0f);
values.add(Double.MAX_VALUE);
values.add(new byte[] { 0x41, 0x42, 0x43 });
values.add("abc");
Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("arrayProp", values).type()
.intType().noDefault().endRecord();
Schema.Field f = s.getField("myField");
// object properties
assertEquals(1, f.getObjectProps().size());
assertTrue(f.getObjectProp("arrayProp") instanceof Collection);
@SuppressWarnings("unchecked")
Collection<Object> valueCollection = (Collection<Object>) f.getObjectProp("arrayProp");
// iterate in insertion order and check each element round-tripped unchanged
Iterator<Object> iter = valueCollection.iterator();
assertEquals(7, valueCollection.size());
assertEquals(true, iter.next());
assertEquals(Integer.MAX_VALUE, iter.next());
assertEquals(Long.MAX_VALUE, iter.next());
assertEquals(1.0f, iter.next());
assertEquals(Double.MAX_VALUE, iter.next());
assertArrayEquals(new byte[] { 0x41, 0x42, 0x43 }, (byte[]) iter.next());
assertEquals("abc", iter.next());
}
// A Map-valued property keeps each value's Java type when read back.
@Test
void mapObjectProp() {
Map<String, Object> values = new HashMap<>();
values.put("booleanKey", true);
values.put("intKey", Integer.MAX_VALUE);
values.put("longKey", Long.MAX_VALUE);
values.put("floatKey", 1.0f);
values.put("doubleKey", Double.MAX_VALUE);
values.put("byteKey", new byte[] { 0x41, 0x42, 0x43 });
values.put("stringKey", "abc");
Schema s = SchemaBuilder.builder().intBuilder().prop("mapProp", values).endInt();
// object properties
assertTrue(s.getObjectProp("mapProp") instanceof Map);
@SuppressWarnings("unchecked")
Map<String, Object> valueMap = (Map<String, Object>) s.getObjectProp("mapProp");
assertEquals(values.size(), valueMap.size());
assertTrue(valueMap.get("booleanKey") instanceof Boolean);
assertEquals(true, valueMap.get("booleanKey"));
assertTrue(valueMap.get("intKey") instanceof Integer);
assertEquals(Integer.MAX_VALUE, valueMap.get("intKey"));
assertTrue(valueMap.get("longKey") instanceof Long);
assertEquals(Long.MAX_VALUE, valueMap.get("longKey"));
assertTrue(valueMap.get("floatKey") instanceof Float);
assertEquals(1.0f, valueMap.get("floatKey"));
assertTrue(valueMap.get("doubleKey") instanceof Double);
assertEquals(Double.MAX_VALUE, valueMap.get("doubleKey"));
assertTrue(valueMap.get("byteKey") instanceof byte[]);
// 0x41 0x42 0x43 is "ABC" in UTF-8
assertArrayEquals("ABC".getBytes(StandardCharsets.UTF_8), (byte[]) valueMap.get("byteKey"));
assertTrue(valueMap.get("stringKey") instanceof String);
assertEquals("abc", valueMap.get("stringKey"));
}
@Test
void fieldMapObjectProp() {
  // Same as mapObjectProp, but the Map property lives on a record field.
  Map<String, Object> values = new HashMap<>();
  values.put("booleanKey", true);
  values.put("intKey", Integer.MAX_VALUE);
  values.put("longKey", Long.MAX_VALUE);
  values.put("floatKey", 1.0f);
  values.put("doubleKey", Double.MAX_VALUE);
  values.put("byteKey", new byte[] { 0x41, 0x42, 0x43 });
  values.put("stringKey", "abc");
  Schema s = SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("mapProp", values).type()
      .intType().noDefault().endRecord();
  Schema.Field f = s.getField("myField");
  // object properties
  assertTrue(f.getObjectProp("mapProp") instanceof Map);
  @SuppressWarnings("unchecked")
  Map<String, Object> valueMap = (Map<String, Object>) f.getObjectProp("mapProp");
  assertEquals(values.size(), valueMap.size());
  assertTrue(valueMap.get("booleanKey") instanceof Boolean);
  assertEquals(true, valueMap.get("booleanKey"));
  assertTrue(valueMap.get("intKey") instanceof Integer);
  assertEquals(Integer.MAX_VALUE, valueMap.get("intKey"));
  assertTrue(valueMap.get("longKey") instanceof Long);
  assertEquals(Long.MAX_VALUE, valueMap.get("longKey"));
  assertTrue(valueMap.get("floatKey") instanceof Float);
  assertEquals(1.0f, valueMap.get("floatKey"));
  assertTrue(valueMap.get("doubleKey") instanceof Double);
  assertEquals(Double.MAX_VALUE, valueMap.get("doubleKey"));
  assertTrue(valueMap.get("byteKey") instanceof byte[]);
  // Specify the charset explicitly: the no-arg String(byte[]) constructor uses
  // the platform default charset, which makes the test environment-dependent.
  // This also matches the UTF-8 usage in mapObjectProp.
  assertEquals("ABC", new String((byte[]) valueMap.get("byteKey"), StandardCharsets.UTF_8));
  assertTrue(valueMap.get("stringKey") instanceof String);
  assertEquals("abc", valueMap.get("stringKey"));
}
@Test
void nullObjectProp() {
  // A null property value on a schema is rejected by the builder.
  assertThrows(AvroRuntimeException.class,
      () -> SchemaBuilder.builder().intBuilder().prop("nullProp", (Object) null).endInt());
}
@Test
void fieldNullObjectProp() {
  // A null property value on a record field is rejected by the builder.
  assertThrows(AvroRuntimeException.class,
      () -> SchemaBuilder.builder().record("MyRecord").fields().name("myField").prop("nullProp", (Object) null)
          .type().intType().noDefault().endRecord());
}
// The four equivalent ways of stating a record's namespace all resolve to the
// same name/namespace/fullname triple; a namespace() call after a fully
// qualified name is ignored.
@Test
void namespaces() {
Schema s1 = SchemaBuilder.record("myrecord").namespace("org.example").fields().name("myint").type().intType()
.noDefault().endRecord();
Schema s2 = SchemaBuilder.record("org.example.myrecord").fields().name("myint").type().intType().noDefault()
.endRecord();
Schema s3 = SchemaBuilder.record("org.example.myrecord").namespace("org.example2").fields().name("myint").type()
.intType().noDefault().endRecord();
Schema s4 = SchemaBuilder.builder("org.example").record("myrecord").fields().name("myint").type().intType()
.noDefault().endRecord();
assertEquals("myrecord", s1.getName());
assertEquals("myrecord", s2.getName());
assertEquals("myrecord", s3.getName());
assertEquals("myrecord", s4.getName());
assertEquals("org.example", s1.getNamespace());
assertEquals("org.example", s2.getNamespace());
assertEquals("org.example", s3.getNamespace()); // namespace call is ignored
assertEquals("org.example", s4.getNamespace());
assertEquals("org.example.myrecord", s1.getFullName());
assertEquals("org.example.myrecord", s2.getFullName());
assertEquals("org.example.myrecord", s3.getFullName());
assertEquals("org.example.myrecord", s4.getFullName());
}
@Test
void missingRecordName() {
  // A null record name must be rejected.
  assertThrows(NullPointerException.class,
      () -> SchemaBuilder.record(null).fields().name("f0").type().stringType().noDefault().endRecord());
}
@Test
void testBoolean() {
  // Bare boolean schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.BOOLEAN, SchemaBuilder.builder().booleanType());
  assertEquals(expected, SchemaBuilder.builder().booleanBuilder().prop("p", "v").endBoolean());
}
@Test
void testInt() {
  // Bare int schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.INT, SchemaBuilder.builder().intType());
  assertEquals(expected, SchemaBuilder.builder().intBuilder().prop("p", "v").endInt());
}
@Test
void testLong() {
  // Bare long schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.LONG, SchemaBuilder.builder().longType());
  assertEquals(expected, SchemaBuilder.builder().longBuilder().prop("p", "v").endLong());
}
@Test
void testFloat() {
  // Bare float schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.FLOAT, SchemaBuilder.builder().floatType());
  assertEquals(expected, SchemaBuilder.builder().floatBuilder().prop("p", "v").endFloat());
}
// Named "duble" because "double" is a Java reserved word.
@Test
void duble() {
  // Bare double schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.DOUBLE, SchemaBuilder.builder().doubleType());
  assertEquals(expected, SchemaBuilder.builder().doubleBuilder().prop("p", "v").endDouble());
}
@Test
void string() {
  // Bare string schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.STRING, SchemaBuilder.builder().stringType());
  assertEquals(expected, SchemaBuilder.builder().stringBuilder().prop("p", "v").endString());
}
@Test
void bytes() {
  // Bare bytes schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.BYTES, SchemaBuilder.builder().bytesType());
  assertEquals(expected, SchemaBuilder.builder().bytesBuilder().prop("p", "v").endBytes());
}
@Test
void testNull() {
  // Bare null schema and the prop-carrying builder must agree.
  Schema expected = primitive(Schema.Type.NULL, SchemaBuilder.builder().nullType());
  assertEquals(expected, SchemaBuilder.builder().nullBuilder().prop("p", "v").endNull());
}
// Shared helper for the primitive-type tests above: asserts that a bare schema
// can also be created by type name, then returns a copy of the primitive
// schema carrying the custom prop "p"="v" for comparison with a built schema.
private Schema primitive(Schema.Type type, Schema bare) {
// test creation of bare schema by name
Schema bareByName = SchemaBuilder.builder().type(type.getName());
assertEquals(Schema.create(type), bareByName);
assertEquals(bareByName, bare);
// return a schema with custom prop set
Schema p = Schema.create(type);
p.addProp("p", "v");
return p;
}
// @Test
// public void testError() {
// Schema schema = SchemaBuilder
// .errorType("myerror")
// .requiredString("message")
// .build();
//
// Assert.assertEquals("myerror", schema.getName());
// Assert.assertTrue(schema.isError());
// }
// A record may reference itself by name ("LongList" linked-list shape); the
// self-reference inside the optional union must resolve to the same record.
@Test
void recursiveRecord() {
Schema schema = SchemaBuilder.record("LongList").fields().name("value").type().longType().noDefault().name("next")
.type().optional().type("LongList").endRecord();
assertEquals("LongList", schema.getName());
List<Schema.Field> fields = schema.getFields();
assertEquals(2, fields.size());
assertEquals(new Schema.Field("value", Schema.create(Schema.Type.LONG), null), fields.get(0));
// optional() produces a [null, LongList] union with a null default
assertEquals(Schema.Type.UNION, fields.get(1).schema().getType());
assertEquals(Schema.Type.NULL, fields.get(1).schema().getTypes().get(0).getType());
Schema recordSchema = fields.get(1).schema().getTypes().get(1);
assertEquals(Schema.Type.RECORD, recordSchema.getType());
assertEquals("LongList", recordSchema.getName());
assertEquals(NullNode.getInstance(), fields.get(1).defaultValue());
}
@Test
void testEnum() {
  // Enum built via SchemaBuilder matches one created directly, props included.
  Schema expected = Schema.createEnum("myenum", null, null, Arrays.asList("a", "b"));
  expected.addProp("p", "v");
  assertEquals(expected, SchemaBuilder.enumeration("myenum").prop("p", "v").symbols("a", "b"));
}
@Test
void enumWithDefault() {
  // Same as testEnum, but with an enum default symbol carried through.
  String enumDefault = "a";
  Schema expected = Schema.createEnum("myenum", null, null, Arrays.asList("a", "b"), enumDefault);
  expected.addProp("p", "v");
  assertEquals(expected,
      SchemaBuilder.enumeration("myenum").prop("p", "v").defaultSymbol(enumDefault).symbols("a", "b"));
}
@Test
void fixed() {
  // Fixed type built via SchemaBuilder matches direct creation, alias included.
  Schema expected = Schema.createFixed("myfixed", null, null, 16);
  expected.addAlias("myOldFixed");
  assertEquals(expected, SchemaBuilder.fixed("myfixed").aliases("myOldFixed").size(16));
}
// Three equivalent ways to build an array schema: items builder, explicit
// items schema, and item type by name (with a prop on the array itself).
@Test
void array() {
Schema longSchema = Schema.create(Schema.Type.LONG);
Schema expected = Schema.createArray(longSchema);
Schema schema1 = SchemaBuilder.array().items().longType();
assertEquals(expected, schema1);
Schema schema2 = SchemaBuilder.array().items(longSchema);
assertEquals(expected, schema2);
Schema schema3 = SchemaBuilder.array().prop("p", "v").items().type("long");
expected.addProp("p", "v");
assertEquals(expected, schema3);
}
// Three equivalent ways to build a map schema: values builder, explicit
// values schema, and value type by name (with a prop on the map itself).
@Test
void map() {
Schema intSchema = Schema.create(Schema.Type.INT);
Schema expected = Schema.createMap(intSchema);
Schema schema1 = SchemaBuilder.map().values().intType();
assertEquals(expected, schema1);
Schema schema2 = SchemaBuilder.map().values(intSchema);
assertEquals(expected, schema2);
Schema schema3 = SchemaBuilder.map().prop("p", "v").values().type("int");
expected.addProp("p", "v");
assertEquals(expected, schema3);
}
@Test
void unionAndNullable() {
  // Both the explicit union builder and the nullable() shorthand should
  // produce the same [long, null] union.
  Schema expected = Schema.createUnion(
      Arrays.asList(Schema.create(Schema.Type.LONG), Schema.create(Schema.Type.NULL)));
  assertEquals(expected, SchemaBuilder.unionOf().longType().and().nullType().endUnion());
  assertEquals(expected, SchemaBuilder.nullable().longType());
}
// Field-level metadata round-trips: doc string, sort order, and aliases.
@Test
void fields() {
Schema rec = SchemaBuilder.record("Rec").fields().name("documented").doc("documented").type().nullType().noDefault()
.name("ascending").orderAscending().type().booleanType().noDefault().name("descending").orderDescending().type()
.floatType().noDefault().name("ignored").orderIgnore().type().doubleType().noDefault().name("aliased")
.aliases("anAlias").type().stringType().noDefault().endRecord();
assertEquals("documented", rec.getField("documented").doc());
assertEquals(Order.ASCENDING, rec.getField("ascending").order());
assertEquals(Order.DESCENDING, rec.getField("descending").order());
assertEquals(Order.IGNORE, rec.getField("ignored").order());
assertTrue(rec.getField("aliased").aliases().contains("anAlias"));
}
// The required*/optional*/nullable* shortcut methods must be exactly
// equivalent to spelling out the same fields with the full builder API.
@Test
void fieldShortcuts() {
Schema full = SchemaBuilder.record("Blah").fields().name("rbool").type().booleanType().noDefault().name("obool")
.type().optional().booleanType().name("nbool").type().nullable().booleanType().booleanDefault(true).name("rint")
.type().intType().noDefault().name("oint").type().optional().intType().name("nint").type().nullable().intType()
.intDefault(1).name("rlong").type().longType().noDefault().name("olong").type().optional().longType()
.name("nlong").type().nullable().longType().longDefault(2L).name("rfloat").type().floatType().noDefault()
.name("ofloat").type().optional().floatType().name("nfloat").type().nullable().floatType().floatDefault(-1.1f)
.name("rdouble").type().doubleType().noDefault().name("odouble").type().optional().doubleType().name("ndouble")
.type().nullable().doubleType().doubleDefault(99.9d).name("rstring").type().stringType().noDefault()
.name("ostring").type().optional().stringType().name("nstring").type().nullable().stringType()
.stringDefault("def").name("rbytes").type().bytesType().noDefault().name("obytes").type().optional().bytesType()
.name("nbytes").type().nullable().bytesType().bytesDefault(new byte[] { 1, 2, 3 }).endRecord();
Schema shortcut = SchemaBuilder.record("Blah").fields().requiredBoolean("rbool").optionalBoolean("obool")
.nullableBoolean("nbool", true).requiredInt("rint").optionalInt("oint").nullableInt("nint", 1)
.requiredLong("rlong").optionalLong("olong").nullableLong("nlong", 2L).requiredFloat("rfloat")
.optionalFloat("ofloat").nullableFloat("nfloat", -1.1f).requiredDouble("rdouble").optionalDouble("odouble")
.nullableDouble("ndouble", 99.9d).requiredString("rstring").optionalString("ostring")
.nullableString("nstring", "def").requiredBytes("rbytes").optionalBytes("obytes")
.nullableBytes("nbytes", new byte[] { 1, 2, 3 }).endRecord();
assertEquals(full, shortcut);
}
// Exercises name/namespace resolution rules: explicit namespaces, contextual
// namespaces inherited from the enclosing record, empty-string namespaces,
// and namespaces that shadow primitive type names.
@Test
void names() {
// no contextual namespace
Schema r = SchemaBuilder.record("Rec").fields().name("f0").type().fixed("org.foo.MyFixed").size(1).noDefault()
.name("f1").type("org.foo.MyFixed").noDefault().name("f2").type("org.foo.MyFixed", "").noDefault().name("f3")
.type("org.foo.MyFixed", null).noDefault().name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
.name("f5").type("MyFixed", "org.foo").noDefault().endRecord();
Schema expected = Schema.createFixed("org.foo.MyFixed", null, null, 1);
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
// context namespace
Schema f = SchemaBuilder.builder("").fixed("Foo").size(1);
assertEquals(Schema.createFixed("Foo", null, null, 1), f);
// context namespace from record matches
r = SchemaBuilder.record("Rec").namespace("org.foo").fields().name("f0").type().fixed("MyFixed").size(1).noDefault()
.name("f1").type("org.foo.MyFixed").noDefault().name("f2").type("org.foo.MyFixed", "").noDefault().name("f3")
.type("org.foo.MyFixed", null).noDefault().name("f4").type("org.foo.MyFixed", "ignorethis").noDefault()
.name("f5").type("MyFixed", "org.foo").noDefault().name("f6").type("MyFixed", null).noDefault().name("f7")
.type("MyFixed").noDefault().endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
checkField(r, expected, "f6");
checkField(r, expected, "f7");
// context namespace from record does not match
r = SchemaBuilder.record("Rec").namespace("org.rec").fields().name("f0").type().fixed("MyFixed")
.namespace("org.foo").size(1).noDefault().name("f1").type("org.foo.MyFixed").noDefault().name("f2")
.type("org.foo.MyFixed", "").noDefault().name("f3").type("org.foo.MyFixed", null).noDefault().name("f4")
.type("org.foo.MyFixed", "ignorethis").noDefault().name("f5").type("MyFixed", "org.foo").noDefault()
.endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
checkField(r, expected, "f2");
checkField(r, expected, "f3");
checkField(r, expected, "f4");
checkField(r, expected, "f5");
// context namespace from record, nested has no namespace
expected = Schema.createFixed("MyFixed", null, null, 1);
r = SchemaBuilder.record("Rec").namespace("org.rec").fields().name("f0").type().fixed("MyFixed").namespace("")
.size(1).noDefault().name("f1").type("MyFixed", "").noDefault().endRecord();
checkField(r, expected, "f0");
checkField(r, expected, "f1");
// mimic names of primitives, but with a namespace. This is OK
SchemaBuilder.fixed("org.test.long").size(1);
SchemaBuilder.fixed("long").namespace("org.test").size(1);
SchemaBuilder.builder("org.test").fixed("long").size(1);
}
// Asserts that the named field of record r has exactly the expected schema.
private void checkField(Schema r, Schema expected, String name) {
assertEquals(expected, r.getField(name).schema());
}
// Defining the same type name twice within one schema must fail.
@Test
void namesFailRedefined() {
assertThrows(SchemaParseException.class, () -> {
SchemaBuilder.record("Rec").fields().name("f0").type().enumeration("MyEnum").symbols("A", "B").enumDefault("A")
.name("f1").type().enumeration("MyEnum").symbols("X", "Y").noDefault().endRecord();
});
}
@Test
void namesFailAbsent() {
  // Referencing a type name that was never defined must fail.
  assertThrows(SchemaParseException.class, () -> SchemaBuilder.builder().type("notdefined"));
}
@Test
void nameReserved() {
  // A primitive type name with no namespace is reserved and must be rejected.
  assertThrows(AvroTypeException.class, () -> SchemaBuilder.fixed("long").namespace("").size(1));
}
// End-to-end check of default values for every field type (and for unions,
// one per possible first branch), verified by building a record with no
// explicit values and reading the defaults back via GenericRecordBuilder.
@Test
void fieldTypesAndDefaultValues() {
byte[] bytedef = new byte[] { 3 };
ByteBuffer bufdef = ByteBuffer.wrap(bytedef);
String strdef = "\u0003";
HashMap<String, String> mapdef = new HashMap<>();
mapdef.put("a", "A");
List<String> arrdef = Collections.singletonList("arr");
Schema rec = SchemaBuilder.record("inner").fields().name("f").type().intType().noDefault().endRecord();
Schema rec2 = SchemaBuilder.record("inner2").fields().name("f2").type().intType().noDefault().endRecord();
GenericData.Record recdef = new GenericRecordBuilder(rec).set("f", 1).build();
GenericData.Record recdef2 = new GenericRecordBuilder(rec2).set("f2", 2).build();
Schema r = SchemaBuilder.record("r").fields().name("boolF").type().booleanType().booleanDefault(false).name("intF")
.type().intType().intDefault(1).name("longF").type().longType().longDefault(2L).name("floatF").type()
.floatType().floatDefault(3.0f).name("doubleF").type().doubleType().doubleDefault(4.0d).name("stringF").type()
.stringType().stringDefault("def").name("bytesF1").type().bytesType().bytesDefault(bytedef).name("bytesF2")
.type().bytesType().bytesDefault(bufdef).name("bytesF3").type().bytesType().bytesDefault(strdef).name("nullF")
.type().nullType().nullDefault().name("fixedF1").type().fixed("F1").size(1).fixedDefault(bytedef)
.name("fixedF2").type().fixed("F2").size(1).fixedDefault(bufdef).name("fixedF3").type().fixed("F3").size(1)
.fixedDefault(strdef).name("enumF").type().enumeration("E1").symbols("S").enumDefault("S").name("mapF").type()
.map().values().stringType().mapDefault(mapdef).name("arrayF").type().array().items().stringType()
.arrayDefault(arrdef).name("recordF").type().record("inner").fields().name("f").type().intType().noDefault()
.endRecord().recordDefault(recdef).name("byName").type("E1").withDefault("S")
// union builders, one for each 'first type' in a union:
.name("boolU").type().unionOf().booleanType().and().intType().endUnion().booleanDefault(false).name("intU")
.type().unionOf().intType().and().longType().endUnion().intDefault(1).name("longU").type().unionOf().longType()
.and().intType().endUnion().longDefault(2L).name("floatU").type().unionOf().floatType().and().intType()
.endUnion().floatDefault(3.0f).name("doubleU").type().unionOf().doubleType().and().intType().endUnion()
.doubleDefault(4.0d).name("stringU").type().unionOf().stringType().and().intType().endUnion()
.stringDefault("def").name("bytesU").type().unionOf().bytesType().and().intType().endUnion()
.bytesDefault(bytedef).name("nullU").type().unionOf().nullType().and().intType().endUnion().nullDefault()
.name("fixedU").type().unionOf().fixed("F4").size(1).and().intType().endUnion().fixedDefault(bytedef)
.name("enumU").type().unionOf().enumeration("E2").symbols("SS").and().intType().endUnion().enumDefault("SS")
.name("mapU").type().unionOf().map().values().stringType().and().intType().endUnion().mapDefault(mapdef)
.name("arrayU").type().unionOf().array().items().stringType().and().intType().endUnion().arrayDefault(arrdef)
.name("recordU").type().unionOf().record("inner2").fields().name("f2").type().intType().noDefault().endRecord()
.and().intType().endUnion().recordDefault(recdef2).endRecord();
// build with no explicit field values so that every default is applied
GenericData.Record newRec = new GenericRecordBuilder(r).build();
assertEquals(false, newRec.get("boolF"));
assertEquals(false, newRec.get("boolU"));
assertEquals(1, newRec.get("intF"));
assertEquals(1, newRec.get("intU"));
assertEquals(2L, newRec.get("longF"));
assertEquals(2L, newRec.get("longU"));
assertEquals(3f, newRec.get("floatF"));
assertEquals(3f, newRec.get("floatU"));
assertEquals(4d, newRec.get("doubleF"));
assertEquals(4d, newRec.get("doubleU"));
assertEquals("def", newRec.get("stringF").toString());
assertEquals("def", newRec.get("stringU").toString());
// byte[], ByteBuffer, and String defaults all surface as the same ByteBuffer
assertEquals(bufdef, newRec.get("bytesF1"));
assertEquals(bufdef, newRec.get("bytesF2"));
assertEquals(bufdef, newRec.get("bytesF3"));
assertEquals(bufdef, newRec.get("bytesU"));
assertNull(newRec.get("nullF"));
assertNull(newRec.get("nullU"));
assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF1")).bytes());
assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF2")).bytes());
assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedF3")).bytes());
assertArrayEquals(bytedef, ((GenericData.Fixed) newRec.get("fixedU")).bytes());
assertEquals("S", newRec.get("enumF").toString());
assertEquals("SS", newRec.get("enumU").toString());
@SuppressWarnings("unchecked")
Map<CharSequence, CharSequence> map = (Map<CharSequence, CharSequence>) newRec.get("mapF");
assertEquals(mapdef.size(), map.size());
for (Map.Entry<CharSequence, CharSequence> e : map.entrySet()) {
assertEquals(mapdef.get(e.getKey().toString()), e.getValue().toString());
}
assertEquals(newRec.get("mapF"), newRec.get("mapU"));
@SuppressWarnings("unchecked")
GenericData.Array<CharSequence> arr = (GenericData.Array<CharSequence>) newRec.get("arrayF");
assertEquals(arrdef.size(), arr.size());
for (CharSequence c : arr) {
assertTrue(arrdef.contains(c.toString()));
}
assertEquals(newRec.get("arrayF"), newRec.get("arrayU"));
assertEquals(recdef, newRec.get("recordF"));
assertEquals(recdef2, newRec.get("recordU"));
assertEquals("S", newRec.get("byName").toString());
}
@Test
void badDefault() {
  // A default value of an unsupported Java type must be rejected.
  assertThrows(SchemaBuilderException.class,
      () -> SchemaBuilder.record("r").fields().name("f").type(Schema.create(Schema.Type.INT))
          .withDefault(new Object()).endRecord());
}
// Smoke test: a union over every schema type can be built through the fluent
// field API without throwing. No assertions; success is the absence of errors.
@Test
void unionFieldBuild() {
SchemaBuilder.record("r").fields().name("allUnion").type().unionOf().booleanType().and().intType().and().longType()
.and().floatType().and().doubleType().and().stringType().and().bytesType().and().nullType().and().fixed("Fix")
.size(1).and().enumeration("Enu").symbols("Q").and().array().items().intType().and().map().values().longType()
.and().record("Rec").fields().name("one").type("Fix").noDefault().endRecord().endUnion().booleanDefault(false)
.endRecord();
}
// Round-trips records through an Avro data file and verifies that schema
// evolution fills in defaults: fields added in the reader schema take their
// declared defaults, while values actually written are preserved.
@Test
void defaults() throws IOException {
Schema writeSchema = SchemaBuilder.record("r").fields().name("requiredInt").type().intType().noDefault()
.name("optionalInt").type().optional().intType().name("nullableIntWithDefault").type().nullable().intType()
.intDefault(3).endRecord();
GenericData.Record rec1 = new GenericRecordBuilder(writeSchema).set("requiredInt", 1).build();
assertEquals(1, rec1.get("requiredInt"));
assertNull(rec1.get("optionalInt"));
assertEquals(3, rec1.get("nullableIntWithDefault"));
GenericData.Record rec2 = new GenericRecordBuilder(writeSchema).set("requiredInt", 1).set("optionalInt", 2)
.set("nullableIntWithDefault", 13).build();
assertEquals(1, rec2.get("requiredInt"));
assertEquals(2, rec2.get("optionalInt"));
assertEquals(13, rec2.get("nullableIntWithDefault"));
// write to file
File file = new File(DIR.getPath(), "testDefaults.avro");
try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
writer.create(writeSchema, file);
writer.append(rec1);
writer.append(rec2);
}
// reader schema adds two fields not present in the writer schema
Schema readSchema = SchemaBuilder.record("r").fields().name("requiredInt").type().intType().noDefault()
.name("optionalInt").type().optional().intType().name("nullableIntWithDefault").type().nullable().intType()
.intDefault(3).name("newOptionalInt").type().optional().intType().name("newNullableIntWithDefault").type()
.nullable().intType().intDefault(5).endRecord();
try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file,
new GenericDatumReader<>(writeSchema, readSchema))) {
GenericData.Record rec1read = reader.iterator().next();
assertEquals(1, rec1read.get("requiredInt"));
assertNull(rec1read.get("optionalInt"));
assertEquals(3, rec1read.get("nullableIntWithDefault"));
assertNull(rec1read.get("newOptionalInt"));
assertEquals(5, rec1read.get("newNullableIntWithDefault"));
GenericData.Record rec2read = reader.iterator().next();
assertEquals(1, rec2read.get("requiredInt"));
assertEquals(2, rec2read.get("optionalInt"));
assertEquals(13, rec2read.get("nullableIntWithDefault"));
assertNull(rec2read.get("newOptionalInt"));
assertEquals(5, rec2read.get("newNullableIntWithDefault"));
}
}
// defaultVal() must return the boxed numeric type matching each field's
// schema type (Integer/Long/Float/Double), not a widened/narrowed value.
@Test
void defaultTypes() {
Integer intDef = 1;
Long longDef = 2L;
Float floatDef = 3F;
Double doubleDef = 4D;
Schema schema = SchemaBuilder.record("r").fields().name("int").type().intType().intDefault(intDef).name("long")
.type().longType().longDefault(longDef).name("float").type().floatType().floatDefault(floatDef).name("double")
.type().doubleType().doubleDefault(doubleDef).endRecord();
assertEquals(intDef, schema.getField("int").defaultVal(), "int field default type or value mismatch");
assertEquals(longDef, schema.getField("long").defaultVal(), "long field default type or value mismatch");
assertEquals(floatDef, schema.getField("float").defaultVal(), "float field default type or value mismatch");
assertEquals(doubleDef, schema.getField("double").defaultVal(), "double field default type or value mismatch");
}
// Default validation is on by default: a string default on an int field
// must raise an AvroRuntimeException with a specific message.
@Test
void validateDefaultsEnabled() {
assertThrows(AvroRuntimeException.class, () -> {
try {
SchemaBuilder.record("ValidationRecord").fields().name("IntegerField").type("int").withDefault("Invalid")
.endRecord();
} catch (AvroRuntimeException e) {
// verify the message before rethrowing for assertThrows to catch
assertEquals("Invalid default for field IntegerField: \"Invalid\" not a \"int\"", e.getMessage(),
"Default behavior is to raise an exception due to record having an invalid default");
throw e;
}
});
}
// With notValidatingDefaults(), a type-mismatched default is accepted at
// build time; defaultVal() then returns null while the raw JSON node keeps
// the original (invalid) value.
@Test
void validateDefaultsDisabled() {
final String fieldName = "IntegerField";
final String defaultValue = "foo";
Schema schema = SchemaBuilder.record("ValidationRecord").fields().name(fieldName).notValidatingDefaults()
.type("int").withDefault(defaultValue) // Would throw an exception on endRecord() if validations enabled
.endRecord();
assertNull(schema.getField(fieldName).defaultVal(), "Differing types, so this returns null");
assertEquals(defaultValue, schema.getField(fieldName).defaultValue().asText(),
"Schema is able to be successfully created as is without validation");
}
/**
* Nested records with no namespace must survive a toString()/parse round trip
* even when the outermost record lives in a namespace literally named
* "default". https://issues.apache.org/jira/browse/AVRO-1965
*/
@Test
void namespaceDefaulting() {
Schema d = SchemaBuilder.builder().intType();
Schema c = SchemaBuilder.record("c").fields().name("d").type(d).noDefault().endRecord();
Schema b = SchemaBuilder.record("b").fields().name("c").type(c).noDefault().endRecord();
Schema a1 = SchemaBuilder.record("default.a").fields().name("b").type(b).noDefault().endRecord();
Schema a2 = new Schema.Parser().parse(a1.toString());
assertEquals(a2, a1);
}
// With NO_VALIDATION, otherwise-illegal names ("7name", "123") are accepted.
// Runs in a separate thread so the validator change cannot leak into other
// tests; assertion failures in that thread are captured via the uncaught
// exception handler and rechecked on the main thread.
@Test
void namesAcceptAll() throws InterruptedException {
// Ensure that Schema.setNameValidator won't interfere with others unit tests.
Runnable r = () -> {
Schema.setNameValidator(Schema.NameValidator.NO_VALIDATION);
final Schema schema = SchemaBuilder.record("7name").fields().name("123").type(Schema.create(Schema.Type.INT))
.noDefault().endRecord();
Assertions.assertNotNull(schema);
Assertions.assertEquals("7name", schema.getName());
final Schema.Field field = schema.getField("123");
Assertions.assertEquals("123", field.name());
};
final Throwable[] exception = new Throwable[] { null };
Thread t = new Thread(r);
t.setUncaughtExceptionHandler((Thread th, Throwable e) -> exception[0] = e);
t.start();
t.join();
Assertions.assertNull(exception[0], () -> exception[0].getMessage());
}
}
| 7,145 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/FooBarSpecificRecord.java | /*
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.SchemaStore;
import org.apache.avro.specific.SpecificData;
@org.apache.avro.specific.AvroGenerated
public class FooBarSpecificRecord extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 1031933828916876443L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"FooBarSpecificRecord\",\"namespace\":\"org.apache.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"nicknames\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"relatedids\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"typeEnum\",\"type\":[\"null\",{\"type\":\"enum\",\"name\":\"TypeEnum\",\"symbols\":[\"a\",\"b\",\"c\"]}],\"default\":null}]}");
public static org.apache.avro.Schema getClassSchema() {
return SCHEMA$;
}
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<FooBarSpecificRecord> ENCODER = new BinaryMessageEncoder<>(MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<FooBarSpecificRecord> DECODER = new BinaryMessageDecoder<>(MODEL$, SCHEMA$);
/**
* Return the BinaryMessageDecoder instance used by this class.
*
* @return the message decoder used by this class
*/
public static BinaryMessageDecoder<FooBarSpecificRecord> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the
* specified {@link SchemaStore}.
*
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
* @return a BinaryMessageDecoder instance for this class backed by the given
* SchemaStore
*/
public static BinaryMessageDecoder<FooBarSpecificRecord> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
}
/**
* Serializes this FooBarSpecificRecord to a ByteBuffer.
*
* @return a buffer holding the serialized data for this instance
* @throws java.io.IOException if this instance could not be serialized
*/
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/**
* Deserializes a FooBarSpecificRecord from a ByteBuffer.
*
* @param b a byte buffer holding serialized data for an instance of this class
* @return a FooBarSpecificRecord instance decoded from the given buffer
* @throws java.io.IOException if the given bytes could not be deserialized into
* an instance of this class
*/
public static FooBarSpecificRecord fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
@Deprecated
public int id;
@Deprecated
public java.lang.String name;
@Deprecated
public java.util.List<java.lang.String> nicknames;
@Deprecated
public java.util.List<java.lang.Integer> relatedids;
@Deprecated
public org.apache.avro.TypeEnum typeEnum;
/**
* Default constructor. Note that this does not initialize fields to their
* default values from the schema. If that is desired then one should use
* <code>newBuilder()</code>.
*/
public FooBarSpecificRecord() {
}
/**
* All-args constructor.
*
* @param id The new value for id
* @param name The new value for name
* @param nicknames The new value for nicknames
* @param relatedids The new value for relatedids
* @param typeEnum The new value for typeEnum
*/
public FooBarSpecificRecord(java.lang.Integer id, java.lang.String name, java.util.List<java.lang.String> nicknames,
java.util.List<java.lang.Integer> relatedids, org.apache.avro.TypeEnum typeEnum) {
this.id = id;
this.name = name;
this.nicknames = nicknames;
this.relatedids = relatedids;
this.typeEnum = typeEnum;
}
@Override
public org.apache.avro.Schema getSchema() {
return SCHEMA$;
}
// Used by DatumWriter. Applications should not call.
@Override
public java.lang.Object get(int field$) {
switch (field$) {
case 0:
return id;
case 1:
return name;
case 2:
return nicknames;
case 3:
return relatedids;
case 4:
return typeEnum;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value = "unchecked")
@Override
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0:
id = (java.lang.Integer) value$;
break;
case 1:
name = (java.lang.String) value$;
break;
case 2:
nicknames = (java.util.List<java.lang.String>) value$;
break;
case 3:
relatedids = (java.util.List<java.lang.Integer>) value$;
break;
case 4:
typeEnum = (org.apache.avro.TypeEnum) value$;
break;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'id' field.
*
* @return The value of the 'id' field.
*/
public java.lang.Integer getId() {
return id;
}
/**
* Sets the value of the 'id' field.
*
* @param value the value to set.
*/
public void setId(java.lang.Integer value) {
this.id = value;
}
/**
* Gets the value of the 'name' field.
*
* @return The value of the 'name' field.
*/
public java.lang.String getName() {
return name;
}
/**
* Sets the value of the 'name' field.
*
* @param value the value to set.
*/
public void setName(java.lang.String value) {
this.name = value;
}
/**
* Gets the value of the 'nicknames' field.
*
* @return The value of the 'nicknames' field.
*/
public java.util.List<java.lang.String> getNicknames() {
return nicknames;
}
/**
* Sets the value of the 'nicknames' field.
*
* @param value the value to set.
*/
public void setNicknames(java.util.List<java.lang.String> value) {
this.nicknames = value;
}
/**
* Gets the value of the 'relatedids' field.
*
* @return The value of the 'relatedids' field.
*/
public java.util.List<java.lang.Integer> getRelatedids() {
return relatedids;
}
/**
* Sets the value of the 'relatedids' field.
*
* @param value the value to set.
*/
public void setRelatedids(java.util.List<java.lang.Integer> value) {
this.relatedids = value;
}
/**
* Gets the value of the 'typeEnum' field.
*
* @return The value of the 'typeEnum' field.
*/
public org.apache.avro.TypeEnum getTypeEnum() {
return typeEnum;
}
/**
* Sets the value of the 'typeEnum' field.
*
* @param value the value to set.
*/
public void setTypeEnum(org.apache.avro.TypeEnum value) {
this.typeEnum = value;
}
/**
* Creates a new FooBarSpecificRecord RecordBuilder.
*
* @return A new FooBarSpecificRecord RecordBuilder
*/
public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder() {
return new org.apache.avro.FooBarSpecificRecord.Builder();
}
/**
* Creates a new FooBarSpecificRecord RecordBuilder by copying an existing
* Builder.
*
* @param other The existing builder to copy.
* @return A new FooBarSpecificRecord RecordBuilder
*/
public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(
org.apache.avro.FooBarSpecificRecord.Builder other) {
if (other == null) {
return new org.apache.avro.FooBarSpecificRecord.Builder();
} else {
return new org.apache.avro.FooBarSpecificRecord.Builder(other);
}
}
/**
* Creates a new FooBarSpecificRecord RecordBuilder by copying an existing
* FooBarSpecificRecord instance.
*
* @param other The existing instance to copy.
* @return A new FooBarSpecificRecord RecordBuilder
*/
public static org.apache.avro.FooBarSpecificRecord.Builder newBuilder(org.apache.avro.FooBarSpecificRecord other) {
if (other == null) {
return new org.apache.avro.FooBarSpecificRecord.Builder();
} else {
return new org.apache.avro.FooBarSpecificRecord.Builder(other);
}
}
/**
* RecordBuilder for FooBarSpecificRecord instances.
*/
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<FooBarSpecificRecord>
implements org.apache.avro.data.RecordBuilder<FooBarSpecificRecord> {
private int id;
private java.lang.String name;
private java.util.List<java.lang.String> nicknames;
private java.util.List<java.lang.Integer> relatedids;
private org.apache.avro.TypeEnum typeEnum;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
* Creates a Builder by copying an existing Builder.
*
* @param other The existing Builder to copy.
*/
private Builder(org.apache.avro.FooBarSpecificRecord.Builder other) {
super(other);
if (isValidValue(fields()[0], other.id)) {
this.id = data().deepCopy(fields()[0].schema(), other.id);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.name)) {
this.name = data().deepCopy(fields()[1].schema(), other.name);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.nicknames)) {
this.nicknames = data().deepCopy(fields()[2].schema(), other.nicknames);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.relatedids)) {
this.relatedids = data().deepCopy(fields()[3].schema(), other.relatedids);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.typeEnum)) {
this.typeEnum = data().deepCopy(fields()[4].schema(), other.typeEnum);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
}
/**
* Creates a Builder by copying an existing FooBarSpecificRecord instance
*
* @param other The existing instance to copy.
*/
private Builder(org.apache.avro.FooBarSpecificRecord other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.id)) {
this.id = data().deepCopy(fields()[0].schema(), other.id);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.name)) {
this.name = data().deepCopy(fields()[1].schema(), other.name);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.nicknames)) {
this.nicknames = data().deepCopy(fields()[2].schema(), other.nicknames);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.relatedids)) {
this.relatedids = data().deepCopy(fields()[3].schema(), other.relatedids);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.typeEnum)) {
this.typeEnum = data().deepCopy(fields()[4].schema(), other.typeEnum);
fieldSetFlags()[4] = true;
}
}
/**
* Gets the value of the 'id' field.
*
* @return The value.
*/
public java.lang.Integer getId() {
return id;
}
/**
* Sets the value of the 'id' field.
*
* @param value The value of 'id'.
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder setId(int value) {
validate(fields()[0], value);
this.id = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'id' field has been set.
*
* @return True if the 'id' field has been set, false otherwise.
*/
public boolean hasId() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'id' field.
*
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder clearId() {
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'name' field.
*
* @return The value.
*/
public java.lang.String getName() {
return name;
}
/**
* Sets the value of the 'name' field.
*
* @param value The value of 'name'.
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder setName(java.lang.String value) {
validate(fields()[1], value);
this.name = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'name' field has been set.
*
* @return True if the 'name' field has been set, false otherwise.
*/
public boolean hasName() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'name' field.
*
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder clearName() {
name = null;
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'nicknames' field.
*
* @return The value.
*/
public java.util.List<java.lang.String> getNicknames() {
return nicknames;
}
/**
* Sets the value of the 'nicknames' field.
*
* @param value The value of 'nicknames'.
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder setNicknames(java.util.List<java.lang.String> value) {
validate(fields()[2], value);
this.nicknames = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'nicknames' field has been set.
*
* @return True if the 'nicknames' field has been set, false otherwise.
*/
public boolean hasNicknames() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'nicknames' field.
*
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder clearNicknames() {
nicknames = null;
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'relatedids' field.
*
* @return The value.
*/
public java.util.List<java.lang.Integer> getRelatedids() {
return relatedids;
}
/**
* Sets the value of the 'relatedids' field.
*
* @param value The value of 'relatedids'.
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder setRelatedids(java.util.List<java.lang.Integer> value) {
validate(fields()[3], value);
this.relatedids = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'relatedids' field has been set.
*
* @return True if the 'relatedids' field has been set, false otherwise.
*/
public boolean hasRelatedids() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'relatedids' field.
*
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder clearRelatedids() {
relatedids = null;
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'typeEnum' field.
*
* @return The value.
*/
public org.apache.avro.TypeEnum getTypeEnum() {
return typeEnum;
}
/**
* Sets the value of the 'typeEnum' field.
*
* @param value The value of 'typeEnum'.
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder setTypeEnum(org.apache.avro.TypeEnum value) {
validate(fields()[4], value);
this.typeEnum = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'typeEnum' field has been set.
*
* @return True if the 'typeEnum' field has been set, false otherwise.
*/
public boolean hasTypeEnum() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'typeEnum' field.
*
* @return This builder.
*/
public org.apache.avro.FooBarSpecificRecord.Builder clearTypeEnum() {
typeEnum = null;
fieldSetFlags()[4] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public FooBarSpecificRecord build() {
try {
FooBarSpecificRecord record = new FooBarSpecificRecord();
record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]);
record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]);
record.nicknames = fieldSetFlags()[2] ? this.nicknames
: (java.util.List<java.lang.String>) defaultValue(fields()[2]);
record.relatedids = fieldSetFlags()[3] ? this.relatedids
: (java.util.List<java.lang.Integer>) defaultValue(fields()[3]);
record.typeEnum = fieldSetFlags()[4] ? this.typeEnum : (org.apache.avro.TypeEnum) defaultValue(fields()[4]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumWriter<FooBarSpecificRecord> WRITER$ = (org.apache.avro.io.DatumWriter<FooBarSpecificRecord>) MODEL$
.createDatumWriter(SCHEMA$);
@Override
public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
WRITER$.write(this, SpecificData.getEncoder(out));
}
@SuppressWarnings("unchecked")
private static final org.apache.avro.io.DatumReader<FooBarSpecificRecord> READER$ = (org.apache.avro.io.DatumReader<FooBarSpecificRecord>) MODEL$
.createDatumReader(SCHEMA$);
@Override
public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
READER$.read(this, SpecificData.getDecoder(in));
}
}
| 7,146 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/BarRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
 * Simple mutable test bean with a single {@code String} field, used by
 * reflect/generic datum tests. A {@code null} message is a valid state.
 */
public class BarRecord {
  private String beerMsg;

  public BarRecord() {
  }

  public BarRecord(String beerMsg) {
    this.beerMsg = beerMsg;
  }

  @Override
  public boolean equals(Object that) {
    if (that instanceof BarRecord) {
      BarRecord other = (BarRecord) that;
      // Two records are equal when both messages are null or both are equal.
      return this.beerMsg == null ? other.beerMsg == null : this.beerMsg.equals(other.beerMsg);
    }
    return false;
  }

  @Override
  public int hashCode() {
    // Guard against null: equals() treats a null message as a valid state, so
    // hashCode() must not throw NullPointerException for it (equals/hashCode
    // contract: equal objects must have equal hash codes, including the
    // all-null case).
    return beerMsg == null ? 0 : beerMsg.hashCode();
  }

  @Override
  public String toString() {
    return BarRecord.class.getSimpleName() + "{msg=" + beerMsg + "}";
  }
}
| 7,147 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaWarnings.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import static org.apache.avro.LogicalType.LOGICAL_TYPE_PROP;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
public class TestSchemaWarnings {

  private final static PrintStream originalErr = System.err;

  /**
   * The capturable replacement for the system err stream.
   */
  private final ByteArrayOutputStream capturedErr = new ByteArrayOutputStream();

  @BeforeEach
  public void setupStdErr() {
    capturedErr.reset();
    System.setErr(new PrintStream(capturedErr));
  }

  @AfterAll
  public static void restoreStdErr() {
    System.setErr(originalErr);
  }

  /**
   * Drains and returns everything written to the captured stderr stream since
   * the previous call, then resets the capture buffer.
   */
  public String getCapturedStdErr() {
    // Flush the stream we actually capture (System.err); the previous code
    // flushed System.out, which could leave buffered stderr output unread.
    System.err.flush();
    String stderr = new String(capturedErr.toByteArray(), StandardCharsets.UTF_8);
    capturedErr.reset();
    return stderr;
  }

  @Test
  void warnWhenTheLogicalTypeIsOnTheField() {
    // A record with a single int field.
    Schema s = SchemaBuilder.record("A").fields().requiredInt("a1").endRecord();
    // Force reparsing the schema, and no warning should be logged.
    s = new Schema.Parser().parse(s.toString());
    assertThat(s.getField("a1").schema().getLogicalType(), nullValue());
    assertThat(getCapturedStdErr(), is(""));
    // Add the logical type annotation to the field (as opposed to the field schema)
    // and parse it again. This is a common error, see AVRO-3014, AVRO-2015.
    s.getField("a1").addProp(LOGICAL_TYPE_PROP, LogicalTypes.date().getName());
    assertThat(s.getField("a1").schema().getLogicalType(), nullValue());
    // Force reparsing the schema, and a warning should be logged.
    s = new Schema.Parser().parse(s.toString());
    assertThat(getCapturedStdErr(), containsString("Ignored the A.a1.logicalType property (\"date\"). It should"
        + " probably be nested inside the \"type\" for the field."));
    assertThat(s.getField("a1").schema().getLogicalType(), nullValue());
    // Add the logical type annotation to the field schema. This doesn't change the
    // logical type of an already parsed schema.
    s.getField("a1").schema().addProp(LOGICAL_TYPE_PROP, LogicalTypes.date().getName());
    assertThat(s.getField("a1").schema().getLogicalType(), nullValue());
    // Force reparsing the schema. No warning should be logged, and the logical type
    // should be applied.
    s = new Schema.Parser().parse(s.toString());
    assertThat(getCapturedStdErr(), is(""));
    assertThat(s.getField("a1").schema().getLogicalType(), is(LogicalTypes.date()));
  }

  @Test
  void warnWhenTheLogicalTypeIsIgnored() {
    // A record with a single long field.
    Schema s = SchemaBuilder.record("A").fields().requiredLong("a1").endRecord();
    // Add a logical type annotation to the field schema that is invalid for a
    // long ("date" requires an int), and parse it again.
    s.getField("a1").schema().addProp(LOGICAL_TYPE_PROP, LogicalTypes.date().getName());
    // Force reparsing the schema. The invalid logical type should be dropped
    // with a warning, not applied.
    s = new Schema.Parser().parse(s.toString());
    assertThat(s.getField("a1").schema().getLogicalType(), nullValue());
    assertThat(getCapturedStdErr(), containsString("Ignoring invalid logical type for name: date"));
  }
}
| 7,148 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestFixed.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.apache.avro;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class TestFixed {

  @Test
  void fixedDefaultValueDrop() {
    // Build a record with a fixed(16) field whose default is a zero-filled
    // byte array, and verify the default survives schema construction.
    Schema hashType = SchemaBuilder.builder().fixed("MD5").size(16);
    Schema record = SchemaBuilder.builder().record("test").fields().name("hash").type(hashType)
        .withDefault(new byte[16]).endRecord();
    Schema.Field hashField = record.getField("hash");
    Object defaultValue = hashField.defaultVal();
    assertNotNull(defaultValue);
    assertArrayEquals(new byte[16], (byte[]) defaultValue);
  }

  @Test
  void fixedLengthOutOfLimit() {
    // A fixed size of Integer.MAX_VALUE must be rejected by the VM byte-limit guard.
    Exception thrown = assertThrows(UnsupportedOperationException.class,
        () -> Schema.createFixed("oversize", "doc", "space", Integer.MAX_VALUE));
    assertEquals(TestSystemLimitException.ERROR_VM_LIMIT_BYTES, thrown.getMessage());
  }

  @Test
  void fixedNegativeLength() {
    // A negative size is never a valid fixed length.
    Exception thrown = assertThrows(AvroRuntimeException.class,
        () -> Schema.createFixed("negative", "doc", "space", -1));
    assertEquals(TestSystemLimitException.ERROR_NEGATIVE, thrown.getMessage());
  }
}
| 7,149 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.io.IOException;
import java.util.Arrays;
import org.apache.avro.data.TimeConversions;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.FastReaderBuilder;
import org.apache.avro.io.JsonDecoder;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
class TestResolver {

  /**
   * Test promote action INT -> LONG, with logical type for LONG.
   */
  @Test
  void resolveTime() {
    final Schema writeSchema = Schema.create(Schema.Type.INT);
    final Schema readSchema = new TimeConversions.TimeMicrosConversion().getRecommendedSchema(); // LONG
    Resolver.Action action = Resolver.resolve(writeSchema, readSchema);
    Assertions.assertNotNull(action);
    MatcherAssert.assertThat("Wrong class for action", action, Matchers.instanceOf(Resolver.Promote.class));
    // JUnit assertEquals takes (expected, actual); keep that order so failure
    // messages are not inverted.
    Assertions.assertEquals(Resolver.Action.Type.PROMOTE, action.type);
    Assertions.assertNotNull(action.logicalType);
  }

  /**
   * Test union type with promote action INT -> LONG, with logical type for LONG.
   */
  @Test
  void resolveUnion() {
    final Schema schema = new TimeConversions.TimeMicrosConversion().getRecommendedSchema();
    final Schema writeSchema = Schema.createUnion(Schema.create(Schema.Type.INT));
    final Schema readSchema = Schema.createUnion(schema);
    Resolver.Action action = Resolver.resolve(writeSchema, readSchema);
    Assertions.assertNotNull(action);
    Assertions.assertEquals(Resolver.Action.Type.WRITER_UNION, action.type);
    MatcherAssert.assertThat("Wrong class for action", action, Matchers.instanceOf(Resolver.WriterUnion.class));
    Assertions.assertEquals(1, ((Resolver.WriterUnion) action).actions.length);
    Resolver.Action innerAction = ((Resolver.WriterUnion) action).actions[0];
    MatcherAssert.assertThat("Wrong class for action", innerAction, Matchers.instanceOf(Resolver.ReaderUnion.class));
    Resolver.ReaderUnion innerUnionAction = (Resolver.ReaderUnion) innerAction;
    Resolver.Action promoteAction = innerUnionAction.actualAction;
    Assertions.assertEquals(Resolver.Action.Type.PROMOTE, promoteAction.type);
    Assertions.assertNotNull(promoteAction.logicalType);
  }

  /**
   * Test enum resolution: symbols shared by writer and reader keep their
   * mapping, unknown writer symbols fall back to the reader's enum default.
   */
  @Test
  void resolveEnum() throws IOException {
    final Schema writeSchema = Schema.createEnum("myEnum", "", "n1", Arrays.asList("e1", "e3", "e4"));
    final Schema readSchema = Schema.createEnum("myEnum", "", "n1", Arrays.asList("e1", "e2", "e3"), "e2");
    Resolver.Action action = Resolver.resolve(writeSchema, readSchema);
    Assertions.assertNotNull(action);
    Assertions.assertEquals(Resolver.Action.Type.ENUM, action.type);
    MatcherAssert.assertThat("Wrong class for action", action, Matchers.instanceOf(Resolver.EnumAdjust.class));
    Resolver.EnumAdjust adjust = (Resolver.EnumAdjust) action;
    // Writer e1 -> reader index 0, e3 -> 2, e4 (unknown) -> default e2 at index 1.
    Assertions.assertArrayEquals(new int[] { 0, 2, 1 }, adjust.adjustments);
    Assertions.assertEquals("e1", adjust.values[0].toString());
    Assertions.assertEquals("e3", adjust.values[1].toString());
    Assertions.assertEquals("e2", adjust.values[2].toString());
    FastReaderBuilder reader = FastReaderBuilder.get();
    Schema writeRecord = Schema.createRecord("rec1", "", "", false,
        Arrays.asList(new Schema.Field("f1", writeSchema, "")));
    Schema readRecord = Schema.createRecord("rec1", "", "", false,
        Arrays.asList(new Schema.Field("f1", readSchema, "")));
    DatumReader<Object> datumReader = reader.createDatumReader(writeRecord, readRecord);
    JsonDecoder e2 = DecoderFactory.get().jsonDecoder(readRecord, "{ \"f1\" : \"e2\" }");
    Object read = datumReader.read(null, e2);
    Assertions.assertNotNull(read);
    MatcherAssert.assertThat("", read, Matchers.instanceOf(IndexedRecord.class));
    IndexedRecord result = (IndexedRecord) read;
    Assertions.assertEquals("e3", result.get(0).toString());
  }

  /**
   * Promotion must be one-directional (widening only) and reject identical types.
   */
  @Test
  void promoteIsValid() {
    Assertions.assertThrows(IllegalArgumentException.class,
        () -> Resolver.Promote.isValid(Schema.create(Schema.Type.INT), Schema.create(Schema.Type.INT)));
    Assertions.assertTrue(Resolver.Promote.isValid(Schema.create(Schema.Type.INT), Schema.create(Schema.Type.LONG)));
    Assertions.assertFalse(Resolver.Promote.isValid(Schema.create(Schema.Type.LONG), Schema.create(Schema.Type.INT)));
    Assertions.assertTrue(Resolver.Promote.isValid(Schema.create(Schema.Type.INT), Schema.create(Schema.Type.FLOAT)));
    Assertions.assertFalse(Resolver.Promote.isValid(Schema.create(Schema.Type.FLOAT), Schema.create(Schema.Type.INT)));
    Assertions
        .assertTrue(Resolver.Promote.isValid(Schema.create(Schema.Type.FLOAT), Schema.create(Schema.Type.DOUBLE)));
    Assertions
        .assertFalse(Resolver.Promote.isValid(Schema.create(Schema.Type.DOUBLE), Schema.create(Schema.Type.FLOAT)));
  }
}
| 7,150 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/CustomTypeLogicalTypeFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
 * Test factory producing a bare logical type named "custom" for any schema.
 */
public class CustomTypeLogicalTypeFactory implements LogicalTypes.LogicalTypeFactory {

  @Override
  public LogicalType fromSchema(Schema schema) {
    // Stateless: every invocation returns a fresh LogicalType carrying the
    // same type name this factory is registered under.
    return new LogicalType(getTypeName());
  }

  @Override
  public String getTypeName() {
    return "custom";
  }
}
| 7,151 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestSchemaCompatibility.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static java.util.Arrays.asList;
import static org.apache.avro.SchemaCompatibility.Incompatibility;
import static org.apache.avro.SchemaCompatibility.SchemaCompatibilityResult;
import static org.apache.avro.SchemaCompatibility.SchemaCompatibilityType;
import static org.apache.avro.SchemaCompatibility.SchemaIncompatibilityType;
import static org.apache.avro.SchemaCompatibility.SchemaPairCompatibility;
import static org.apache.avro.SchemaCompatibility.checkReaderWriterCompatibility;
import static org.apache.avro.TestSchemas.A_DINT_B_DINT_RECORD1;
import static org.apache.avro.TestSchemas.A_DINT_RECORD1;
import static org.apache.avro.TestSchemas.A_INT_B_DINT_RECORD1;
import static org.apache.avro.TestSchemas.A_INT_B_INT_RECORD1;
import static org.apache.avro.TestSchemas.A_INT_RECORD1;
import static org.apache.avro.TestSchemas.A_LONG_RECORD1;
import static org.apache.avro.TestSchemas.BOOLEAN_SCHEMA;
import static org.apache.avro.TestSchemas.BYTES_SCHEMA;
import static org.apache.avro.TestSchemas.BYTES_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.DOUBLE_SCHEMA;
import static org.apache.avro.TestSchemas.DOUBLE_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.EMPTY_RECORD1;
import static org.apache.avro.TestSchemas.EMPTY_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_ABC_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA_DEFAULT;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA_NAMESPACE_1;
import static org.apache.avro.TestSchemas.ENUM1_AB_SCHEMA_NAMESPACE_2;
import static org.apache.avro.TestSchemas.ENUM1_BC_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM_ABC_ENUM_DEFAULT_A_RECORD;
import static org.apache.avro.TestSchemas.ENUM_ABC_ENUM_DEFAULT_A_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM_ABC_FIELD_DEFAULT_B_ENUM_DEFAULT_A_RECORD;
import static org.apache.avro.TestSchemas.ENUM_AB_ENUM_DEFAULT_A_RECORD;
import static org.apache.avro.TestSchemas.ENUM_AB_ENUM_DEFAULT_A_SCHEMA;
import static org.apache.avro.TestSchemas.ENUM_AB_FIELD_DEFAULT_A_ENUM_DEFAULT_B_RECORD;
import static org.apache.avro.TestSchemas.FIXED_4_BYTES;
import static org.apache.avro.TestSchemas.FLOAT_SCHEMA;
import static org.apache.avro.TestSchemas.FLOAT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.INT_FLOAT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_LIST_RECORD;
import static org.apache.avro.TestSchemas.INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_LONG_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_MAP_SCHEMA;
import static org.apache.avro.TestSchemas.INT_SCHEMA;
import static org.apache.avro.TestSchemas.INT_STRING_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.INT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_LIST_RECORD;
import static org.apache.avro.TestSchemas.LONG_MAP_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_SCHEMA;
import static org.apache.avro.TestSchemas.LONG_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.NS_RECORD1;
import static org.apache.avro.TestSchemas.NS_RECORD2;
import static org.apache.avro.TestSchemas.WITH_NS;
import static org.apache.avro.TestSchemas.WITHOUT_NS;
import static org.apache.avro.TestSchemas.NULL_SCHEMA;
import static org.apache.avro.TestSchemas.ReaderWriter;
import static org.apache.avro.TestSchemas.STRING_ARRAY_SCHEMA;
import static org.apache.avro.TestSchemas.STRING_INT_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.STRING_SCHEMA;
import static org.apache.avro.TestSchemas.STRING_UNION_SCHEMA;
import static org.apache.avro.TestSchemas.assertSchemaContains;
import static org.apache.avro.TestSchemas.list;
import static org.junit.jupiter.api.Assertions.*;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.EnumSymbol;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Unit-tests for SchemaCompatibility.
 *
 * <p>
 * Covers the static reader/writer compatibility analysis (record fields,
 * enums, unions, numeric promotions) and also verifies that compatible pairs
 * really decode correctly at runtime via a resolving decoder.
 */
public class TestSchemaCompatibility {
  private static final Logger LOG = LoggerFactory.getLogger(TestSchemaCompatibility.class);
  // -----------------------------------------------------------------------------------------------
  // Writer schema shared by the record-field tests below: two fields,
  // "oldfield1" (int) and "oldfield2" (string), neither with a default value.
  private static final Schema WRITER_SCHEMA = Schema.createRecord(list(
      new Schema.Field("oldfield1", INT_SCHEMA, null, null), new Schema.Field("oldfield2", STRING_SCHEMA, null, null)));
  /** A reader that drops one writer field is compatible (the field is skipped). */
  @Test
  void validateSchemaPairMissingField() {
    final List<Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    // Test omitting a field.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
  }
  /** Dropping the other writer field is equally compatible. */
  @Test
  void validateSchemaPairMissingSecondField() {
    final List<Schema.Field> readerFields = list(new Schema.Field("oldfield2", STRING_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    // Test omitting other field.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
  }
  /** A reader with exactly the writer's fields is compatible. */
  @Test
  void validateSchemaPairAllFields() {
    final List<Schema.Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null),
        new Schema.Field("oldfield2", STRING_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    // Test with all fields.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
  }
  /** A reader may add a new field as long as it supplies a default value. */
  @Test
  void validateSchemaNewFieldWithDefault() {
    final List<Schema.Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null),
        new Schema.Field("newfield1", INT_SCHEMA, null, 42));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    final SchemaCompatibility.SchemaPairCompatibility expectedResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), reader, WRITER_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    // Test new field with default value.
    assertEquals(expectedResult, checkReaderWriterCompatibility(reader, WRITER_SCHEMA));
  }
  /**
   * A reader field that the writer does not provide and that has no default
   * makes the pair incompatible (READER_FIELD_MISSING_DEFAULT_VALUE); also
   * checks the reported location, description, and schema accessors.
   */
  @Test
  void validateSchemaNewField() {
    final List<Schema.Field> readerFields = list(new Schema.Field("oldfield1", INT_SCHEMA, null, null),
        new Schema.Field("newfield1", INT_SCHEMA, null, null));
    final Schema reader = Schema.createRecord(null, null, null, false, readerFields);
    SchemaPairCompatibility compatibility = checkReaderWriterCompatibility(reader, WRITER_SCHEMA);
    // Test new field without default value.
    assertEquals(SchemaCompatibility.SchemaCompatibilityType.INCOMPATIBLE, compatibility.getType());
    assertEquals(SchemaCompatibility.SchemaCompatibilityResult.incompatible(
        SchemaIncompatibilityType.READER_FIELD_MISSING_DEFAULT_VALUE, reader, WRITER_SCHEMA, "newfield1",
        asList("", "fields", "1")), compatibility.getResult());
    assertEquals(String.format(
        "Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
        WRITER_SCHEMA.toString(true), reader.toString(true)), compatibility.getDescription());
    assertEquals(reader, compatibility.getReader());
    assertEquals(WRITER_SCHEMA, compatibility.getWriter());
  }
  /**
   * For an array writer: an array reader is compatible, while a map reader is
   * rejected with TYPE_MISMATCH at the root location.
   */
  @Test
  void validateArrayWriterSchema() {
    final Schema validReader = Schema.createArray(STRING_SCHEMA);
    final Schema invalidReader = Schema.createMap(STRING_SCHEMA);
    final SchemaCompatibility.SchemaPairCompatibility validResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), validReader, STRING_ARRAY_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    final SchemaCompatibility.SchemaPairCompatibility invalidResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.incompatible(
            SchemaIncompatibilityType.TYPE_MISMATCH, invalidReader, STRING_ARRAY_SCHEMA,
            "reader type: MAP not compatible with writer type: ARRAY", Collections.singletonList("")),
        invalidReader, STRING_ARRAY_SCHEMA,
        String.format(
            "Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
            STRING_ARRAY_SCHEMA.toString(true), invalidReader.toString(true)));
    assertEquals(validResult, checkReaderWriterCompatibility(validReader, STRING_ARRAY_SCHEMA));
    assertEquals(invalidResult, checkReaderWriterCompatibility(invalidReader, STRING_ARRAY_SCHEMA));
  }
  /**
   * For a string writer: a string reader is compatible, while an int reader is
   * rejected with TYPE_MISMATCH (no STRING-to-INT promotion exists).
   */
  @Test
  void validatePrimitiveWriterSchema() {
    final Schema validReader = Schema.create(Schema.Type.STRING);
    final SchemaCompatibility.SchemaPairCompatibility validResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.compatible(), validReader, STRING_SCHEMA,
        SchemaCompatibility.READER_WRITER_COMPATIBLE_MESSAGE);
    final SchemaCompatibility.SchemaPairCompatibility invalidResult = new SchemaCompatibility.SchemaPairCompatibility(
        SchemaCompatibility.SchemaCompatibilityResult.incompatible(SchemaIncompatibilityType.TYPE_MISMATCH, INT_SCHEMA,
            STRING_SCHEMA, "reader type: INT not compatible with writer type: STRING", Collections.singletonList("")),
        INT_SCHEMA, STRING_SCHEMA,
        String.format(
            "Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
            STRING_SCHEMA.toString(true), INT_SCHEMA.toString(true)));
    assertEquals(validResult, checkReaderWriterCompatibility(validReader, STRING_SCHEMA));
    assertEquals(invalidResult, checkReaderWriterCompatibility(INT_SCHEMA, STRING_SCHEMA));
  }
  /**
   * Reader union schema must contain all writer union branches.
   */
  @Test
  void unionReaderWriterSubsetIncompatibility() {
    final Schema unionWriter = Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA, LONG_SCHEMA));
    final Schema unionReader = Schema.createUnion(list(INT_SCHEMA, STRING_SCHEMA));
    final SchemaPairCompatibility result = checkReaderWriterCompatibility(unionReader, unionWriter);
    assertEquals(SchemaCompatibilityType.INCOMPATIBLE, result.getType());
    // The missing branch is the writer's third union member (index 2).
    assertEquals("/2", result.getResult().getIncompatibilities().get(0).getLocation());
  }
  /**
   * A mandatory int field cannot read data written with an optional
   * (["null","int"]) field: the null branch has no counterpart, and the
   * incompatibility points at that branch's location.
   */
  @Test
  void unionWriterSimpleReaderIncompatibility() {
    Schema mandatorySchema = SchemaBuilder.record("Account").fields().name("age").type().intType().noDefault()
        .endRecord();
    Schema optionalSchema = SchemaBuilder.record("Account").fields().optionalInt("age").endRecord();
    SchemaPairCompatibility compatibility = checkReaderWriterCompatibility(mandatorySchema, optionalSchema);
    assertEquals(SchemaCompatibilityType.INCOMPATIBLE, compatibility.getType());
    Incompatibility incompatibility = compatibility.getResult().getIncompatibilities().get(0);
    assertEquals("reader type: INT not compatible with writer type: NULL", incompatibility.getMessage());
    assertEquals("/fields/0/type/0", incompatibility.getLocation());
  }
  // -----------------------------------------------------------------------------------------------
  /**
   * Collection of reader/writer schema pair that are compatible.
   */
  public static final List<ReaderWriter> COMPATIBLE_READER_WRITER_TEST_CASES = list(
      new ReaderWriter(BOOLEAN_SCHEMA, BOOLEAN_SCHEMA),
      new ReaderWriter(INT_SCHEMA, INT_SCHEMA),
      new ReaderWriter(LONG_SCHEMA, INT_SCHEMA), new ReaderWriter(LONG_SCHEMA, LONG_SCHEMA),
      // Avro spec says INT/LONG can be promoted to FLOAT/DOUBLE.
      // This is arguable as this causes a loss of precision.
      new ReaderWriter(FLOAT_SCHEMA, INT_SCHEMA), new ReaderWriter(FLOAT_SCHEMA, LONG_SCHEMA),
      new ReaderWriter(DOUBLE_SCHEMA, LONG_SCHEMA),
      new ReaderWriter(DOUBLE_SCHEMA, INT_SCHEMA), new ReaderWriter(DOUBLE_SCHEMA, FLOAT_SCHEMA),
      new ReaderWriter(STRING_SCHEMA, STRING_SCHEMA),
      new ReaderWriter(BYTES_SCHEMA, BYTES_SCHEMA),
      new ReaderWriter(INT_ARRAY_SCHEMA, INT_ARRAY_SCHEMA), new ReaderWriter(LONG_ARRAY_SCHEMA, INT_ARRAY_SCHEMA),
      new ReaderWriter(INT_MAP_SCHEMA, INT_MAP_SCHEMA), new ReaderWriter(LONG_MAP_SCHEMA, INT_MAP_SCHEMA),
      new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_AB_SCHEMA), new ReaderWriter(ENUM1_ABC_SCHEMA, ENUM1_AB_SCHEMA),
      new ReaderWriter(ENUM1_AB_SCHEMA_DEFAULT, ENUM1_ABC_SCHEMA),
      new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_AB_SCHEMA_NAMESPACE_1),
      new ReaderWriter(ENUM1_AB_SCHEMA_NAMESPACE_1, ENUM1_AB_SCHEMA),
      new ReaderWriter(ENUM1_AB_SCHEMA_NAMESPACE_1, ENUM1_AB_SCHEMA_NAMESPACE_2),
      // String-to/from-bytes, introduced in Avro 1.7.7
      new ReaderWriter(STRING_SCHEMA, BYTES_SCHEMA), new ReaderWriter(BYTES_SCHEMA, STRING_SCHEMA),
      // Tests involving unions:
      new ReaderWriter(EMPTY_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
      new ReaderWriter(FLOAT_UNION_SCHEMA, EMPTY_UNION_SCHEMA), new ReaderWriter(FLOAT_UNION_SCHEMA, INT_UNION_SCHEMA),
      new ReaderWriter(FLOAT_UNION_SCHEMA, LONG_UNION_SCHEMA),
      new ReaderWriter(FLOAT_UNION_SCHEMA, INT_LONG_UNION_SCHEMA), new ReaderWriter(INT_UNION_SCHEMA, INT_UNION_SCHEMA),
      new ReaderWriter(INT_STRING_UNION_SCHEMA, STRING_INT_UNION_SCHEMA),
      new ReaderWriter(INT_UNION_SCHEMA, EMPTY_UNION_SCHEMA), new ReaderWriter(LONG_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
      new ReaderWriter(LONG_UNION_SCHEMA, INT_UNION_SCHEMA), new ReaderWriter(FLOAT_UNION_SCHEMA, INT_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_UNION_SCHEMA, INT_UNION_SCHEMA), new ReaderWriter(FLOAT_UNION_SCHEMA, LONG_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_UNION_SCHEMA, LONG_UNION_SCHEMA),
      new ReaderWriter(FLOAT_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_UNION_SCHEMA, FLOAT_UNION_SCHEMA),
      new ReaderWriter(STRING_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
      new ReaderWriter(STRING_UNION_SCHEMA, BYTES_UNION_SCHEMA),
      new ReaderWriter(BYTES_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
      new ReaderWriter(BYTES_UNION_SCHEMA, STRING_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_UNION_SCHEMA, INT_FLOAT_UNION_SCHEMA),
      // Readers capable of reading all branches of a union are compatible
      new ReaderWriter(FLOAT_SCHEMA, INT_FLOAT_UNION_SCHEMA), new ReaderWriter(LONG_SCHEMA, INT_LONG_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_SCHEMA, INT_FLOAT_UNION_SCHEMA),
      new ReaderWriter(DOUBLE_SCHEMA, INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA),
      // Special case of singleton unions:
      new ReaderWriter(FLOAT_SCHEMA, FLOAT_UNION_SCHEMA), new ReaderWriter(INT_UNION_SCHEMA, INT_SCHEMA),
      new ReaderWriter(INT_SCHEMA, INT_UNION_SCHEMA),
      // Fixed types
      new ReaderWriter(FIXED_4_BYTES, FIXED_4_BYTES),
      // Tests involving records:
      new ReaderWriter(EMPTY_RECORD1, EMPTY_RECORD1), new ReaderWriter(EMPTY_RECORD1, A_INT_RECORD1),
      new ReaderWriter(A_INT_RECORD1, A_INT_RECORD1), new ReaderWriter(A_DINT_RECORD1, A_INT_RECORD1),
      new ReaderWriter(A_DINT_RECORD1, A_DINT_RECORD1), new ReaderWriter(A_INT_RECORD1, A_DINT_RECORD1),
      new ReaderWriter(A_LONG_RECORD1, A_INT_RECORD1),
      new ReaderWriter(A_INT_RECORD1, A_INT_B_INT_RECORD1), new ReaderWriter(A_DINT_RECORD1, A_INT_B_INT_RECORD1),
      new ReaderWriter(A_INT_B_DINT_RECORD1, A_INT_RECORD1), new ReaderWriter(A_DINT_B_DINT_RECORD1, EMPTY_RECORD1),
      new ReaderWriter(A_DINT_B_DINT_RECORD1, A_INT_RECORD1),
      new ReaderWriter(A_INT_B_INT_RECORD1, A_DINT_B_DINT_RECORD1),
      new ReaderWriter(INT_LIST_RECORD, INT_LIST_RECORD), new ReaderWriter(LONG_LIST_RECORD, LONG_LIST_RECORD),
      new ReaderWriter(LONG_LIST_RECORD, INT_LIST_RECORD),
      new ReaderWriter(NULL_SCHEMA, NULL_SCHEMA),
      new ReaderWriter(ENUM_AB_ENUM_DEFAULT_A_RECORD, ENUM_ABC_ENUM_DEFAULT_A_RECORD),
      new ReaderWriter(ENUM_AB_FIELD_DEFAULT_A_ENUM_DEFAULT_B_RECORD, ENUM_ABC_FIELD_DEFAULT_B_ENUM_DEFAULT_A_RECORD),
      // This is comparing two records that have an inner array of records with
      // different namespaces.
      new ReaderWriter(NS_RECORD1, NS_RECORD2), new ReaderWriter(WITHOUT_NS, WITH_NS));
  // -----------------------------------------------------------------------------------------------
  /**
   * The reader/writer pairs that are incompatible are now moved to specific test
   * classes, one class per error case (for easier pinpointing of errors). The
   * method to validate incompatibility is still here.
   *
   * <p>
   * Convenience overload for a single expected incompatibility.
   */
  public static void validateIncompatibleSchemas(Schema reader, Schema writer,
      SchemaIncompatibilityType incompatibility, String message, String location) {
    validateIncompatibleSchemas(reader, writer, Collections.singletonList(incompatibility),
        Collections.singletonList(message), Collections.singletonList(location));
  }
  // -----------------------------------------------------------------------------------------------
  /**
   * Asserts that {@code reader} cannot read {@code writer}, and that the
   * reported incompatibilities match the expected types, messages, and
   * locations, in order. The three lists are indexed in parallel.
   */
  public static void validateIncompatibleSchemas(Schema reader, Schema writer,
      List<SchemaIncompatibilityType> incompatibilityTypes, List<String> messages, List<String> locations) {
    SchemaPairCompatibility compatibility = checkReaderWriterCompatibility(reader, writer);
    SchemaCompatibilityResult compatibilityResult = compatibility.getResult();
    assertEquals(reader, compatibility.getReader());
    assertEquals(writer, compatibility.getWriter());
    assertEquals(SchemaCompatibilityType.INCOMPATIBLE, compatibilityResult.getCompatibility());
    assertEquals(incompatibilityTypes.size(), compatibilityResult.getIncompatibilities().size());
    for (int i = 0; i < incompatibilityTypes.size(); i++) {
      Incompatibility incompatibility = compatibilityResult.getIncompatibilities().get(i);
      // The reported schema fragments must come from the original schemas.
      assertSchemaContains(incompatibility.getReaderFragment(), reader);
      assertSchemaContains(incompatibility.getWriterFragment(), writer);
      assertEquals(incompatibilityTypes.get(i), incompatibility.getType());
      assertEquals(messages.get(i), incompatibility.getMessage());
      assertEquals(locations.get(i), incompatibility.getLocation());
    }
    String description = String.format(
        "Data encoded using writer schema:%n%s%n" + "will or may fail to decode using reader schema:%n%s%n",
        writer.toString(true), reader.toString(true));
    assertEquals(description, compatibility.getDescription());
  }
  // -----------------------------------------------------------------------------------------------
  /**
   * Tests reader/writer compatibility validation.
   */
  @Test
  void readerWriterCompatibility() {
    for (ReaderWriter readerWriter : COMPATIBLE_READER_WRITER_TEST_CASES) {
      final Schema reader = readerWriter.getReader();
      final Schema writer = readerWriter.getWriter();
      LOG.debug("Testing compatibility of reader {} with writer {}.", reader, writer);
      final SchemaPairCompatibility result = checkReaderWriterCompatibility(reader, writer);
      assertEquals(SchemaCompatibilityType.COMPATIBLE, result.getType(), String
          .format("Expecting reader %s to be compatible with writer %s, but tested incompatible.", reader, writer));
    }
  }
  // -----------------------------------------------------------------------------------------------
  /**
   * Descriptor for a test case that encodes a datum according to a given writer
   * schema, then decodes it according to reader schema and validates the decoded
   * value.
   */
  private static final class DecodingTestCase {
    /**
     * Writer schema used to encode the datum.
     */
    private final Schema mWriterSchema;
    /**
     * Datum to encode according to the specified writer schema.
     */
    private final Object mDatum;
    /**
     * Reader schema used to decode the datum encoded using the writer schema.
     */
    private final Schema mReaderSchema;
    /**
     * Expected datum value when using the reader schema to decode from the writer
     * schema.
     */
    private final Object mDecodedDatum;
    public DecodingTestCase(final Schema writerSchema, final Object datum, final Schema readerSchema,
        final Object decoded) {
      mWriterSchema = writerSchema;
      mDatum = datum;
      mReaderSchema = readerSchema;
      mDecodedDatum = decoded;
    }
    public Schema getReaderSchema() {
      return mReaderSchema;
    }
    public Schema getWriterSchema() {
      return mWriterSchema;
    }
    public Object getDatum() {
      return mDatum;
    }
    public Object getDecodedDatum() {
      return mDecodedDatum;
    }
  }
  // -----------------------------------------------------------------------------------------------
  // Round-trip cases exercised by readerWriterDecodingCompatibility(): each
  // entry is (writer schema, datum, reader schema, expected decoded value).
  public static final List<DecodingTestCase> DECODING_COMPATIBILITY_TEST_CASES = list(
      new DecodingTestCase(INT_SCHEMA, 1, INT_SCHEMA, 1), new DecodingTestCase(INT_SCHEMA, 1, LONG_SCHEMA, 1L),
      new DecodingTestCase(INT_SCHEMA, 1, FLOAT_SCHEMA, 1.0f), new DecodingTestCase(INT_SCHEMA, 1, DOUBLE_SCHEMA, 1.0d),
      // This is currently accepted but causes a precision loss:
      // IEEE 754 floats have 24 bits signed mantissa
      new DecodingTestCase(INT_SCHEMA, (1 << 24) + 1, FLOAT_SCHEMA, (float) ((1 << 24) + 1)),
      // new DecodingTestCase(LONG_SCHEMA, 1L, INT_SCHEMA, 1), // should work in
      // best-effort!
      new DecodingTestCase(ENUM1_AB_SCHEMA, new EnumSymbol(ENUM1_AB_SCHEMA, "A"), ENUM1_ABC_SCHEMA,
          new EnumSymbol(ENUM1_ABC_SCHEMA, "A")),
      new DecodingTestCase(ENUM1_ABC_SCHEMA, new EnumSymbol(ENUM1_ABC_SCHEMA, "A"), ENUM1_AB_SCHEMA,
          new EnumSymbol(ENUM1_AB_SCHEMA, "A")),
      new DecodingTestCase(ENUM1_ABC_SCHEMA, new EnumSymbol(ENUM1_ABC_SCHEMA, "B"), ENUM1_BC_SCHEMA,
          new EnumSymbol(ENUM1_BC_SCHEMA, "B")),
      new DecodingTestCase(ENUM_ABC_ENUM_DEFAULT_A_SCHEMA, new EnumSymbol(ENUM_ABC_ENUM_DEFAULT_A_SCHEMA, "C"),
          ENUM_AB_ENUM_DEFAULT_A_SCHEMA, new EnumSymbol(ENUM_AB_ENUM_DEFAULT_A_SCHEMA, "A")),
      new DecodingTestCase(INT_STRING_UNION_SCHEMA, "the string", STRING_SCHEMA, new Utf8("the string")),
      new DecodingTestCase(INT_STRING_UNION_SCHEMA, "the string", STRING_UNION_SCHEMA, new Utf8("the string")));
  /**
   * Tests the reader/writer compatibility at decoding time.
   */
  @Test
  void readerWriterDecodingCompatibility() throws Exception {
    for (DecodingTestCase testCase : DECODING_COMPATIBILITY_TEST_CASES) {
      final Schema readerSchema = testCase.getReaderSchema();
      final Schema writerSchema = testCase.getWriterSchema();
      final Object datum = testCase.getDatum();
      final Object expectedDecodedDatum = testCase.getDecodedDatum();
      LOG.debug("Testing incompatibility of reader {} with writer {}.", readerSchema, writerSchema);
      LOG.debug("Encode datum {} with writer {}.", datum, writerSchema);
      // Encode the datum with the writer schema ...
      final ByteArrayOutputStream baos = new ByteArrayOutputStream();
      final Encoder encoder = EncoderFactory.get().binaryEncoder(baos, null);
      final DatumWriter<Object> datumWriter = new GenericDatumWriter<>(writerSchema);
      datumWriter.write(datum, encoder);
      encoder.flush();
      LOG.debug("Decode datum {} whose writer is {} with reader {}.", datum, writerSchema, readerSchema);
      // ... then decode it through a resolving decoder with the reader schema.
      final byte[] bytes = baos.toByteArray();
      final Decoder decoder = DecoderFactory.get().resolvingDecoder(writerSchema, readerSchema,
          DecoderFactory.get().binaryDecoder(bytes, null));
      final DatumReader<Object> datumReader = new GenericDatumReader<>(readerSchema);
      final Object decodedDatum = datumReader.read(null, decoder);
      assertEquals(expectedDecodedDatum, decodedDatum,
          String.format(
              "Expecting decoded value %s when decoding value %s whose writer schema is %s "
                  + "using reader schema %s, but value was %s.",
              expectedDecodedDatum, datum, writerSchema, readerSchema, decodedDatum));
    }
  }
  /**
   * Loads and parses an Avro schema from a classpath resource.
   */
  private Schema readSchemaFromResources(String name) throws IOException {
    try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(name)) {
      final String result = new BufferedReader(new InputStreamReader(inputStream)).lines()
          .collect(Collectors.joining("\n"));
      return new Schema.Parser().parse(result);
    }
  }
  /**
   * End-to-end check that schema resolution works through nested map/record
   * structures: data written with one schema is projected onto a read schema
   * where a field was removed and a new defaulted field ("long_r2") was added.
   */
  @Test
  void checkResolvingDecoder() throws IOException {
    final Schema locationSchema = readSchemaFromResources("schema-location.json");
    final Schema writeSchema = readSchemaFromResources("schema-location-write.json");
    // For the read schema the long field has been removed
    // And a new field has been added, called long_r2
    // This one should be null.
    final Schema readSchema = readSchemaFromResources("schema-location-read.json");
    // Create some testdata
    GenericData.Record record = new GenericData.Record(writeSchema);
    GenericData.Record location = new GenericData.Record(locationSchema);
    location.put("lat", 52.995143f);
    location.put("long", -1.539054f);
    HashMap<String, Record> locations = new HashMap<>();
    locations.put("l1", location);
    record.put("location", locations);
    // Write the record to bytes
    byte[] payload;
    try (ByteArrayOutputStream bbos = new ByteArrayOutputStream()) {
      DatumWriter<GenericData.Record> datumWriter = new GenericDatumWriter<>(writeSchema);
      Encoder enc = EncoderFactory.get().binaryEncoder(bbos, null);
      datumWriter.write(record, enc);
      enc.flush();
      payload = bbos.toByteArray();
    }
    // Read the record, and decode it using the read with the long
    // And project it using the other schema with the long_r2
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(payload, null);
    GenericDatumReader<GenericData.Record> reader = new GenericDatumReader<>();
    reader.setSchema(writeSchema);
    reader.setExpected(readSchema);
    // Get the object we're looking for
    GenericData.Record r = reader.read(null, decoder);
    // NOTE(review): the unchecked cast relies on GenericDatumReader using a
    // HashMap for map fields — confirm if the default map implementation changes.
    HashMap<Utf8, GenericData.Record> locs = (HashMap<Utf8, GenericData.Record>) r.get("location");
    GenericData.Record loc = locs.get(new Utf8("l1"));
    assertNotNull(loc.get("lat"));
    // This is a new field, and should be null
    assertNull(loc.get("long_r2"));
  }
}
| 7,152 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/CustomTypeConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
 * Test {@link Conversion} between Avro string data and {@link CustomType},
 * registered under the logical type name provided by
 * {@link CustomTypeLogicalTypeFactory} so the two can never disagree.
 */
public class CustomTypeConverter extends Conversion<CustomType> {
  /** Single factory instance used only to obtain the logical type name. */
  private static final CustomTypeLogicalTypeFactory FACTORY = new CustomTypeLogicalTypeFactory();

  @Override
  public Class<CustomType> getConvertedType() {
    return CustomType.class;
  }

  @Override
  public String getLogicalTypeName() {
    // Delegate to the factory so the name stays in a single place.
    return FACTORY.getTypeName();
  }

  @Override
  public Schema getRecommendedSchema() {
    // CustomType values are serialized as plain Avro strings.
    return Schema.create(Schema.Type.STRING);
  }

  @Override
  public CustomType fromCharSequence(CharSequence value, Schema schema, LogicalType type) {
    return new CustomType(value);
  }

  @Override
  public CharSequence toCharSequence(CustomType value, Schema schema, LogicalType type) {
    return value.getName();
  }
}
| 7,153 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/TestDataFileReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.nio.file.Files;
import java.nio.file.Path;
import com.sun.management.UnixOperatingSystemMXBean;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableFileInput;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@SuppressWarnings("restriction")
public class TestDataFileReader {
@TempDir
public Path dataDir;
// regression test for bug AVRO-2286
@Test
void forLeakingFileDescriptors() throws IOException {
StringBuilder sb = new StringBuilder();
int maxTries = 3;
for (int tries = 0; tries < maxTries; tries++) {
Path emptyFile = Files.createTempFile("empty", ".avro");
Files.deleteIfExists(emptyFile);
Files.createFile(emptyFile);
long openFilesBeforeOperation = getNumberOfOpenFileDescriptors();
try (DataFileReader<Object> reader = new DataFileReader<>(emptyFile.toFile(), new GenericDatumReader<>())) {
fail("Reading on empty file is supposed to fail.");
} catch (IOException e) {
// everything going as supposed to
}
Files.delete(emptyFile);
long openFilesAfterOperation = getNumberOfOpenFileDescriptors();
if (openFilesBeforeOperation == openFilesAfterOperation)
return;
// Sometimes the number of file descriptors is off due to other processes or
// garbage collection. We note each inconsistency and retry.
sb.append(openFilesBeforeOperation).append("!=").append(openFilesAfterOperation).append(",");
}
fail("File descriptor leaked from new DataFileReader() over " + maxTries + " tries: ("
+ sb.substring(0, sb.length() - 1) + ")");
}
private long getNumberOfOpenFileDescriptors() {
OperatingSystemMXBean osMxBean = ManagementFactory.getOperatingSystemMXBean();
if (osMxBean instanceof UnixOperatingSystemMXBean) {
return ((UnixOperatingSystemMXBean) osMxBean).getOpenFileDescriptorCount();
}
return 0;
}
// regression test for bug AVRO-2944
@Test
void throttledInputStream() throws IOException {
// AVRO-2944 describes hanging/failure in reading Avro file with performing
// magic header check. This happens with throttled input stream,
// where we read into buffer less bytes than requested.
Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
.parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
File f = dataDir.resolve("testThrottledInputStream.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
}
// Without checking for magic header, throttled input has no effect
FileReader r = new DataFileReader(throttledInputStream(f), new GenericDatumReader<>());
assertEquals("TestSchema", r.getSchema().getName());
// With checking for magic header, throttled input should pass too.
FileReader r2 = DataFileReader.openReader(throttledInputStream(f), new GenericDatumReader<>());
assertEquals("TestSchema", r2.getSchema().getName());
}
private SeekableInput throttledInputStream(File f) throws IOException {
SeekableFileInput input = new SeekableFileInput(f);
return new SeekableInput() {
@Override
public void close() throws IOException {
input.close();
}
@Override
public void seek(long p) throws IOException {
input.seek(p);
}
@Override
public long tell() throws IOException {
return input.tell();
}
@Override
public long length() throws IOException {
return input.length();
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
if (len == 1) {
return input.read(b, off, len);
} else {
return input.read(b, off, len - 1);
}
}
};
}
  // another regression test for bug AVRO-2944, testing EOF case
  @Test
  void inputStreamEOF() throws IOException {
    assertThrows(EOFException.class, () -> {
      // AVRO-2944 describes hanging/failure in reading Avro file with performing
      // magic header check. This potentially happens with a defective input stream
      // where a -1 value is unexpectedly returned from a read.
      Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
          .parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
              + "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
      File f = dataDir.resolve("testInputStreamEOF.avro").toFile();
      // First write a valid (empty) data file ...
      try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
        w.create(legacySchema, f);
        w.flush();
      }
      // ... then read it back through a stream whose read() always reports EOF.
      // Should throw an EOFException
      DataFileReader.openReader(eofInputStream(f), new GenericDatumReader<>());
    });
  }
private SeekableInput eofInputStream(File f) throws IOException {
SeekableFileInput input = new SeekableFileInput(f);
return new SeekableInput() {
@Override
public void close() throws IOException {
input.close();
}
@Override
public void seek(long p) throws IOException {
input.seek(p);
}
@Override
public long tell() throws IOException {
return input.tell();
}
@Override
public long length() throws IOException {
return input.length();
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
return -1;
}
};
}
  @Test
  void ignoreSchemaValidationOnRead() throws IOException {
    // This schema has an accent in the name and the default for the field doesn't
    // match the first type in the union. A Java SDK in the past could create a file
    // containing this schema.
    Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
        .parse("{\"type\": \"record\", \"name\": \"InvalidAccëntWithInvalidNull\", \"fields\": "
            + "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
    // Create a file with the legacy schema.
    File f = dataDir.resolve("testIgnoreSchemaValidationOnRead.avro").toFile();
    try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
      w.create(legacySchema, f);
      w.flush();
    }
    // This should not throw an exception: reading must tolerate schemas that
    // would fail today's name/default validation, so old files stay readable.
    try (DataFileStream<Void> r = new DataFileStream<>(new FileInputStream(f), new GenericDatumReader<>())) {
      assertEquals(legacySchema, r.getSchema());
    }
  }
  @Test
  void invalidMagicLength() throws IOException {
    // A one-byte file is too short to contain the Avro magic header at all.
    File f = dataDir.resolve("testInvalidMagicLength.avro").toFile();
    try (FileWriter w = new FileWriter(f)) {
      w.write("-");
    }
    // Opening it must fail fast with InvalidAvroMagicException.
    try (SeekableFileInput fileInput = new SeekableFileInput(f)) {
      assertThrows(InvalidAvroMagicException.class,
          () -> DataFileReader.openReader(fileInput, new GenericDatumReader<>()));
    }
  }
  @Test
  void invalidMagicBytes() throws IOException {
    // The file is long enough, but its leading bytes are not the Avro magic.
    File f = dataDir.resolve("testInvalidMagicBytes.avro").toFile();
    try (FileWriter w = new FileWriter(f)) {
      w.write("invalid");
    }
    // Opening it must fail with InvalidAvroMagicException rather than hang or
    // misinterpret the content.
    try (SeekableFileInput fileInput = new SeekableFileInput(f)) {
      assertThrows(InvalidAvroMagicException.class,
          () -> DataFileReader.openReader(fileInput, new GenericDatumReader<>()));
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.apache.avro;
import static org.apache.avro.TestSchemas.*;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.avro.reflect.ReflectData;
import org.junit.jupiter.api.Test;
public class TestSchemaValidation {
/** Collection of reader/writer schema pair that are compatible. */
public static final List<ReaderWriter> COMPATIBLE_READER_WRITER_TEST_CASES = list(
new ReaderWriter(BOOLEAN_SCHEMA, BOOLEAN_SCHEMA),
new ReaderWriter(INT_SCHEMA, INT_SCHEMA),
new ReaderWriter(LONG_SCHEMA, INT_SCHEMA), new ReaderWriter(LONG_SCHEMA, LONG_SCHEMA),
// Avro spec says INT/LONG can be promoted to FLOAT/DOUBLE.
// This is arguable as this causes a loss of precision.
new ReaderWriter(FLOAT_SCHEMA, INT_SCHEMA), new ReaderWriter(FLOAT_SCHEMA, LONG_SCHEMA),
new ReaderWriter(DOUBLE_SCHEMA, LONG_SCHEMA),
new ReaderWriter(DOUBLE_SCHEMA, INT_SCHEMA), new ReaderWriter(DOUBLE_SCHEMA, FLOAT_SCHEMA),
new ReaderWriter(STRING_SCHEMA, STRING_SCHEMA),
new ReaderWriter(BYTES_SCHEMA, BYTES_SCHEMA),
new ReaderWriter(INT_ARRAY_SCHEMA, INT_ARRAY_SCHEMA), new ReaderWriter(LONG_ARRAY_SCHEMA, INT_ARRAY_SCHEMA),
new ReaderWriter(INT_MAP_SCHEMA, INT_MAP_SCHEMA), new ReaderWriter(LONG_MAP_SCHEMA, INT_MAP_SCHEMA),
new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_AB_SCHEMA), new ReaderWriter(ENUM1_ABC_SCHEMA, ENUM1_AB_SCHEMA),
// String-to/from-bytes, introduced in Avro 1.7.7
new ReaderWriter(STRING_SCHEMA, BYTES_SCHEMA), new ReaderWriter(BYTES_SCHEMA, STRING_SCHEMA),
// Tests involving unions:
new ReaderWriter(EMPTY_UNION_SCHEMA, EMPTY_UNION_SCHEMA), new ReaderWriter(INT_UNION_SCHEMA, INT_UNION_SCHEMA),
new ReaderWriter(INT_STRING_UNION_SCHEMA, STRING_INT_UNION_SCHEMA),
new ReaderWriter(INT_UNION_SCHEMA, EMPTY_UNION_SCHEMA), new ReaderWriter(LONG_UNION_SCHEMA, INT_UNION_SCHEMA),
new ReaderWriter(FLOAT_UNION_SCHEMA, INT_UNION_SCHEMA), new ReaderWriter(FLOAT_UNION_SCHEMA, LONG_UNION_SCHEMA),
new ReaderWriter(DOUBLE_UNION_SCHEMA, INT_UNION_SCHEMA), new ReaderWriter(LONG_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
new ReaderWriter(DOUBLE_UNION_SCHEMA, LONG_UNION_SCHEMA),
new ReaderWriter(FLOAT_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
new ReaderWriter(DOUBLE_UNION_SCHEMA, FLOAT_UNION_SCHEMA),
new ReaderWriter(STRING_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
new ReaderWriter(STRING_UNION_SCHEMA, BYTES_UNION_SCHEMA),
new ReaderWriter(BYTES_UNION_SCHEMA, EMPTY_UNION_SCHEMA),
new ReaderWriter(BYTES_UNION_SCHEMA, STRING_UNION_SCHEMA),
new ReaderWriter(DOUBLE_UNION_SCHEMA, INT_FLOAT_UNION_SCHEMA),
new ReaderWriter(NULL_INT_ARRAY_UNION_SCHEMA, INT_ARRAY_SCHEMA),
new ReaderWriter(NULL_INT_MAP_UNION_SCHEMA, INT_MAP_SCHEMA),
// Readers capable of reading all branches of a union are compatible
new ReaderWriter(FLOAT_SCHEMA, INT_FLOAT_UNION_SCHEMA), new ReaderWriter(LONG_SCHEMA, INT_LONG_UNION_SCHEMA),
new ReaderWriter(DOUBLE_SCHEMA, INT_FLOAT_UNION_SCHEMA),
new ReaderWriter(DOUBLE_SCHEMA, INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA),
// Special case of singleton unions:
new ReaderWriter(FLOAT_SCHEMA, FLOAT_UNION_SCHEMA), new ReaderWriter(INT_UNION_SCHEMA, INT_SCHEMA),
new ReaderWriter(INT_SCHEMA, INT_UNION_SCHEMA),
// Tests involving records:
new ReaderWriter(EMPTY_RECORD1, EMPTY_RECORD1), new ReaderWriter(EMPTY_RECORD1, A_INT_RECORD1),
new ReaderWriter(A_INT_RECORD1, A_INT_RECORD1), new ReaderWriter(A_DINT_RECORD1, A_INT_RECORD1),
new ReaderWriter(A_DINT_RECORD1, A_DINT_RECORD1), new ReaderWriter(A_INT_RECORD1, A_DINT_RECORD1),
new ReaderWriter(A_LONG_RECORD1, A_INT_RECORD1),
new ReaderWriter(A_INT_RECORD1, A_INT_B_INT_RECORD1), new ReaderWriter(A_DINT_RECORD1, A_INT_B_INT_RECORD1),
new ReaderWriter(A_INT_B_DINT_RECORD1, A_INT_RECORD1), new ReaderWriter(A_DINT_B_DINT_RECORD1, EMPTY_RECORD1),
new ReaderWriter(A_DINT_B_DINT_RECORD1, A_INT_RECORD1),
new ReaderWriter(A_INT_B_INT_RECORD1, A_DINT_B_DINT_RECORD1),
// The SchemaValidator, unlike the SchemaCompatibility class, cannot cope with
// recursive schemas
// See AVRO-2074
// new ReaderWriter(INT_LIST_RECORD, INT_LIST_RECORD),
// new ReaderWriter(LONG_LIST_RECORD, LONG_LIST_RECORD),
// new ReaderWriter(LONG_LIST_RECORD, INT_LIST_RECORD),
new ReaderWriter(NULL_SCHEMA, NULL_SCHEMA),
// This is comparing two records that have an inner array of records with
// different namespaces.
new ReaderWriter(NS_RECORD1, NS_RECORD2));
/** Collection of reader/writer schema pair that are incompatible. */
public static final List<ReaderWriter> INCOMPATIBLE_READER_WRITER_TEST_CASES = list(
new ReaderWriter(NULL_SCHEMA, INT_SCHEMA), new ReaderWriter(NULL_SCHEMA, LONG_SCHEMA),
new ReaderWriter(BOOLEAN_SCHEMA, INT_SCHEMA),
new ReaderWriter(INT_SCHEMA, NULL_SCHEMA), new ReaderWriter(INT_SCHEMA, BOOLEAN_SCHEMA),
new ReaderWriter(INT_SCHEMA, LONG_SCHEMA), new ReaderWriter(INT_SCHEMA, FLOAT_SCHEMA),
new ReaderWriter(INT_SCHEMA, DOUBLE_SCHEMA),
new ReaderWriter(LONG_SCHEMA, FLOAT_SCHEMA), new ReaderWriter(LONG_SCHEMA, DOUBLE_SCHEMA),
new ReaderWriter(FLOAT_SCHEMA, DOUBLE_SCHEMA),
new ReaderWriter(STRING_SCHEMA, BOOLEAN_SCHEMA), new ReaderWriter(STRING_SCHEMA, INT_SCHEMA),
new ReaderWriter(BYTES_SCHEMA, NULL_SCHEMA), new ReaderWriter(BYTES_SCHEMA, INT_SCHEMA),
new ReaderWriter(INT_ARRAY_SCHEMA, LONG_ARRAY_SCHEMA), new ReaderWriter(INT_MAP_SCHEMA, INT_ARRAY_SCHEMA),
new ReaderWriter(INT_ARRAY_SCHEMA, INT_MAP_SCHEMA), new ReaderWriter(INT_MAP_SCHEMA, LONG_MAP_SCHEMA),
// new ReaderWriter(ENUM1_AB_SCHEMA, ENUM1_ABC_SCHEMA),
// new ReaderWriter(ENUM1_BC_SCHEMA, ENUM1_ABC_SCHEMA),
new ReaderWriter(ENUM1_AB_SCHEMA, ENUM2_AB_SCHEMA), new ReaderWriter(INT_SCHEMA, ENUM2_AB_SCHEMA),
new ReaderWriter(ENUM2_AB_SCHEMA, INT_SCHEMA),
// Tests involving unions:
new ReaderWriter(INT_UNION_SCHEMA, INT_STRING_UNION_SCHEMA),
new ReaderWriter(STRING_UNION_SCHEMA, INT_STRING_UNION_SCHEMA),
new ReaderWriter(FLOAT_SCHEMA, INT_LONG_FLOAT_DOUBLE_UNION_SCHEMA),
new ReaderWriter(LONG_SCHEMA, INT_FLOAT_UNION_SCHEMA), new ReaderWriter(INT_SCHEMA, INT_FLOAT_UNION_SCHEMA),
// new ReaderWriter(EMPTY_RECORD2, EMPTY_RECORD1),
new ReaderWriter(A_INT_RECORD1, EMPTY_RECORD1), new ReaderWriter(A_INT_B_DINT_RECORD1, EMPTY_RECORD1),
// new ReaderWriter(INT_LIST_RECORD, LONG_LIST_RECORD),
new ReaderWriter(NULL_SCHEMA, INT_SCHEMA));
SchemaValidatorBuilder builder = new SchemaValidatorBuilder();
Schema rec = SchemaBuilder.record("test.Rec").fields().name("a").type().intType().intDefault(1).name("b").type()
.longType().noDefault().endRecord();
Schema rec2 = SchemaBuilder.record("test.Rec").fields().name("a").type().intType().intDefault(1).name("b").type()
.longType().noDefault().name("c").type().intType().intDefault(0).endRecord();
Schema rec3 = SchemaBuilder.record("test.Rec").fields().name("b").type().longType().noDefault().name("c").type()
.intType().intDefault(0).endRecord();
Schema rec4 = SchemaBuilder.record("test.Rec").fields().name("b").type().longType().noDefault().name("c").type()
.intType().noDefault().endRecord();
Schema rec5 = SchemaBuilder.record("test.Rec").fields().name("a").type().stringType().stringDefault("") // different
// type from
// original
.name("b").type().longType().noDefault().name("c").type().intType().intDefault(0).endRecord();
  /**
   * A record schema exercising every Avro type (primitives, fixed, enum, map,
   * array, nested record, optional field) must be mutually compatible with
   * itself.
   */
  @Test
  void allTypes() throws SchemaValidationException {
    Schema s = SchemaBuilder.record("r").fields().requiredBoolean("boolF").requiredInt("intF").requiredLong("longF")
        .requiredFloat("floatF").requiredDouble("doubleF").requiredString("stringF").requiredBytes("bytesF")
        .name("fixedF1").type().fixed("F1").size(1).noDefault().name("enumF").type().enumeration("E1").symbols("S")
        .noDefault().name("mapF").type().map().values().stringType().noDefault().name("arrayF").type().array().items()
        .stringType().noDefault().name("recordF").type().record("inner").fields().name("f").type().intType().noDefault()
        .endRecord().noDefault().optionalBoolean("boolO").endRecord();
    testValidatorPasses(builder.mutualReadStrategy().validateLatest(), s, s);
  }
  /** canRead + validateLatest: only the most recent prior schema is checked. */
  @Test
  void readOnePrior() throws SchemaValidationException {
    testValidatorPasses(builder.canReadStrategy().validateLatest(), rec3, rec);
    testValidatorPasses(builder.canReadStrategy().validateLatest(), rec5, rec3);
    // rec4's field 'c' has no default, so data written with rec is unreadable.
    testValidatorFails(builder.canReadStrategy().validateLatest(), rec4, rec);
  }
  /** canRead + validateAll: the new schema must be able to read every prior one. */
  @Test
  void readAllPrior() throws SchemaValidationException {
    testValidatorPasses(builder.canReadStrategy().validateAll(), rec3, rec, rec2);
    testValidatorFails(builder.canReadStrategy().validateAll(), rec4, rec, rec2, rec3);
    testValidatorFails(builder.canReadStrategy().validateAll(), rec5, rec, rec2, rec3);
  }
  /** canBeRead + validateLatest: the latest prior schema must be able to read the new one. */
  @Test
  void onePriorCanRead() throws SchemaValidationException {
    testValidatorPasses(builder.canBeReadStrategy().validateLatest(), rec, rec3);
    testValidatorFails(builder.canBeReadStrategy().validateLatest(), rec, rec4);
  }
  /** canBeRead + validateAll: every prior schema must be able to read the new one. */
  @Test
  void allPriorCanRead() throws SchemaValidationException {
    testValidatorPasses(builder.canBeReadStrategy().validateAll(), rec, rec3, rec2);
    testValidatorFails(builder.canBeReadStrategy().validateAll(), rec, rec4, rec3, rec2);
  }
  /** mutualRead + validateLatest: new and latest prior schema must read each other. */
  @Test
  void onePriorCompatible() throws SchemaValidationException {
    testValidatorPasses(builder.mutualReadStrategy().validateLatest(), rec, rec3);
    testValidatorFails(builder.mutualReadStrategy().validateLatest(), rec, rec4);
  }
  /** mutualRead + validateAll: the new schema and all prior schemas must read each other. */
  @Test
  void allPriorCompatible() throws SchemaValidationException {
    testValidatorPasses(builder.mutualReadStrategy().validateAll(), rec, rec3, rec2);
    testValidatorFails(builder.mutualReadStrategy().validateAll(), rec, rec4, rec3, rec2);
  }
@Test
void invalidBuild() {
assertThrows(AvroRuntimeException.class, () -> {
builder.strategy(null).validateAll();
});
}
  /** Minimal POJO used only as input to ReflectData schema generation. */
  public static class Point {
    double x;
    double y;
  }
  /** POJO with a nested record field, used for ReflectData schema generation. */
  public static class Circle {
    Point center;
    double radius;
  }
public static final Schema circleSchema = SchemaBuilder.record("Circle").fields().name("center").type()
.record("Point").fields().requiredDouble("x").requiredDouble("y").endRecord().noDefault().requiredDouble("radius")
.endRecord();
public static final Schema circleSchemaDifferentNames = SchemaBuilder.record("crcl").fields().name("center").type()
.record("pt").fields().requiredDouble("x").requiredDouble("y").endRecord().noDefault().requiredDouble("radius")
.endRecord();
  /**
   * A hand-built schema that is structurally identical to the reflected Circle
   * schema (but uses different record names) can still read it.
   */
  @Test
  void reflectMatchStructure() throws SchemaValidationException {
    testValidatorPasses(builder.canBeReadStrategy().validateAll(), circleSchemaDifferentNames,
        ReflectData.get().getSchema(Circle.class));
  }
  /**
   * Same as {@code reflectMatchStructure}, but with the AllowNull reflect
   * variant (which wraps fields in null unions) as the writer.
   */
  @Test
  void reflectWithAllowNullMatchStructure() throws SchemaValidationException {
    testValidatorPasses(builder.canBeReadStrategy().validateAll(), circleSchemaDifferentNames,
        ReflectData.AllowNull.get().getSchema(Circle.class));
  }
  /** A singleton union is rejected when its sole branch is incompatible. */
  @Test
  void unionWithIncompatibleElements() throws SchemaValidationException {
    Schema union1 = Schema.createUnion(Collections.singletonList(rec));
    Schema union2 = Schema.createUnion(Collections.singletonList(rec4));
    testValidatorFails(builder.canReadStrategy().validateAll(), union2, union1);
  }
  /** A singleton union is accepted when its sole branch is compatible. */
  @Test
  void unionWithCompatibleElements() throws SchemaValidationException {
    Schema union1 = Schema.createUnion(Collections.singletonList(rec));
    Schema union2 = Schema.createUnion(Collections.singletonList(rec3));
    testValidatorPasses(builder.canReadStrategy().validateAll(), union2, union1);
  }
  /**
   * Every pair in COMPATIBLE_READER_WRITER_TEST_CASES must be accepted by the
   * canRead/validateAll strategy.
   */
  @Test
  void schemaCompatibilitySuccesses() throws SchemaValidationException {
    // float-union-to-int/long-union does not work...
    // and neither does recursive types
    for (ReaderWriter tc : COMPATIBLE_READER_WRITER_TEST_CASES) {
      testValidatorPasses(builder.canReadStrategy().validateAll(), tc.getReader(), tc.getWriter());
    }
  }
  /**
   * Every pair in INCOMPATIBLE_READER_WRITER_TEST_CASES must be rejected, and
   * the exception message must identify the unreadable writer schema.
   */
  @Test
  void schemaCompatibilityFailures() {
    for (ReaderWriter tc : INCOMPATIBLE_READER_WRITER_TEST_CASES) {
      Schema reader = tc.getReader();
      Schema writer = tc.getWriter();
      String expectedMsg = "Unable to read schema: \n" + writer.toString(false);
      SchemaValidator validator = builder.canReadStrategy().validateAll();
      SchemaValidationException exception = assertThrows(SchemaValidationException.class,
          () -> validator.validate(reader, Collections.singleton(writer)),
          "No or wrong exception for (" + reader.toString(false) + "; " + writer.toString(false) + ")");
      // Only the message prefix is asserted; expectedMsg is reused for diagnostics.
      assertTrue(exception.getMessage().contains("Unable to read schema:"),
          "'" + expectedMsg + "' != '" + exception.getMessage() + "'");
    }
  }
private void testValidatorPasses(SchemaValidator validator, Schema schema, Schema... prev)
throws SchemaValidationException {
ArrayList<Schema> prior = new ArrayList<>();
for (int i = prev.length - 1; i >= 0; i--) {
prior.add(prev[i]);
}
validator.validate(schema, prior);
}
private void testValidatorFails(SchemaValidator validator, Schema schemaFails, Schema... prev)
throws SchemaValidationException {
ArrayList<Schema> prior = new ArrayList<>();
for (int i = prev.length - 1; i >= 0; i--) {
prior.add(prev[i]);
}
boolean threw = false;
try {
// should fail
validator.validate(schemaFails, prior);
} catch (SchemaValidationException sve) {
threw = true;
}
assertTrue(threw);
}
public static final org.apache.avro.Schema recursiveSchema = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"Node\",\"namespace\":\"avro\",\"fields\":[{\"name\":\"value\",\"type\":[\"null\",\"Node\"],\"default\":null}]}");
  /**
   * Unit test to verify that recursive schemas can be validated. See AVRO-2122.
   */
  @Test
  void recursiveSchemaValidation() throws SchemaValidationException {
    // before AVRO-2122, this would cause a StackOverflowError
    // (the validator recursed forever on the self-referencing "Node" type).
    final SchemaValidator backwardValidator = builder.canReadStrategy().validateLatest();
    backwardValidator.validate(recursiveSchema, Collections.singletonList(recursiveSchema));
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalTypes;
import org.apache.avro.message.MissingSchemaException;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.temporal.ChronoUnit;
/**
 * Round-trip and cross-schema tests for the generated
 * {@code toByteBuffer()}/{@code fromByteBuffer()} methods of specific records,
 * with and without logical types.
 */
public class TestSpecificToFromByteArray {
  /** A record using logical types survives a toByteBuffer/fromByteBuffer round trip. */
  @Test
  void specificToFromByteBufferWithLogicalTypes() throws IOException {
    // Java 9+ comes with NANO precision and since we encode it using millis
    // precision
    // Therefore we won't want to have NANOs in the input
    LocalTime t = LocalTime.now().truncatedTo(ChronoUnit.MILLIS);
    Instant instant = Instant.now().truncatedTo(ChronoUnit.MILLIS);
    // NOTE(review): new BigDecimal(double) yields an "exact" binary expansion;
    // presumably intentional here since the value round-trips via the
    // big-decimal logical type — confirm before "fixing".
    final TestRecordWithLogicalTypes record = new TestRecordWithLogicalTypes(true, 34, 35L, 3.14F, 3019.34, null,
        LocalDate.now(), t, instant, new BigDecimal("123.45"), new BigDecimal(-23.456562323));
    final ByteBuffer b = record.toByteBuffer();
    final TestRecordWithLogicalTypes copy = TestRecordWithLogicalTypes.fromByteBuffer(b);
    assertEquals(record, copy);
  }

  /** A record without logical types survives the same round trip. */
  @Test
  void specificToFromByteBufferWithoutLogicalTypes() throws IOException {
    final TestRecordWithoutLogicalTypes record = new TestRecordWithoutLogicalTypes(true, 34, 35L, 3.14F, 3019.34, null,
        (int) System.currentTimeMillis() / 1000, (int) System.currentTimeMillis() / 1000, System.currentTimeMillis(),
        new Conversions.DecimalConversion().toBytes(new BigDecimal("123.45"), null, LogicalTypes.decimal(9, 2)));
    final ByteBuffer b = record.toByteBuffer();
    final TestRecordWithoutLogicalTypes copy = TestRecordWithoutLogicalTypes.fromByteBuffer(b);
    assertEquals(record, copy);
  }

  /**
   * Decoding bytes written by the non-logical-type record with the
   * logical-type decoder must fail: the schema fingerprints do not match.
   */
  @Test
  void specificByteArrayIncompatibleWithLogicalTypes() throws IOException {
    assertThrows(MissingSchemaException.class, () -> {
      final TestRecordWithoutLogicalTypes withoutLogicalTypes = new TestRecordWithoutLogicalTypes(true, 34, 35L, 3.14F,
          3019.34, null, (int) System.currentTimeMillis() / 1000, (int) System.currentTimeMillis() / 1000,
          System.currentTimeMillis(),
          new Conversions.DecimalConversion().toBytes(new BigDecimal("123.45"), null, LogicalTypes.decimal(9, 2)));
      final ByteBuffer b = withoutLogicalTypes.toByteBuffer();
      TestRecordWithLogicalTypes.fromByteBuffer(b);
    });
  }

  /** The reverse direction must fail for the same reason. */
  @Test
  void specificByteArrayIncompatibleWithoutLogicalTypes() throws IOException {
    assertThrows(MissingSchemaException.class, () -> {
      final TestRecordWithLogicalTypes withLogicalTypes = new TestRecordWithLogicalTypes(true, 34, 35L, 3.14F, 3019.34,
          null, LocalDate.now(), LocalTime.now(), Instant.now(), new BigDecimal("123.45"),
          new BigDecimal(-23.456562323));
      final ByteBuffer b = withLogicalTypes.toByteBuffer();
      TestRecordWithoutLogicalTypes.fromByteBuffer(b);
    });
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import org.apache.avro.Conversions;
import org.apache.avro.data.TimeConversions;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.SchemaStore;
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class TestRecordWithLogicalTypes extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
private static final long serialVersionUID = 3313339903648295220L;
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"TestRecordWithLogicalTypes\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"b\",\"type\":\"boolean\"},{\"name\":\"i32\",\"type\":\"int\"},{\"name\":\"i64\",\"type\":\"long\"},{\"name\":\"f32\",\"type\":\"float\"},{\"name\":\"f64\",\"type\":\"double\"},{\"name\":\"s\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"d\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},{\"name\":\"t\",\"type\":{\"type\":\"int\",\"logicalType\":\"time-millis\"}},{\"name\":\"ts\",\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"}},{\"name\":\"dec\",\"type\":{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":9,\"scale\":2}},{\"name\":\"bd\",\"type\":{\"type\":\"bytes\",\"logicalType\":\"big-decimal\"}}]}");
public static org.apache.avro.Schema getClassSchema() {
return SCHEMA$;
}
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<TestRecordWithLogicalTypes> ENCODER = new BinaryMessageEncoder<TestRecordWithLogicalTypes>(
MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<TestRecordWithLogicalTypes> DECODER = new BinaryMessageDecoder<TestRecordWithLogicalTypes>(
MODEL$, SCHEMA$);
/**
* Return the BinaryMessageDecoder instance used by this class.
*/
public static BinaryMessageDecoder<TestRecordWithLogicalTypes> getDecoder() {
return DECODER;
}
/**
* Create a new BinaryMessageDecoder instance for this class that uses the
* specified {@link SchemaStore}.
*
* @param resolver a {@link SchemaStore} used to find schemas by fingerprint
*/
public static BinaryMessageDecoder<TestRecordWithLogicalTypes> createDecoder(SchemaStore resolver) {
return new BinaryMessageDecoder<TestRecordWithLogicalTypes>(MODEL$, SCHEMA$, resolver);
}
  /**
   * Serializes this TestRecordWithLogicalTypes to a ByteBuffer.
   *
   * @return a buffer holding the binary-encoded record
   * @throws java.io.IOException if this instance could not be serialized
   */
  public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
    return ENCODER.encode(this);
  }
  /**
   * Deserializes a TestRecordWithLogicalTypes from a ByteBuffer.
   *
   * @param b a buffer holding a binary-encoded record
   * @return the decoded record
   * @throws java.io.IOException if the buffer does not contain a valid record
   */
  public static TestRecordWithLogicalTypes fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
    return DECODER.decode(b);
  }
@Deprecated
public boolean b;
@Deprecated
public int i32;
@Deprecated
public long i64;
@Deprecated
public float f32;
@Deprecated
public double f64;
@Deprecated
public java.lang.CharSequence s;
@Deprecated
public java.time.LocalDate d;
@Deprecated
public java.time.LocalTime t;
@Deprecated
public java.time.Instant ts;
@Deprecated
public java.math.BigDecimal dec;
@Deprecated
public java.math.BigDecimal bd;
/**
* Default constructor. Note that this does not initialize fields to their
* default values from the schema. If that is desired then one should use
* <code>newBuilder()</code>.
*/
public TestRecordWithLogicalTypes() {
}
/**
* All-args constructor.
*
* @param b The new value for b
* @param i32 The new value for i32
* @param i64 The new value for i64
* @param f32 The new value for f32
* @param f64 The new value for f64
* @param s The new value for s
* @param d The new value for d
* @param t The new value for t
* @param ts The new value for ts
* @param dec The new value for dec
* @param bd The new value for bd
*/
public TestRecordWithLogicalTypes(java.lang.Boolean b, java.lang.Integer i32, java.lang.Long i64, java.lang.Float f32,
java.lang.Double f64, java.lang.CharSequence s, java.time.LocalDate d, java.time.LocalTime t,
java.time.Instant ts, java.math.BigDecimal dec, java.math.BigDecimal bd) {
this.b = b;
this.i32 = i32;
this.i64 = i64;
this.f32 = f32;
this.f64 = f64;
this.s = s;
this.d = d;
this.t = t;
this.ts = ts;
this.dec = dec;
this.bd = bd;
}
@Override
public org.apache.avro.Schema getSchema() {
return SCHEMA$;
}
// Used by DatumWriter. Applications should not call.
@Override
public java.lang.Object get(int field$) {
switch (field$) {
case 0:
return b;
case 1:
return i32;
case 2:
return i64;
case 3:
return f32;
case 4:
return f64;
case 5:
return s;
case 6:
return d;
case 7:
return t;
case 8:
return ts;
case 9:
return dec;
case 10:
return bd;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index " + field$);
}
}
protected static final org.apache.avro.Conversions.DecimalConversion DECIMAL_CONVERSION = new org.apache.avro.Conversions.DecimalConversion();
protected static final Conversions.BigDecimalConversion BIG_DECIMAL_CONVERSION = new org.apache.avro.Conversions.BigDecimalConversion();
protected static final TimeConversions.DateConversion DATE_CONVERSION = new TimeConversions.DateConversion();
protected static final TimeConversions.TimeMillisConversion TIME_CONVERSION = new TimeConversions.TimeMillisConversion();
protected static final TimeConversions.TimestampMillisConversion TIMESTAMP_CONVERSION = new TimeConversions.TimestampMillisConversion();
private static final org.apache.avro.Conversion<?>[] conversions = new org.apache.avro.Conversion<?>[] { null, null,
null, null, null, null, DATE_CONVERSION, TIME_CONVERSION, TIMESTAMP_CONVERSION, DECIMAL_CONVERSION,
BIG_DECIMAL_CONVERSION };
  /**
   * Returns the logical-type conversion for the given field index, or
   * {@code null} for fields without a logical type (see the static
   * {@code conversions} table).
   */
  @Override
  public org.apache.avro.Conversion<?> getConversion(int field) {
    return conversions[field];
  }
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value = "unchecked")
@Override
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0:
b = (java.lang.Boolean) value$;
break;
case 1:
i32 = (java.lang.Integer) value$;
break;
case 2:
i64 = (java.lang.Long) value$;
break;
case 3:
f32 = (java.lang.Float) value$;
break;
case 4:
f64 = (java.lang.Double) value$;
break;
case 5:
s = (java.lang.CharSequence) value$;
break;
case 6:
d = (java.time.LocalDate) value$;
break;
case 7:
t = (java.time.LocalTime) value$;
break;
case 8:
ts = (java.time.Instant) value$;
break;
case 9:
dec = (java.math.BigDecimal) value$;
break;
case 10:
bd = (java.math.BigDecimal) value$;
break;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'b' field.
*
* @return The value of the 'b' field.
*/
public java.lang.Boolean getB() {
return b;
}
/**
* Sets the value of the 'b' field.
*
* @param value the value to set.
*/
public void setB(java.lang.Boolean value) {
this.b = value;
}
/**
* Gets the value of the 'i32' field.
*
* @return The value of the 'i32' field.
*/
public java.lang.Integer getI32() {
return i32;
}
/**
* Sets the value of the 'i32' field.
*
* @param value the value to set.
*/
public void setI32(java.lang.Integer value) {
this.i32 = value;
}
/**
* Gets the value of the 'i64' field.
*
* @return The value of the 'i64' field.
*/
public java.lang.Long getI64() {
return i64;
}
/**
* Sets the value of the 'i64' field.
*
* @param value the value to set.
*/
  public void setI64(java.lang.Long value) {
    this.i64 = value;
  }

  // NOTE(review): these bean-style setters perform no schema validation; use the
  // Builder (newBuilder()) when validation or schema defaults are required.
  /**
   * Gets the value of the 'f32' field.
   *
   * @return The value of the 'f32' field.
   */
  public java.lang.Float getF32() {
    return f32;
  }

  /**
   * Sets the value of the 'f32' field.
   *
   * @param value the value to set.
   */
  public void setF32(java.lang.Float value) {
    this.f32 = value;
  }

  /**
   * Gets the value of the 'f64' field.
   *
   * @return The value of the 'f64' field.
   */
  public java.lang.Double getF64() {
    return f64;
  }

  /**
   * Sets the value of the 'f64' field.
   *
   * @param value the value to set.
   */
  public void setF64(java.lang.Double value) {
    this.f64 = value;
  }

  /**
   * Gets the value of the 's' field.
   *
   * @return The value of the 's' field.
   */
  public java.lang.CharSequence getS() {
    return s;
  }

  /**
   * Sets the value of the 's' field.
   *
   * @param value the value to set.
   */
  public void setS(java.lang.CharSequence value) {
    this.s = value;
  }

  /**
   * Gets the value of the 'd' field (a {@code date} logical type).
   *
   * @return The value of the 'd' field.
   */
  public java.time.LocalDate getD() {
    return d;
  }

  /**
   * Sets the value of the 'd' field.
   *
   * @param value the value to set.
   */
  public void setD(java.time.LocalDate value) {
    this.d = value;
  }

  /**
   * Gets the value of the 't' field (a {@code time} logical type).
   *
   * @return The value of the 't' field.
   */
  public java.time.LocalTime getT() {
    return t;
  }

  /**
   * Sets the value of the 't' field.
   *
   * @param value the value to set.
   */
  public void setT(java.time.LocalTime value) {
    this.t = value;
  }

  /**
   * Gets the value of the 'ts' field (a {@code timestamp} logical type).
   *
   * @return The value of the 'ts' field.
   */
  public java.time.Instant getTs() {
    return ts;
  }

  /**
   * Sets the value of the 'ts' field.
   *
   * @param value the value to set.
   */
  public void setTs(java.time.Instant value) {
    this.ts = value;
  }

  /**
   * Gets the value of the 'dec' field (a {@code decimal} logical type).
   *
   * @return The value of the 'dec' field.
   */
  public java.math.BigDecimal getDec() {
    return dec;
  }

  /**
   * Sets the value of the 'dec' field.
   *
   * @param value the value to set.
   */
  public void setDec(java.math.BigDecimal value) {
    this.dec = value;
  }
/**
* Creates a new TestRecordWithLogicalTypes RecordBuilder.
*
* @return A new TestRecordWithLogicalTypes RecordBuilder
*/
public static TestRecordWithLogicalTypes.Builder newBuilder() {
return new TestRecordWithLogicalTypes.Builder();
}
/**
* Creates a new TestRecordWithLogicalTypes RecordBuilder by copying an existing
* Builder.
*
* @param other The existing builder to copy.
* @return A new TestRecordWithLogicalTypes RecordBuilder
*/
public static TestRecordWithLogicalTypes.Builder newBuilder(TestRecordWithLogicalTypes.Builder other) {
if (other == null) {
return new TestRecordWithLogicalTypes.Builder();
} else {
return new TestRecordWithLogicalTypes.Builder(other);
}
}
/**
* Creates a new TestRecordWithLogicalTypes RecordBuilder by copying an existing
* TestRecordWithLogicalTypes instance.
*
* @param other The existing instance to copy.
* @return A new TestRecordWithLogicalTypes RecordBuilder
*/
public static TestRecordWithLogicalTypes.Builder newBuilder(TestRecordWithLogicalTypes other) {
if (other == null) {
return new TestRecordWithLogicalTypes.Builder();
} else {
return new TestRecordWithLogicalTypes.Builder(other);
}
}
/**
* RecordBuilder for TestRecordWithLogicalTypes instances.
*/
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<TestRecordWithLogicalTypes>
implements org.apache.avro.data.RecordBuilder<TestRecordWithLogicalTypes> {
private boolean b;
private int i32;
private long i64;
private float f32;
private double f64;
private java.lang.CharSequence s;
private java.time.LocalDate d;
private java.time.LocalTime t;
private java.time.Instant ts;
private java.math.BigDecimal dec;
private java.math.BigDecimal bd;
/** Creates a new Builder */
private Builder() {
super(SCHEMA$);
}
/**
* Creates a Builder by copying an existing Builder.
*
* @param other The existing Builder to copy.
*/
private Builder(TestRecordWithLogicalTypes.Builder other) {
super(other);
if (isValidValue(fields()[0], other.b)) {
this.b = data().deepCopy(fields()[0].schema(), other.b);
fieldSetFlags()[0] = other.fieldSetFlags()[0];
}
if (isValidValue(fields()[1], other.i32)) {
this.i32 = data().deepCopy(fields()[1].schema(), other.i32);
fieldSetFlags()[1] = other.fieldSetFlags()[1];
}
if (isValidValue(fields()[2], other.i64)) {
this.i64 = data().deepCopy(fields()[2].schema(), other.i64);
fieldSetFlags()[2] = other.fieldSetFlags()[2];
}
if (isValidValue(fields()[3], other.f32)) {
this.f32 = data().deepCopy(fields()[3].schema(), other.f32);
fieldSetFlags()[3] = other.fieldSetFlags()[3];
}
if (isValidValue(fields()[4], other.f64)) {
this.f64 = data().deepCopy(fields()[4].schema(), other.f64);
fieldSetFlags()[4] = other.fieldSetFlags()[4];
}
if (isValidValue(fields()[5], other.s)) {
this.s = data().deepCopy(fields()[5].schema(), other.s);
fieldSetFlags()[5] = other.fieldSetFlags()[5];
}
if (isValidValue(fields()[6], other.d)) {
this.d = data().deepCopy(fields()[6].schema(), other.d);
fieldSetFlags()[6] = other.fieldSetFlags()[6];
}
if (isValidValue(fields()[7], other.t)) {
this.t = data().deepCopy(fields()[7].schema(), other.t);
fieldSetFlags()[7] = other.fieldSetFlags()[7];
}
if (isValidValue(fields()[8], other.ts)) {
this.ts = data().deepCopy(fields()[8].schema(), other.ts);
fieldSetFlags()[8] = other.fieldSetFlags()[8];
}
if (isValidValue(fields()[9], other.dec)) {
this.dec = data().deepCopy(fields()[9].schema(), other.dec);
fieldSetFlags()[9] = other.fieldSetFlags()[9];
}
if (isValidValue(fields()[10], other.bd)) {
this.bd = data().deepCopy(fields()[10].schema(), other.bd);
fieldSetFlags()[10] = other.fieldSetFlags()[10];
}
}
/**
* Creates a Builder by copying an existing TestRecordWithLogicalTypes instance
*
* @param other The existing instance to copy.
*/
private Builder(TestRecordWithLogicalTypes other) {
super(SCHEMA$);
if (isValidValue(fields()[0], other.b)) {
this.b = data().deepCopy(fields()[0].schema(), other.b);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.i32)) {
this.i32 = data().deepCopy(fields()[1].schema(), other.i32);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.i64)) {
this.i64 = data().deepCopy(fields()[2].schema(), other.i64);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.f32)) {
this.f32 = data().deepCopy(fields()[3].schema(), other.f32);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.f64)) {
this.f64 = data().deepCopy(fields()[4].schema(), other.f64);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.s)) {
this.s = data().deepCopy(fields()[5].schema(), other.s);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.d)) {
this.d = data().deepCopy(fields()[6].schema(), other.d);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.t)) {
this.t = data().deepCopy(fields()[7].schema(), other.t);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.ts)) {
this.ts = data().deepCopy(fields()[8].schema(), other.ts);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.dec)) {
this.dec = data().deepCopy(fields()[9].schema(), other.dec);
fieldSetFlags()[9] = true;
}
if (isValidValue(fields()[10], other.bd)) {
this.bd = data().deepCopy(fields()[10].schema(), other.bd);
fieldSetFlags()[10] = true;
}
}
/**
* Gets the value of the 'b' field.
*
* @return The value.
*/
public java.lang.Boolean getB() {
return b;
}
/**
* Sets the value of the 'b' field.
*
* @param value The value of 'b'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setB(boolean value) {
validate(fields()[0], value);
this.b = value;
fieldSetFlags()[0] = true;
return this;
}
/**
* Checks whether the 'b' field has been set.
*
* @return True if the 'b' field has been set, false otherwise.
*/
public boolean hasB() {
return fieldSetFlags()[0];
}
/**
* Clears the value of the 'b' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearB() {
fieldSetFlags()[0] = false;
return this;
}
/**
* Gets the value of the 'i32' field.
*
* @return The value.
*/
public java.lang.Integer getI32() {
return i32;
}
/**
* Sets the value of the 'i32' field.
*
* @param value The value of 'i32'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setI32(int value) {
validate(fields()[1], value);
this.i32 = value;
fieldSetFlags()[1] = true;
return this;
}
/**
* Checks whether the 'i32' field has been set.
*
* @return True if the 'i32' field has been set, false otherwise.
*/
public boolean hasI32() {
return fieldSetFlags()[1];
}
/**
* Clears the value of the 'i32' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearI32() {
fieldSetFlags()[1] = false;
return this;
}
/**
* Gets the value of the 'i64' field.
*
* @return The value.
*/
public java.lang.Long getI64() {
return i64;
}
/**
* Sets the value of the 'i64' field.
*
* @param value The value of 'i64'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setI64(long value) {
validate(fields()[2], value);
this.i64 = value;
fieldSetFlags()[2] = true;
return this;
}
/**
* Checks whether the 'i64' field has been set.
*
* @return True if the 'i64' field has been set, false otherwise.
*/
public boolean hasI64() {
return fieldSetFlags()[2];
}
/**
* Clears the value of the 'i64' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearI64() {
fieldSetFlags()[2] = false;
return this;
}
/**
* Gets the value of the 'f32' field.
*
* @return The value.
*/
public java.lang.Float getF32() {
return f32;
}
/**
* Sets the value of the 'f32' field.
*
* @param value The value of 'f32'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setF32(float value) {
validate(fields()[3], value);
this.f32 = value;
fieldSetFlags()[3] = true;
return this;
}
/**
* Checks whether the 'f32' field has been set.
*
* @return True if the 'f32' field has been set, false otherwise.
*/
public boolean hasF32() {
return fieldSetFlags()[3];
}
/**
* Clears the value of the 'f32' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearF32() {
fieldSetFlags()[3] = false;
return this;
}
/**
* Gets the value of the 'f64' field.
*
* @return The value.
*/
public java.lang.Double getF64() {
return f64;
}
/**
* Sets the value of the 'f64' field.
*
* @param value The value of 'f64'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setF64(double value) {
validate(fields()[4], value);
this.f64 = value;
fieldSetFlags()[4] = true;
return this;
}
/**
* Checks whether the 'f64' field has been set.
*
* @return True if the 'f64' field has been set, false otherwise.
*/
public boolean hasF64() {
return fieldSetFlags()[4];
}
/**
* Clears the value of the 'f64' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearF64() {
fieldSetFlags()[4] = false;
return this;
}
/**
* Gets the value of the 's' field.
*
* @return The value.
*/
public java.lang.CharSequence getS() {
return s;
}
/**
* Sets the value of the 's' field.
*
* @param value The value of 's'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setS(java.lang.CharSequence value) {
validate(fields()[5], value);
this.s = value;
fieldSetFlags()[5] = true;
return this;
}
/**
* Checks whether the 's' field has been set.
*
* @return True if the 's' field has been set, false otherwise.
*/
public boolean hasS() {
return fieldSetFlags()[5];
}
/**
* Clears the value of the 's' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearS() {
s = null;
fieldSetFlags()[5] = false;
return this;
}
/**
* Gets the value of the 'd' field.
*
* @return The value.
*/
public java.time.LocalDate getD() {
return d;
}
/**
* Sets the value of the 'd' field.
*
* @param value The value of 'd'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setD(java.time.LocalDate value) {
validate(fields()[6], value);
this.d = value;
fieldSetFlags()[6] = true;
return this;
}
/**
* Checks whether the 'd' field has been set.
*
* @return True if the 'd' field has been set, false otherwise.
*/
public boolean hasD() {
return fieldSetFlags()[6];
}
/**
* Clears the value of the 'd' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearD() {
fieldSetFlags()[6] = false;
return this;
}
/**
* Gets the value of the 't' field.
*
* @return The value.
*/
public java.time.LocalTime getT() {
return t;
}
/**
* Sets the value of the 't' field.
*
* @param value The value of 't'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setT(java.time.LocalTime value) {
validate(fields()[7], value);
this.t = value;
fieldSetFlags()[7] = true;
return this;
}
/**
* Checks whether the 't' field has been set.
*
* @return True if the 't' field has been set, false otherwise.
*/
public boolean hasT() {
return fieldSetFlags()[7];
}
/**
* Clears the value of the 't' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearT() {
fieldSetFlags()[7] = false;
return this;
}
/**
* Gets the value of the 'ts' field.
*
* @return The value.
*/
public java.time.Instant getTs() {
return ts;
}
/**
* Sets the value of the 'ts' field.
*
* @param value The value of 'ts'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setTs(java.time.Instant value) {
validate(fields()[8], value);
this.ts = value;
fieldSetFlags()[8] = true;
return this;
}
/**
* Checks whether the 'ts' field has been set.
*
* @return True if the 'ts' field has been set, false otherwise.
*/
public boolean hasTs() {
return fieldSetFlags()[8];
}
/**
* Clears the value of the 'ts' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearTs() {
fieldSetFlags()[8] = false;
return this;
}
/**
* Gets the value of the 'dec' field.
*
* @return The value.
*/
public java.math.BigDecimal getDec() {
return dec;
}
/**
* Sets the value of the 'dec' field.
*
* @param value The value of 'dec'.
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder setDec(java.math.BigDecimal value) {
validate(fields()[9], value);
this.dec = value;
fieldSetFlags()[9] = true;
return this;
}
/**
* Checks whether the 'dec' field has been set.
*
* @return True if the 'dec' field has been set, false otherwise.
*/
public boolean hasDec() {
return fieldSetFlags()[9];
}
/**
* Clears the value of the 'dec' field.
*
* @return This builder.
*/
public TestRecordWithLogicalTypes.Builder clearDec() {
dec = null;
fieldSetFlags()[9] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public TestRecordWithLogicalTypes build() {
try {
TestRecordWithLogicalTypes record = new TestRecordWithLogicalTypes();
record.b = fieldSetFlags()[0] ? this.b : (java.lang.Boolean) defaultValue(fields()[0]);
record.i32 = fieldSetFlags()[1] ? this.i32 : (java.lang.Integer) defaultValue(fields()[1]);
record.i64 = fieldSetFlags()[2] ? this.i64 : (java.lang.Long) defaultValue(fields()[2]);
record.f32 = fieldSetFlags()[3] ? this.f32 : (java.lang.Float) defaultValue(fields()[3]);
record.f64 = fieldSetFlags()[4] ? this.f64 : (java.lang.Double) defaultValue(fields()[4]);
record.s = fieldSetFlags()[5] ? this.s : (java.lang.CharSequence) defaultValue(fields()[5]);
record.d = fieldSetFlags()[6] ? this.d : (java.time.LocalDate) defaultValue(fields()[6]);
record.t = fieldSetFlags()[7] ? this.t : (java.time.LocalTime) defaultValue(fields()[7]);
record.ts = fieldSetFlags()[8] ? this.ts : (java.time.Instant) defaultValue(fields()[8]);
record.dec = fieldSetFlags()[9] ? this.dec : (java.math.BigDecimal) defaultValue(fields()[9]);
record.bd = fieldSetFlags()[10] ? this.dec : (java.math.BigDecimal) defaultValue(fields()[10]);
return record;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
  // Shared datum writer backing writeExternal(); created once from the class
  // schema and logical-type model.
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumWriter<TestRecordWithLogicalTypes> WRITER$ = (org.apache.avro.io.DatumWriter<TestRecordWithLogicalTypes>) MODEL$
      .createDatumWriter(SCHEMA$);

  /** Writes this record as Avro binary data (java.io.Externalizable support). */
  @Override
  public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
    WRITER$.write(this, SpecificData.getEncoder(out));
  }

  // Shared datum reader backing readExternal().
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumReader<TestRecordWithLogicalTypes> READER$ = (org.apache.avro.io.DatumReader<TestRecordWithLogicalTypes>) MODEL$
      .createDatumReader(SCHEMA$);

  /** Populates this record from Avro binary data (java.io.Externalizable support). */
  @Override
  public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
    READER$.read(this, SpecificData.getDecoder(in));
  }
}
| 7,157 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestUnionRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.SchemaStore;
@SuppressWarnings("all")
@AvroGenerated
public class TestUnionRecord extends SpecificRecordBase implements SpecificRecord {
  private static final long serialVersionUID = -3829374192747523457L;
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
      "{\"type\":\"record\",\"name\":\"TestUnionRecord\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"amount\",\"type\":[\"null\",{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":31,\"scale\":8}],\"default\":null}]}");
  public static org.apache.avro.Schema getClassSchema() {
    return SCHEMA$;
  }
  private static final SpecificData MODEL$ = new SpecificData();
  // Register the decimal conversion so the nullable BigDecimal 'amount' field
  // maps onto the bytes/decimal branch of its union schema.
  static {
    MODEL$.addLogicalTypeConversion(new org.apache.avro.Conversions.DecimalConversion());
  }
  private static final BinaryMessageEncoder<TestUnionRecord> ENCODER = new BinaryMessageEncoder<TestUnionRecord>(MODEL$,
      SCHEMA$);
  private static final BinaryMessageDecoder<TestUnionRecord> DECODER = new BinaryMessageDecoder<TestUnionRecord>(MODEL$,
      SCHEMA$);
  /**
   * Return the BinaryMessageEncoder instance used by this class.
   *
   * @return the message encoder used by this class
   */
  public static BinaryMessageEncoder<TestUnionRecord> getEncoder() {
    return ENCODER;
  }
  /**
   * Return the BinaryMessageDecoder instance used by this class.
   *
   * @return the message decoder used by this class
   */
  public static BinaryMessageDecoder<TestUnionRecord> getDecoder() {
    return DECODER;
  }
  /**
   * Create a new BinaryMessageDecoder instance for this class that uses the
   * specified {@link SchemaStore}.
   *
   * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
   * @return a BinaryMessageDecoder instance for this class backed by the given
   *         SchemaStore
   */
  public static BinaryMessageDecoder<TestUnionRecord> createDecoder(SchemaStore resolver) {
    return new BinaryMessageDecoder<TestUnionRecord>(MODEL$, SCHEMA$, resolver);
  }
  /**
   * Serializes this TestUnionRecord to a ByteBuffer.
   *
   * @return a buffer holding the serialized data for this instance
   * @throws java.io.IOException if this instance could not be serialized
   */
  public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
    return ENCODER.encode(this);
  }
  /**
   * Deserializes a TestUnionRecord from a ByteBuffer.
   *
   * @param b a byte buffer holding serialized data for an instance of this class
   * @return a TestUnionRecord instance decoded from the given buffer
   * @throws java.io.IOException if the given bytes could not be deserialized into
   *                             an instance of this class
   */
  public static TestUnionRecord fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
    return DECODER.decode(b);
  }
  // May be null: the schema declares 'amount' as a union of null and decimal.
  private java.math.BigDecimal amount;
  /**
   * Default constructor. Note that this does not initialize fields to their
   * default values from the schema. If that is desired then one should use
   * <code>newBuilder()</code>.
   */
  public TestUnionRecord() {
  }
  /**
   * All-args constructor.
   *
   * @param amount The new value for amount
   */
  public TestUnionRecord(java.math.BigDecimal amount) {
    this.amount = amount;
  }
  @Override
  public SpecificData getSpecificData() {
    return MODEL$;
  }
  @Override
  public org.apache.avro.Schema getSchema() {
    return SCHEMA$;
  }
  // Used by DatumWriter. Applications should not call.
  @Override
  public Object get(int field$) {
    switch (field$) {
    case 0:
      return amount;
    default:
      throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }
  // Used by DatumReader. Applications should not call.
  @Override
  @SuppressWarnings(value = "unchecked")
  public void put(int field$, Object value$) {
    switch (field$) {
    case 0:
      amount = (java.math.BigDecimal) value$;
      break;
    default:
      throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }
  /**
   * Gets the value of the 'amount' field.
   *
   * @return The value of the 'amount' field.
   */
  public java.math.BigDecimal getAmount() {
    return amount;
  }
  /**
   * Sets the value of the 'amount' field.
   *
   * @param value the value to set.
   */
  public void setAmount(java.math.BigDecimal value) {
    this.amount = value;
  }
  /**
   * Creates a new TestUnionRecord RecordBuilder.
   *
   * @return A new TestUnionRecord RecordBuilder
   */
  public static Builder newBuilder() {
    return new Builder();
  }
  /**
   * Creates a new TestUnionRecord RecordBuilder by copying an existing Builder.
   *
   * @param other The existing builder to copy.
   * @return A new TestUnionRecord RecordBuilder
   */
  public static Builder newBuilder(Builder other) {
    if (other == null) {
      return new Builder();
    } else {
      return new Builder(other);
    }
  }
  /**
   * Creates a new TestUnionRecord RecordBuilder by copying an existing
   * TestUnionRecord instance.
   *
   * @param other The existing instance to copy.
   * @return A new TestUnionRecord RecordBuilder
   */
  public static Builder newBuilder(TestUnionRecord other) {
    if (other == null) {
      return new Builder();
    } else {
      return new Builder(other);
    }
  }
  /**
   * RecordBuilder for TestUnionRecord instances.
   */
  @AvroGenerated
  public static class Builder extends SpecificRecordBuilderBase<TestUnionRecord>
      implements org.apache.avro.data.RecordBuilder<TestUnionRecord> {
    private java.math.BigDecimal amount;
    /** Creates a new Builder */
    private Builder() {
      super(SCHEMA$, MODEL$);
    }
    /**
     * Creates a Builder by copying an existing Builder.
     *
     * @param other The existing Builder to copy.
     */
    private Builder(Builder other) {
      super(other);
      if (isValidValue(fields()[0], other.amount)) {
        this.amount = data().deepCopy(fields()[0].schema(), other.amount);
        fieldSetFlags()[0] = other.fieldSetFlags()[0];
      }
    }
    /**
     * Creates a Builder by copying an existing TestUnionRecord instance
     *
     * @param other The existing instance to copy.
     */
    private Builder(TestUnionRecord other) {
      super(SCHEMA$, MODEL$);
      if (isValidValue(fields()[0], other.amount)) {
        this.amount = data().deepCopy(fields()[0].schema(), other.amount);
        fieldSetFlags()[0] = true;
      }
    }
    /**
     * Gets the value of the 'amount' field.
     *
     * @return The value.
     */
    public java.math.BigDecimal getAmount() {
      return amount;
    }
    /**
     * Sets the value of the 'amount' field.
     *
     * @param value The value of 'amount'.
     * @return This builder.
     */
    public Builder setAmount(java.math.BigDecimal value) {
      validate(fields()[0], value);
      this.amount = value;
      fieldSetFlags()[0] = true;
      return this;
    }
    /**
     * Checks whether the 'amount' field has been set.
     *
     * @return True if the 'amount' field has been set, false otherwise.
     */
    public boolean hasAmount() {
      return fieldSetFlags()[0];
    }
    /**
     * Clears the value of the 'amount' field.
     *
     * @return This builder.
     */
    public Builder clearAmount() {
      amount = null;
      fieldSetFlags()[0] = false;
      return this;
    }
    @Override
    @SuppressWarnings("unchecked")
    public TestUnionRecord build() {
      try {
        TestUnionRecord record = new TestUnionRecord();
        record.amount = fieldSetFlags()[0] ? this.amount : (java.math.BigDecimal) defaultValue(fields()[0]);
        return record;
      } catch (org.apache.avro.AvroMissingFieldException e) {
        // Rethrow unchanged so callers can distinguish a missing required field.
        throw e;
      } catch (Exception e) {
        throw new org.apache.avro.AvroRuntimeException(e);
      }
    }
  }
  // Shared datum writer/reader backing the Externalizable hooks below.
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumWriter<TestUnionRecord> WRITER$ = (org.apache.avro.io.DatumWriter<TestUnionRecord>) MODEL$
      .createDatumWriter(SCHEMA$);
  @Override
  public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
    WRITER$.write(this, SpecificData.getEncoder(out));
  }
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumReader<TestUnionRecord> READER$ = (org.apache.avro.io.DatumReader<TestUnionRecord>) MODEL$
      .createDatumReader(SCHEMA$);
  @Override
  public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
    READER$.read(this, SpecificData.getDecoder(in));
  }
}
| 7,158 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithMapsAndArrays.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.util.Utf8;
import org.apache.avro.message.BinaryMessageEncoder;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.SchemaStore;
@org.apache.avro.specific.AvroGenerated
public class TestRecordWithMapsAndArrays extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
  private static final long serialVersionUID = 3113266652594662627L;
  public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
      "{\"type\":\"record\",\"name\":\"TestRecordWithMapsAndArrays\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"arr\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"default\":[]}},{\"name\":\"map\",\"type\":{\"type\":\"map\",\"values\":\"long\",\"avro.java.string\":\"String\",\"default\":{}}}]}");
  public static org.apache.avro.Schema getClassSchema() {
    return SCHEMA$;
  }
  private static final SpecificData MODEL$ = new SpecificData();
  // Single-message encoder/decoder pair built once from the class schema.
  private static final BinaryMessageEncoder<TestRecordWithMapsAndArrays> ENCODER = new BinaryMessageEncoder<>(MODEL$,
      SCHEMA$);
  private static final BinaryMessageDecoder<TestRecordWithMapsAndArrays> DECODER = new BinaryMessageDecoder<>(MODEL$,
      SCHEMA$);
  /**
   * Return the BinaryMessageEncoder instance used by this class.
   *
   * @return the message encoder used by this class
   */
  public static BinaryMessageEncoder<TestRecordWithMapsAndArrays> getEncoder() {
    return ENCODER;
  }
  /**
   * Return the BinaryMessageDecoder instance used by this class.
   *
   * @return the message decoder used by this class
   */
  public static BinaryMessageDecoder<TestRecordWithMapsAndArrays> getDecoder() {
    return DECODER;
  }
  /**
   * Create a new BinaryMessageDecoder instance for this class that uses the
   * specified {@link SchemaStore}.
   *
   * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
   * @return a BinaryMessageDecoder instance for this class backed by the given
   *         SchemaStore
   */
  public static BinaryMessageDecoder<TestRecordWithMapsAndArrays> createDecoder(SchemaStore resolver) {
    return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
  }
  /**
   * Serializes this TestRecordWithMapsAndArrays to a ByteBuffer.
   *
   * @return a buffer holding the serialized data for this instance
   * @throws java.io.IOException if this instance could not be serialized
   */
  public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
    return ENCODER.encode(this);
  }
  /**
   * Deserializes a TestRecordWithMapsAndArrays from a ByteBuffer.
   *
   * @param b a byte buffer holding serialized data for an instance of this class
   * @return a TestRecordWithMapsAndArrays instance decoded from the given buffer
   * @throws java.io.IOException if the given bytes could not be deserialized into
   *                             an instance of this class
   */
  public static TestRecordWithMapsAndArrays fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
    return DECODER.decode(b);
  }
  private java.util.List<java.lang.String> arr;
  private java.util.Map<java.lang.String, java.lang.Long> map;
  /**
   * Default constructor. Note that this does not initialize fields to their
   * default values from the schema. If that is desired then one should use
   * <code>newBuilder()</code>.
   */
  public TestRecordWithMapsAndArrays() {
  }
  /**
   * All-args constructor.
   *
   * @param arr The new value for arr
   * @param map The new value for map
   */
  public TestRecordWithMapsAndArrays(java.util.List<java.lang.String> arr,
      java.util.Map<java.lang.String, java.lang.Long> map) {
    this.arr = arr;
    this.map = map;
  }
  @Override
  public org.apache.avro.specific.SpecificData getSpecificData() {
    return MODEL$;
  }
  @Override
  public org.apache.avro.Schema getSchema() {
    return SCHEMA$;
  }
  // Used by DatumWriter. Applications should not call.
  @Override
  public java.lang.Object get(int field$) {
    switch (field$) {
    case 0:
      return arr;
    case 1:
      return map;
    default:
      throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }
  // Used by DatumReader. Applications should not call.
  @Override
  @SuppressWarnings(value = "unchecked")
  public void put(int field$, java.lang.Object value$) {
    switch (field$) {
    case 0:
      arr = (java.util.List<java.lang.String>) value$;
      break;
    case 1:
      map = (java.util.Map<java.lang.String, java.lang.Long>) value$;
      break;
    default:
      throw new IndexOutOfBoundsException("Invalid index: " + field$);
    }
  }
  /**
   * Gets the value of the 'arr' field.
   *
   * @return The value of the 'arr' field.
   */
  public java.util.List<java.lang.String> getArr() {
    return arr;
  }
  /**
   * Sets the value of the 'arr' field.
   *
   * @param value the value to set.
   */
  public void setArr(java.util.List<java.lang.String> value) {
    this.arr = value;
  }
  /**
   * Gets the value of the 'map' field.
   *
   * @return The value of the 'map' field.
   */
  public java.util.Map<java.lang.String, java.lang.Long> getMap() {
    return map;
  }
  /**
   * Sets the value of the 'map' field.
   *
   * @param value the value to set.
   */
  public void setMap(java.util.Map<java.lang.String, java.lang.Long> value) {
    this.map = value;
  }
/**
* Creates a new TestRecordWithMapsAndArrays RecordBuilder.
*
* @return A new TestRecordWithMapsAndArrays RecordBuilder
*/
public static org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder newBuilder() {
return new org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder();
}
/**
* Creates a new TestRecordWithMapsAndArrays RecordBuilder by copying an
* existing Builder.
*
* @param other The existing builder to copy.
* @return A new TestRecordWithMapsAndArrays RecordBuilder
*/
public static org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder newBuilder(
org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder other) {
if (other == null) {
return new org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder();
} else {
return new org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder(other);
}
}
/**
* Creates a new TestRecordWithMapsAndArrays RecordBuilder by copying an
* existing TestRecordWithMapsAndArrays instance.
*
* @param other The existing instance to copy.
* @return A new TestRecordWithMapsAndArrays RecordBuilder
*/
public static org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder newBuilder(
org.apache.avro.specific.TestRecordWithMapsAndArrays other) {
if (other == null) {
return new org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder();
} else {
return new org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder(other);
}
}
/**
* RecordBuilder for TestRecordWithMapsAndArrays instances.
*/
@org.apache.avro.specific.AvroGenerated
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<TestRecordWithMapsAndArrays>
implements org.apache.avro.data.RecordBuilder<TestRecordWithMapsAndArrays> {
    private java.util.List<java.lang.String> arr;
    private java.util.Map<java.lang.String, java.lang.Long> map;
    /** Creates a new Builder */
    private Builder() {
      super(SCHEMA$, MODEL$);
    }
    /**
     * Creates a Builder by copying an existing Builder.
     *
     * @param other The existing Builder to copy.
     */
    private Builder(org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder other) {
      super(other);
      // Deep-copy each valid field, preserving the source builder's set flags.
      if (isValidValue(fields()[0], other.arr)) {
        this.arr = data().deepCopy(fields()[0].schema(), other.arr);
        fieldSetFlags()[0] = other.fieldSetFlags()[0];
      }
      if (isValidValue(fields()[1], other.map)) {
        this.map = data().deepCopy(fields()[1].schema(), other.map);
        fieldSetFlags()[1] = other.fieldSetFlags()[1];
      }
    }
    /**
     * Creates a Builder by copying an existing TestRecordWithMapsAndArrays instance
     *
     * @param other The existing instance to copy.
     */
    private Builder(org.apache.avro.specific.TestRecordWithMapsAndArrays other) {
      super(SCHEMA$, MODEL$);
      // Every valid field from the instance counts as explicitly set.
      if (isValidValue(fields()[0], other.arr)) {
        this.arr = data().deepCopy(fields()[0].schema(), other.arr);
        fieldSetFlags()[0] = true;
      }
      if (isValidValue(fields()[1], other.map)) {
        this.map = data().deepCopy(fields()[1].schema(), other.map);
        fieldSetFlags()[1] = true;
      }
    }
    /**
     * Gets the value of the 'arr' field.
     *
     * @return The value.
     */
    public java.util.List<java.lang.String> getArr() {
      return arr;
    }
    /**
     * Sets the value of the 'arr' field.
     *
     * @param value The value of 'arr'.
     * @return This builder.
     */
    public org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder setArr(java.util.List<java.lang.String> value) {
      validate(fields()[0], value);
      this.arr = value;
      fieldSetFlags()[0] = true;
      return this;
    }
    /**
     * Checks whether the 'arr' field has been set.
     *
     * @return True if the 'arr' field has been set, false otherwise.
     */
    public boolean hasArr() {
      return fieldSetFlags()[0];
    }
    /**
     * Clears the value of the 'arr' field.
     *
     * @return This builder.
     */
    public org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder clearArr() {
      arr = null;
      fieldSetFlags()[0] = false;
      return this;
    }
    /**
     * Gets the value of the 'map' field.
     *
     * @return The value.
     */
    public java.util.Map<java.lang.String, java.lang.Long> getMap() {
      return map;
    }
    /**
     * Sets the value of the 'map' field.
     *
     * @param value The value of 'map'.
     * @return This builder.
     */
    public org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder setMap(
        java.util.Map<java.lang.String, java.lang.Long> value) {
      validate(fields()[1], value);
      this.map = value;
      fieldSetFlags()[1] = true;
      return this;
    }
    /**
     * Checks whether the 'map' field has been set.
     *
     * @return True if the 'map' field has been set, false otherwise.
     */
    public boolean hasMap() {
      return fieldSetFlags()[1];
    }
/**
* Clears the value of the 'map' field.
*
* @return This builder.
*/
public org.apache.avro.specific.TestRecordWithMapsAndArrays.Builder clearMap() {
map = null;
fieldSetFlags()[1] = false;
return this;
}
@Override
@SuppressWarnings("unchecked")
public TestRecordWithMapsAndArrays build() {
try {
TestRecordWithMapsAndArrays record = new TestRecordWithMapsAndArrays();
record.arr = fieldSetFlags()[0] ? this.arr : (java.util.List<java.lang.String>) defaultValue(fields()[0]);
record.map = fieldSetFlags()[1] ? this.map
: (java.util.Map<java.lang.String, java.lang.Long>) defaultValue(fields()[1]);
return record;
} catch (org.apache.avro.AvroMissingFieldException e) {
throw e;
} catch (java.lang.Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumWriter<TestRecordWithMapsAndArrays> WRITER$ = (org.apache.avro.io.DatumWriter<TestRecordWithMapsAndArrays>) MODEL$
      .createDatumWriter(SCHEMA$);
  /** Java-serialization hook: writes this record using Avro's binary encoding. */
  @Override
  public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
    WRITER$.write(this, SpecificData.getEncoder(out));
  }
  @SuppressWarnings("unchecked")
  private static final org.apache.avro.io.DatumReader<TestRecordWithMapsAndArrays> READER$ = (org.apache.avro.io.DatumReader<TestRecordWithMapsAndArrays>) MODEL$
      .createDatumReader(SCHEMA$);
  /** Java-serialization hook: populates this record from Avro's binary encoding. */
  @Override
  public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
    READER$.read(this, SpecificData.getDecoder(in));
  }
  /** Indicates that the customEncode/customDecode fast paths below are available. */
  @Override
  protected boolean hasCustomCoders() {
    return true;
  }
  @Override
  public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException {
    // Hand-rolled encoder for the 'arr' and 'map' fields. NOTE(review): this
    // dereferences this.arr / this.map without a null check — presumably both
    // fields are required by the schema; confirm against SCHEMA$.
    long size0 = this.arr.size();
    out.writeArrayStart();
    out.setItemCount(size0);
    long actualSize0 = 0;
    for (java.lang.String e0 : this.arr) {
      actualSize0++;
      out.startItem();
      out.writeString(e0);
    }
    out.writeArrayEnd();
    // Detect concurrent mutation: the element count actually written must match
    // the size announced via setItemCount above.
    if (actualSize0 != size0)
      throw new java.util.ConcurrentModificationException(
          "Array-size written was " + size0 + ", but element count was " + actualSize0 + ".");
    long size1 = this.map.size();
    out.writeMapStart();
    out.setItemCount(size1);
    long actualSize1 = 0;
    for (java.util.Map.Entry<java.lang.String, java.lang.Long> e1 : this.map.entrySet()) {
      actualSize1++;
      out.startItem();
      out.writeString(e1.getKey());
      java.lang.Long v1 = e1.getValue();
      out.writeLong(v1);
    }
    out.writeMapEnd();
    // Same concurrent-mutation guard for the map entries.
    if (actualSize1 != size1)
      throw new java.util.ConcurrentModificationException(
          "Map-size written was " + size1 + ", but element count was " + actualSize1 + ".");
  }
  @Override
  public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException {
    // A null fieldOrder means the writer's field order matches this reader's
    // schema, so both fields can be decoded positionally (fast path). Otherwise
    // fields are decoded in the writer-specified order via the switch below.
    org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
    if (fieldOrder == null) {
      long size0 = in.readArrayStart();
      java.util.List<java.lang.String> a0 = this.arr;
      if (a0 == null) {
        a0 = new SpecificData.Array<java.lang.String>((int) size0, SCHEMA$.getField("arr").schema());
        this.arr = a0;
      } else
        a0.clear();
      // ga0 is non-null only when the existing list supports element reuse via peek().
      SpecificData.Array<java.lang.String> ga0 = (a0 instanceof SpecificData.Array
          ? (SpecificData.Array<java.lang.String>) a0
          : null);
      // Avro arrays are written in blocks; the outer loop advances block by block.
      for (; 0 < size0; size0 = in.arrayNext()) {
        for (; size0 != 0; size0--) {
          java.lang.String e0 = (ga0 != null ? ga0.peek() : null);
          e0 = in.readString();
          a0.add(e0);
        }
      }
      long size1 = in.readMapStart();
      java.util.Map<java.lang.String, java.lang.Long> m1 = this.map; // Need fresh name due to limitation of macro
                                                                     // system
      if (m1 == null) {
        m1 = new java.util.HashMap<java.lang.String, java.lang.Long>((int) size1);
        this.map = m1;
      } else
        m1.clear();
      // Avro maps are also block-encoded; same two-level loop as the array above.
      for (; 0 < size1; size1 = in.mapNext()) {
        for (; size1 != 0; size1--) {
          java.lang.String k1 = null;
          k1 = in.readString();
          java.lang.Long v1 = null;
          v1 = in.readLong();
          m1.put(k1, v1);
        }
      }
    } else {
      for (int i = 0; i < 2; i++) {
        switch (fieldOrder[i].pos()) {
        case 0:
          long size0 = in.readArrayStart();
          java.util.List<java.lang.String> a0 = this.arr;
          if (a0 == null) {
            a0 = new SpecificData.Array<java.lang.String>((int) size0, SCHEMA$.getField("arr").schema());
            this.arr = a0;
          } else
            a0.clear();
          SpecificData.Array<java.lang.String> ga0 = (a0 instanceof SpecificData.Array
              ? (SpecificData.Array<java.lang.String>) a0
              : null);
          for (; 0 < size0; size0 = in.arrayNext()) {
            for (; size0 != 0; size0--) {
              java.lang.String e0 = (ga0 != null ? ga0.peek() : null);
              e0 = in.readString();
              a0.add(e0);
            }
          }
          break;
        case 1:
          long size1 = in.readMapStart();
          java.util.Map<java.lang.String, java.lang.Long> m1 = this.map; // Need fresh name due to limitation of macro
                                                                         // system
          if (m1 == null) {
            m1 = new java.util.HashMap<java.lang.String, java.lang.Long>((int) size1);
            this.map = m1;
          } else
            m1.clear();
          for (; 0 < size1; size1 = in.mapNext()) {
            for (; size1 != 0; size1--) {
              java.lang.String k1 = null;
              k1 = in.readString();
              java.lang.Long v1 = null;
              v1 = in.readLong();
              m1.put(k1, v1);
            }
          }
          break;
        default:
          // A position outside [0,1] means the resolver produced a field this
          // schema does not have — the stream cannot be trusted.
          throw new java.io.IOException("Corrupt ResolvingDecoder.");
        }
      }
    }
  }
}
| 7,159 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithoutLogicalTypes.java | /**
* Autogenerated by Avro
*
* DO NOT EDIT DIRECTLY
*/
package org.apache.avro.specific;
import java.nio.ByteBuffer;
import org.apache.avro.message.BinaryMessageDecoder;
import org.apache.avro.message.BinaryMessageEncoder;
@SuppressWarnings("all")
@org.apache.avro.specific.AvroGenerated
public class TestRecordWithoutLogicalTypes extends org.apache.avro.specific.SpecificRecordBase
implements org.apache.avro.specific.SpecificRecord {
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
"{\"type\":\"record\",\"name\":\"TestRecordWithoutLogicalTypes\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"b\",\"type\":\"boolean\"},{\"name\":\"i32\",\"type\":\"int\"},{\"name\":\"i64\",\"type\":\"long\"},{\"name\":\"f32\",\"type\":\"float\"},{\"name\":\"f64\",\"type\":\"double\"},{\"name\":\"s\",\"type\":[\"null\",\"string\"],\"default\":null},{\"name\":\"d\",\"type\":{\"type\":\"int\",\"logicalType\":\"date\"}},{\"name\":\"t\",\"type\":{\"type\":\"int\",\"logicalType\":\"time-millis\"}},{\"name\":\"ts\",\"type\":{\"type\":\"long\",\"logicalType\":\"timestamp-millis\"}},{\"name\":\"dec\",\"type\":{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":9,\"scale\":2}}]}");
public static org.apache.avro.Schema getClassSchema() {
return SCHEMA$;
}
private static final SpecificData MODEL$ = new SpecificData();
private static final BinaryMessageEncoder<TestRecordWithoutLogicalTypes> ENCODER = new BinaryMessageEncoder<TestRecordWithoutLogicalTypes>(
MODEL$, SCHEMA$);
private static final BinaryMessageDecoder<TestRecordWithoutLogicalTypes> DECODER = new BinaryMessageDecoder<TestRecordWithoutLogicalTypes>(
MODEL$, SCHEMA$);
/** Serializes this ${schema.getName()} to a ByteBuffer. */
public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
return ENCODER.encode(this);
}
/** Deserializes a ${schema.getName()} from a ByteBuffer. */
public static TestRecordWithoutLogicalTypes fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
return DECODER.decode(b);
}
private boolean b;
private int i32;
private long i64;
private float f32;
private double f64;
private java.lang.String s;
private int d;
private int t;
private long ts;
private ByteBuffer dec;
/**
* Default constructor. Note that this does not initialize fields to their
* default values from the schema. If that is desired then one should use
* {@link \#newBuilder()}.
*/
public TestRecordWithoutLogicalTypes() {
}
/**
* All-args constructor.
*/
public TestRecordWithoutLogicalTypes(java.lang.Boolean b, java.lang.Integer i32, java.lang.Long i64,
java.lang.Float f32, java.lang.Double f64, java.lang.String s, java.lang.Integer d, java.lang.Integer t,
java.lang.Long ts, java.nio.ByteBuffer dec) {
this.b = b;
this.i32 = i32;
this.i64 = i64;
this.f32 = f32;
this.f64 = f64;
this.s = s;
this.d = d;
this.t = t;
this.ts = ts;
this.dec = dec;
}
public org.apache.avro.Schema getSchema() {
return SCHEMA$;
}
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0:
return b;
case 1:
return i32;
case 2:
return i64;
case 3:
return f32;
case 4:
return f64;
case 5:
return s;
case 6:
return d;
case 7:
return t;
case 8:
return ts;
case 9:
return dec;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value = "unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0:
b = (java.lang.Boolean) value$;
break;
case 1:
i32 = (java.lang.Integer) value$;
break;
case 2:
i64 = (java.lang.Long) value$;
break;
case 3:
f32 = (java.lang.Float) value$;
break;
case 4:
f64 = (java.lang.Double) value$;
break;
case 5:
s = (java.lang.String) value$;
break;
case 6:
d = (java.lang.Integer) value$;
break;
case 7:
t = (java.lang.Integer) value$;
break;
case 8:
ts = (java.lang.Long) value$;
break;
case 9:
dec = (java.nio.ByteBuffer) value$;
break;
default:
throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'b' field.
*/
public java.lang.Boolean getB() {
return b;
}
/**
* Gets the value of the 'i32' field.
*/
public java.lang.Integer getI32() {
return i32;
}
/**
* Gets the value of the 'i64' field.
*/
public java.lang.Long getI64() {
return i64;
}
/**
* Gets the value of the 'f32' field.
*/
public java.lang.Float getF32() {
return f32;
}
/**
* Gets the value of the 'f64' field.
*/
public java.lang.Double getF64() {
return f64;
}
/**
* Gets the value of the 's' field.
*/
public java.lang.String getS() {
return s;
}
/**
* Gets the value of the 'd' field.
*/
public java.lang.Integer getD() {
return d;
}
/**
* Gets the value of the 't' field.
*/
public java.lang.Integer getT() {
return t;
}
/**
* Gets the value of the 'ts' field.
*/
public java.lang.Long getTs() {
return ts;
}
  /**
   * Gets the value of the 'dec' field.
   */
  public java.nio.ByteBuffer getDec() {
    return dec;
  }
/** Creates a new TestRecordWithoutLogicalTypes RecordBuilder */
public static TestRecordWithoutLogicalTypes.Builder newBuilder() {
return new TestRecordWithoutLogicalTypes.Builder();
}
/**
* Creates a new TestRecordWithoutLogicalTypes RecordBuilder by copying an
* existing Builder
*/
public static TestRecordWithoutLogicalTypes.Builder newBuilder(TestRecordWithoutLogicalTypes.Builder other) {
return new TestRecordWithoutLogicalTypes.Builder(other);
}
/**
* Creates a new TestRecordWithoutLogicalTypes RecordBuilder by copying an
* existing TestRecordWithoutLogicalTypes instance
*/
public static TestRecordWithoutLogicalTypes.Builder newBuilder(TestRecordWithoutLogicalTypes other) {
return new TestRecordWithoutLogicalTypes.Builder(other);
}
/**
* RecordBuilder for TestRecordWithoutLogicalTypes instances.
*/
public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<TestRecordWithoutLogicalTypes>
implements org.apache.avro.data.RecordBuilder<TestRecordWithoutLogicalTypes> {
private boolean b;
private int i32;
private long i64;
private float f32;
private double f64;
private java.lang.String s;
private int d;
private int t;
private long ts;
private ByteBuffer dec;
/** Creates a new Builder */
private Builder() {
super(TestRecordWithoutLogicalTypes.SCHEMA$);
}
/** Creates a Builder by copying an existing Builder */
private Builder(TestRecordWithoutLogicalTypes.Builder other) {
super(other);
if (isValidValue(fields()[0], other.b)) {
this.b = data().deepCopy(fields()[0].schema(), other.b);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.i32)) {
this.i32 = data().deepCopy(fields()[1].schema(), other.i32);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.i64)) {
this.i64 = data().deepCopy(fields()[2].schema(), other.i64);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.f32)) {
this.f32 = data().deepCopy(fields()[3].schema(), other.f32);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.f64)) {
this.f64 = data().deepCopy(fields()[4].schema(), other.f64);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.s)) {
this.s = data().deepCopy(fields()[5].schema(), other.s);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.d)) {
this.d = data().deepCopy(fields()[6].schema(), other.d);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.t)) {
this.t = data().deepCopy(fields()[7].schema(), other.t);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.ts)) {
this.ts = data().deepCopy(fields()[8].schema(), other.ts);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.dec)) {
this.dec = data().deepCopy(fields()[9].schema(), other.dec);
fieldSetFlags()[9] = true;
}
}
/**
* Creates a Builder by copying an existing TestRecordWithoutLogicalTypes
* instance
*/
private Builder(TestRecordWithoutLogicalTypes other) {
super(TestRecordWithoutLogicalTypes.SCHEMA$);
if (isValidValue(fields()[0], other.b)) {
this.b = data().deepCopy(fields()[0].schema(), other.b);
fieldSetFlags()[0] = true;
}
if (isValidValue(fields()[1], other.i32)) {
this.i32 = data().deepCopy(fields()[1].schema(), other.i32);
fieldSetFlags()[1] = true;
}
if (isValidValue(fields()[2], other.i64)) {
this.i64 = data().deepCopy(fields()[2].schema(), other.i64);
fieldSetFlags()[2] = true;
}
if (isValidValue(fields()[3], other.f32)) {
this.f32 = data().deepCopy(fields()[3].schema(), other.f32);
fieldSetFlags()[3] = true;
}
if (isValidValue(fields()[4], other.f64)) {
this.f64 = data().deepCopy(fields()[4].schema(), other.f64);
fieldSetFlags()[4] = true;
}
if (isValidValue(fields()[5], other.s)) {
this.s = data().deepCopy(fields()[5].schema(), other.s);
fieldSetFlags()[5] = true;
}
if (isValidValue(fields()[6], other.d)) {
this.d = data().deepCopy(fields()[6].schema(), other.d);
fieldSetFlags()[6] = true;
}
if (isValidValue(fields()[7], other.t)) {
this.t = data().deepCopy(fields()[7].schema(), other.t);
fieldSetFlags()[7] = true;
}
if (isValidValue(fields()[8], other.ts)) {
this.ts = data().deepCopy(fields()[8].schema(), other.ts);
fieldSetFlags()[8] = true;
}
if (isValidValue(fields()[9], other.ts)) {
this.dec = data().deepCopy(fields()[9].schema(), other.dec);
fieldSetFlags()[9] = true;
}
}
/** Gets the value of the 'b' field */
public java.lang.Boolean getB() {
return b;
}
/** Sets the value of the 'b' field */
public TestRecordWithoutLogicalTypes.Builder setB(boolean value) {
validate(fields()[0], value);
this.b = value;
fieldSetFlags()[0] = true;
return this;
}
/** Checks whether the 'b' field has been set */
public boolean hasB() {
return fieldSetFlags()[0];
}
/** Clears the value of the 'b' field */
public TestRecordWithoutLogicalTypes.Builder clearB() {
fieldSetFlags()[0] = false;
return this;
}
/** Gets the value of the 'i32' field */
public java.lang.Integer getI32() {
return i32;
}
/** Sets the value of the 'i32' field */
public TestRecordWithoutLogicalTypes.Builder setI32(int value) {
validate(fields()[1], value);
this.i32 = value;
fieldSetFlags()[1] = true;
return this;
}
/** Checks whether the 'i32' field has been set */
public boolean hasI32() {
return fieldSetFlags()[1];
}
/** Clears the value of the 'i32' field */
public TestRecordWithoutLogicalTypes.Builder clearI32() {
fieldSetFlags()[1] = false;
return this;
}
/** Gets the value of the 'i64' field */
public java.lang.Long getI64() {
return i64;
}
/** Sets the value of the 'i64' field */
public TestRecordWithoutLogicalTypes.Builder setI64(long value) {
validate(fields()[2], value);
this.i64 = value;
fieldSetFlags()[2] = true;
return this;
}
/** Checks whether the 'i64' field has been set */
public boolean hasI64() {
return fieldSetFlags()[2];
}
/** Clears the value of the 'i64' field */
public TestRecordWithoutLogicalTypes.Builder clearI64() {
fieldSetFlags()[2] = false;
return this;
}
/** Gets the value of the 'f32' field */
public java.lang.Float getF32() {
return f32;
}
/** Sets the value of the 'f32' field */
public TestRecordWithoutLogicalTypes.Builder setF32(float value) {
validate(fields()[3], value);
this.f32 = value;
fieldSetFlags()[3] = true;
return this;
}
/** Checks whether the 'f32' field has been set */
public boolean hasF32() {
return fieldSetFlags()[3];
}
/** Clears the value of the 'f32' field */
public TestRecordWithoutLogicalTypes.Builder clearF32() {
fieldSetFlags()[3] = false;
return this;
}
/** Gets the value of the 'f64' field */
public java.lang.Double getF64() {
return f64;
}
/** Sets the value of the 'f64' field */
public TestRecordWithoutLogicalTypes.Builder setF64(double value) {
validate(fields()[4], value);
this.f64 = value;
fieldSetFlags()[4] = true;
return this;
}
/** Checks whether the 'f64' field has been set */
public boolean hasF64() {
return fieldSetFlags()[4];
}
/** Clears the value of the 'f64' field */
public TestRecordWithoutLogicalTypes.Builder clearF64() {
fieldSetFlags()[4] = false;
return this;
}
/** Gets the value of the 's' field */
public java.lang.String getS() {
return s;
}
/** Sets the value of the 's' field */
public TestRecordWithoutLogicalTypes.Builder setS(java.lang.String value) {
validate(fields()[5], value);
this.s = value;
fieldSetFlags()[5] = true;
return this;
}
/** Checks whether the 's' field has been set */
public boolean hasS() {
return fieldSetFlags()[5];
}
/** Clears the value of the 's' field */
public TestRecordWithoutLogicalTypes.Builder clearS() {
s = null;
fieldSetFlags()[5] = false;
return this;
}
/** Gets the value of the 'd' field */
public java.lang.Integer getD() {
return d;
}
/** Sets the value of the 'd' field */
public TestRecordWithoutLogicalTypes.Builder setD(int value) {
validate(fields()[6], value);
this.d = value;
fieldSetFlags()[6] = true;
return this;
}
/** Checks whether the 'd' field has been set */
public boolean hasD() {
return fieldSetFlags()[6];
}
/** Clears the value of the 'd' field */
public TestRecordWithoutLogicalTypes.Builder clearD() {
fieldSetFlags()[6] = false;
return this;
}
/** Gets the value of the 't' field */
public java.lang.Integer getT() {
return t;
}
/** Sets the value of the 't' field */
public TestRecordWithoutLogicalTypes.Builder setT(int value) {
validate(fields()[7], value);
this.t = value;
fieldSetFlags()[7] = true;
return this;
}
/** Checks whether the 't' field has been set */
public boolean hasT() {
return fieldSetFlags()[7];
}
/** Clears the value of the 't' field */
public TestRecordWithoutLogicalTypes.Builder clearT() {
fieldSetFlags()[7] = false;
return this;
}
/** Gets the value of the 'ts' field */
public java.lang.Long getTs() {
return ts;
}
/** Sets the value of the 'ts' field */
public TestRecordWithoutLogicalTypes.Builder setTs(long value) {
validate(fields()[8], value);
this.ts = value;
fieldSetFlags()[8] = true;
return this;
}
/** Checks whether the 'ts' field has been set */
public boolean hasTs() {
return fieldSetFlags()[8];
}
/** Clears the value of the 'ts' field */
public TestRecordWithoutLogicalTypes.Builder clearTs() {
fieldSetFlags()[8] = false;
return this;
}
/** Gedec the value of the 'dec' field */
public java.nio.ByteBuffer getDec() {
return dec;
}
/** Sedec the value of the 'dec' field */
public TestRecordWithoutLogicalTypes.Builder setDec(java.nio.ByteBuffer value) {
validate(fields()[9], value);
this.dec = value;
fieldSetFlags()[9] = true;
return this;
}
/** Checks whether the 'dec' field has been set */
public boolean hasDec() {
return fieldSetFlags()[9];
}
/** Clears the value of the 'dec' field */
public TestRecordWithoutLogicalTypes.Builder clearDec() {
fieldSetFlags()[9] = false;
return this;
}
@Override
public TestRecordWithoutLogicalTypes build() {
try {
TestRecordWithoutLogicalTypes record = new TestRecordWithoutLogicalTypes();
record.b = fieldSetFlags()[0] ? this.b : (java.lang.Boolean) defaultValue(fields()[0]);
record.i32 = fieldSetFlags()[1] ? this.i32 : (java.lang.Integer) defaultValue(fields()[1]);
record.i64 = fieldSetFlags()[2] ? this.i64 : (java.lang.Long) defaultValue(fields()[2]);
record.f32 = fieldSetFlags()[3] ? this.f32 : (java.lang.Float) defaultValue(fields()[3]);
record.f64 = fieldSetFlags()[4] ? this.f64 : (java.lang.Double) defaultValue(fields()[4]);
record.s = fieldSetFlags()[5] ? this.s : (java.lang.String) defaultValue(fields()[5]);
record.d = fieldSetFlags()[6] ? this.d : (java.lang.Integer) defaultValue(fields()[6]);
record.t = fieldSetFlags()[7] ? this.t : (java.lang.Integer) defaultValue(fields()[7]);
record.ts = fieldSetFlags()[8] ? this.ts : (java.lang.Long) defaultValue(fields()[8]);
record.dec = fieldSetFlags()[9] ? this.dec : (java.nio.ByteBuffer) defaultValue(fields()[9]);
return record;
} catch (Exception e) {
throw new org.apache.avro.AvroRuntimeException(e);
}
}
}
}
| 7,160 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public class TestSpecificDatumReader {
  @Test
  void readMyData() throws IOException {
    // Verifies that a SpecificDatumReader subclass can intercept string decoding
    // through findStringClass / newInstanceFromString.
    final EncoderFactory encoderFactory = new EncoderFactory().configureBufferSize(30);
    final DecoderFactory decoderFactory = new DecoderFactory().configureDecoderBufferSize(30);

    // Binary-encode a single string value.
    final ByteArrayOutputStream out = new ByteArrayOutputStream(30);
    final BinaryEncoder encoder = encoderFactory.binaryEncoder(out, null);
    encoder.writeString(new Utf8("Hello"));
    encoder.flush();

    // Decode it with a reader that materializes strings as MyData.
    final MyReader reader = new MyReader();
    reader.setExpected(Schema.create(Schema.Type.STRING));
    reader.setSchema(Schema.create(Schema.Type.STRING));
    reader.getData().setFastReaderEnabled(false);
    final BinaryDecoder decoder = decoderFactory.binaryDecoder(out.toByteArray(), null);
    final MyData result = reader.read(null, decoder);

    Assertions.assertNotNull(result, "MyReader.newInstanceFromString was not called");
    Assertions.assertEquals("Hello", result.getContent());
  }
  /** Immutable wrapper type that decoded strings are turned into. */
  public static class MyData {
    private final String content;
    public MyData(String content) {
      this.content = content;
    }
    public String getContent() {
      return content;
    }
  }
  /** Reader that maps the Avro string type onto MyData. */
  public static class MyReader extends SpecificDatumReader<MyData> {
    @Override
    protected Class findStringClass(Schema schema) {
      return MyData.class;
    }
    @Override
    protected Object newInstanceFromString(Class c, String s) {
      return new MyData(s);
    }
  }
}
| 7,161 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificRecordWithUnion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.Decoder;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import static org.junit.Assert.assertEquals;
public class TestSpecificRecordWithUnion {
  @Test
  void unionLogicalDecimalConversion() throws IOException {
    // Round-trip a record inside a [null, record] union whose 'amount' field
    // uses the decimal logical type, and check the conversion applies on read.
    final TestUnionRecord expected = TestUnionRecord.newBuilder().setAmount(BigDecimal.ZERO).build();
    final Schema unionSchema = SchemaBuilder.unionOf().nullType().and().type(expected.getSchema()).endUnion();

    final byte[] encoded = serializeRecord(
        "{ \"org.apache.avro.specific.TestUnionRecord\": { \"amount\": { \"bytes\": \"\\u0000\" } } }", unionSchema);

    final SpecificDatumReader<SpecificRecord> reader = new SpecificDatumReader<>(unionSchema);
    final Decoder decoder = DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(encoded), null);
    assertEquals(expected, reader.read(null, decoder));
  }
  /**
   * Parses {@code value} as Avro JSON against {@code schema} and returns its
   * binary encoding.
   */
  public static byte[] serializeRecord(String value, Schema schema) throws IOException {
    final DatumReader<Object> jsonReader = new GenericDatumReader<>(schema);
    final Object datum = jsonReader.read(null, DecoderFactory.get().jsonDecoder(schema, value));
    final ByteArrayOutputStream out = new ByteArrayOutputStream();
    final BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    final DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
    writer.write(datum, encoder);
    encoder.flush();
    final byte[] bytes = out.toByteArray();
    out.close();
    return bytes;
  }
}
| 7,162 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/specific/TestSpecificData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/*
* If integerClass is primitive, reflection to find method will
* result in a NoSuchMethodException in the case of a UNION schema
*/
public class TestSpecificData {
  // Resolved in setUp(): intClass for a bare INT schema (primitive int),
  // integerClass for a ["null","int"] union (boxed Integer) — see classTypes().
  private Class<?> intClass;
  private Class<?> integerClass;
  @BeforeEach
  public void setUp() {
    Schema intSchema = Schema.create(Type.INT);
    intClass = SpecificData.get().getClass(intSchema);
    Schema nullSchema = Schema.create(Type.NULL);
    Schema nullIntUnionSchema = Schema.createUnion(Arrays.asList(nullSchema, intSchema));
    integerClass = SpecificData.get().getClass(nullIntUnionSchema);
  }
  @Test
  void classTypes() {
    assertTrue(intClass.isPrimitive());
    assertFalse(integerClass.isPrimitive());
  }
  // Reflection lookups below confirm that the primitive/boxed distinction
  // matters for method resolution: int matches primitive(int) only, Integer
  // matches primitiveWrapper(Integer) only.
  @Test
  void primitiveParam() throws Exception {
    assertNotNull(Reflection.class.getMethod("primitive", intClass));
  }
  @Test
  void primitiveParamError() throws Exception {
    assertThrows(NoSuchMethodException.class, () -> {
      Reflection.class.getMethod("primitiveWrapper", intClass);
    });
  }
  @Test
  void primitiveWrapperParam() throws Exception {
    assertNotNull(Reflection.class.getMethod("primitiveWrapper", integerClass));
  }
  @Test
  void primitiveWrapperParamError() throws Exception {
    assertThrows(NoSuchMethodException.class, () -> {
      Reflection.class.getMethod("primitive", integerClass);
    });
  }
  /** Target for the reflective method lookups above. */
  static class Reflection {
    public void primitive(int i) {
    }
    public void primitiveWrapper(Integer i) {
    }
  }
  /** Minimal hand-written SpecificRecord with an int field 'x' and a String field 'y'. */
  public static class TestRecord extends SpecificRecordBase {
    private static final Schema SCHEMA = Schema.createRecord("TestRecord", null, null, false);
    static {
      List<Field> fields = new ArrayList<>();
      fields.add(new Field("x", Schema.create(Type.INT), null, null));
      Schema stringSchema = Schema.create(Type.STRING);
      GenericData.setStringType(stringSchema, GenericData.StringType.String);
      fields.add(new Field("y", stringSchema, null, null));
      SCHEMA.setFields(fields);
    }
    private int x;
    private String y;
    @Override
    public void put(int i, Object v) {
      switch (i) {
      case 0:
        x = (Integer) v;
        break;
      case 1:
        y = (String) v;
        break;
      default:
        throw new RuntimeException();
      }
    }
    @Override
    public Object get(int i) {
      switch (i) {
      case 0:
        return x;
      case 1:
        return y;
      }
      throw new RuntimeException();
    }
    @Override
    public Schema getSchema() {
      return SCHEMA;
    }
  }
  @Test
  void specificRecordBase() {
    // put/get by field name are provided by SpecificRecordBase on top of the
    // positional put/get implemented in TestRecord.
    final TestRecord record = new TestRecord();
    record.put("x", 1);
    record.put("y", "str");
    assertEquals(1, record.get("x"));
    assertEquals("str", record.get("y"));
  }
  @Test
  void externalizeable() throws Exception {
    // Round-trip through Java serialization; equality must be preserved.
    final TestRecord before = new TestRecord();
    before.put("x", 1);
    before.put("y", "str");
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    ObjectOutputStream out = new ObjectOutputStream(bytes);
    out.writeObject(before);
    out.close();
    ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    TestRecord after = (TestRecord) in.readObject();
    assertEquals(before, after);
  }
  /** Tests that non Stringable datum are rejected by specific writers. */
  @Test
  void nonStringable() throws Exception {
    final Schema string = Schema.create(Type.STRING);
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final Encoder encoder = EncoderFactory.get().directBinaryEncoder(baos, null);
    final DatumWriter<Object> writer = new SpecificDatumWriter<>(string);
    try {
      writer.write(new Object(), encoder);
      fail("Non stringable object should be rejected.");
    } catch (ClassCastException cce) {
      // Expected error
    }
  }
  @Test
  void classNameContainingReservedWords() {
    // 'public' is a Java reserved word, so the corresponding package segment in
    // the generated class name is escaped with a trailing '$'.
    final Schema schema = Schema.createRecord("AnyName", null, "db.public.table", false);
    assertEquals("db.public$.table.AnyName", SpecificData.getClassName(schema));
  }
}
| 7,163 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/TimePeriodTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util;
import org.junit.jupiter.api.Test;
import java.time.DateTimeException;
import java.time.Duration;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.Period;
import java.time.chrono.IsoChronology;
import java.time.chrono.JapaneseChronology;
import java.time.temporal.ChronoUnit;
import java.time.temporal.Temporal;
import java.time.temporal.TemporalAmount;
import java.time.temporal.TemporalUnit;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.ERAS;
import static java.time.temporal.ChronoUnit.MICROS;
import static java.time.temporal.ChronoUnit.MILLIS;
import static java.time.temporal.ChronoUnit.MONTHS;
import static java.time.temporal.ChronoUnit.NANOS;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
class TimePeriodTest {
  // This Long is too large to fit into an unsigned int.
  private static final long TOO_LARGE = Integer.MAX_VALUE * 3L;
  /**
   * Construction via of()/from(): conversion of every supported ChronoUnit down
   * to (months, days, millis) components, plus rejection of unsupported
   * amounts, chronologies, and values that overflow an unsigned int.
   */
  @Test
  void validateConstruction() {
    TimePeriod timePeriod = TimePeriod.of(12, 34, 56);
    // from(TimePeriod) returns the same instance rather than copying.
    assertSame(timePeriod, TimePeriod.from(timePeriod));
    assertComponents(12, 34, 56, timePeriod);
    assertComponents(14, 3, 0, TimePeriod.from(IsoChronology.INSTANCE.period(1, 2, 3)));
    assertComponents(36_000, 0, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.MILLENNIA, 3)));
    assertComponents(3_600, 0, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.CENTURIES, 3)));
    assertComponents(360, 0, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.DECADES, 3)));
    assertComponents(36, 0, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.YEARS, 3)));
    assertComponents(3, 0, 0, TimePeriod.from(TimeAmount.of(MONTHS, 3)));
    assertComponents(0, 21, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.WEEKS, 3)));
    assertComponents(0, 3, 0, TimePeriod.from(TimeAmount.of(DAYS, 3)));
    // An even number of half-days is whole days; an odd one leaves 12h of millis.
    assertComponents(0, 2, 0, TimePeriod.from(TimeAmount.of(ChronoUnit.HALF_DAYS, 4)));
    assertComponents(0, 2, 43_200_000, TimePeriod.from(TimeAmount.of(ChronoUnit.HALF_DAYS, 5)));
    assertComponents(0, 0, 10_800_000, TimePeriod.from(TimeAmount.of(ChronoUnit.HOURS, 3)));
    assertComponents(0, 0, 180_000, TimePeriod.from(TimeAmount.of(ChronoUnit.MINUTES, 3)));
    assertComponents(0, 0, 3_000, TimePeriod.from(TimeAmount.of(ChronoUnit.SECONDS, 3)));
    assertComponents(0, 0, 3, TimePeriod.from(TimeAmount.of(MILLIS, 3)));
    assertComponents(0, 0, 3, TimePeriod.from(TimeAmount.of(MICROS, 3_000)));
    assertComponents(0, 0, 3, TimePeriod.from(TimeAmount.of(NANOS, 3_000_000)));
    // Micros and nanos must be a multiple of milliseconds
    assertThrows(DateTimeException.class, () -> TimePeriod.from(TimeAmount.of(ChronoUnit.MICROS, 3)));
    assertThrows(DateTimeException.class, () -> TimePeriod.from(TimeAmount.of(ChronoUnit.NANOS, 3)));
    // Unsupported cases (null, non-ISO chronology, unknown temporal unit,
    // non-ChronoUnit)
    assertThrows(NullPointerException.class, () -> TimePeriod.from(null));
    assertThrows(DateTimeException.class, () -> TimePeriod.from(JapaneseChronology.INSTANCE.period(1, 2, 3)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> TimePeriod.from(TimeAmount.of(ChronoUnit.ERAS, 1)));
    assertThrows(UnsupportedTemporalTypeException.class, () -> TimePeriod.from(TimeAmount.of(DummyUnit.INSTANCE, 3)));
    // Arguments are long, but must fit an unsigned int
    assertThrows(ArithmeticException.class, () -> TimePeriod.of(TOO_LARGE, 0, 0));
    assertThrows(ArithmeticException.class, () -> TimePeriod.of(0, TOO_LARGE, 0));
    assertThrows(ArithmeticException.class, () -> TimePeriod.of(0, 0, TOO_LARGE));
    // Odd one out: querying an unsupported temporal unit
    // (assertComponents handles all valid cases)
    assertThrows(UnsupportedTemporalTypeException.class, () -> TimePeriod.of(1, 1, 1).get(ERAS));
  }
  /** Conversions from java.time Period and Duration into TimePeriod. */
  @Test
  void checkConversionsFromJavaTime() {
    assertEquals(TimePeriod.of(12, 0, 0), TimePeriod.from(Period.ofYears(1)));
    assertEquals(TimePeriod.of(2, 0, 0), TimePeriod.from(Period.ofMonths(2)));
    assertEquals(TimePeriod.of(0, 21, 0), TimePeriod.from(Period.ofWeeks(3)));
    assertEquals(TimePeriod.of(0, 4, 0), TimePeriod.from(Period.ofDays(4)));
    assertEquals(TimePeriod.of(0, 0, 1), TimePeriod.from(Duration.ofNanos(1_000_000)));
    assertEquals(TimePeriod.of(0, 0, 2), TimePeriod.from(Duration.ofMillis(2)));
    assertEquals(TimePeriod.of(0, 0, 3_000), TimePeriod.from(Duration.ofSeconds(3)));
    assertEquals(TimePeriod.of(0, 0, 240000), TimePeriod.from(Duration.ofMinutes(4)));
    assertEquals(TimePeriod.of(0, 0, 18000000), TimePeriod.from(Duration.ofHours(5)));
    // Duration never takes into account things like daylight saving
    assertEquals(TimePeriod.of(0, 0, 518400000), TimePeriod.from(Duration.ofDays(6)));
  }
  /**
   * Conversions to java.time: date-based periods yield Period, time-based ones
   * yield Duration; mixed or out-of-range periods throw DateTimeException.
   */
  @Test
  void checkConversionsToJavaTime() {
    TimePeriod months = TimePeriod.of(1, 0, 0);
    TimePeriod days = TimePeriod.of(0, 2, 0);
    TimePeriod time = TimePeriod.of(0, 0, 3);
    TimePeriod all = TimePeriod.of(1, 2, 3);
    assertTrue(months.isDateBased());
    assertTrue(days.isDateBased());
    assertFalse(all.isDateBased());
    assertFalse(time.isDateBased());
    assertEquals(Period.of(0, 1, 0), months.toPeriod());
    assertEquals(Period.of(0, 0, 2), days.toPeriod());
    assertThrows(DateTimeException.class, all::toPeriod);
    assertThrows(DateTimeException.class, time::toPeriod);
    // Day count that no longer fits a (signed) int cannot become a Period.
    assertThrows(DateTimeException.class, () -> TimePeriod.of(0, Integer.MAX_VALUE * 2L, 0).toPeriod());
    assertFalse(months.isTimeBased());
    assertFalse(days.isTimeBased());
    assertFalse(all.isTimeBased());
    assertTrue(time.isTimeBased());
    assertThrows(DateTimeException.class, months::toDuration);
    // Note: though Duration supports this, it uses a fixed 86400 seconds
    assertEquals(Duration.ofSeconds(172800), days.toDuration());
    assertThrows(DateTimeException.class, all::toDuration);
    assertEquals(Duration.ofMillis(3), time.toDuration());
  }
  /** plus/minus on LocalDateTime, LocalDate and LocalTime via TemporalAmount. */
  @Test
  void checkAddingToTemporalItems() {
    TimePeriod monthAndTwoDays = TimePeriod.of(1, 2, 0);
    TimePeriod threeMillis = TimePeriod.of(0, 0, 3);
    TimePeriod complexTimePeriod = TimePeriod.of(1, 2, 3);
    LocalDateTime localDateTime = LocalDateTime.of(2001, 2, 3, 4, 5, 6, 7_000_000);
    LocalDate localDate = LocalDate.of(2001, 2, 3);
    LocalTime localTime = LocalTime.of(4, 5, 6, 7_000_000);
    assertEquals(localDateTime.plusMonths(1).plusDays(2), localDateTime.plus(monthAndTwoDays));
    assertEquals(localDateTime.plus(3, MILLIS), localDateTime.plus(threeMillis));
    assertEquals(localDateTime.plusMonths(1).plusDays(2).plus(3, MILLIS), localDateTime.plus(complexTimePeriod));
    assertEquals(localDate.plusMonths(1).plusDays(2), localDate.plus(monthAndTwoDays));
    assertEquals(localTime.plus(3, MILLIS), localTime.plus(threeMillis));
    assertEquals(localDateTime.minusMonths(1).minusDays(2), localDateTime.minus(monthAndTwoDays));
    assertEquals(localDateTime.minus(3, MILLIS), localDateTime.minus(threeMillis));
    assertEquals(localDateTime.minusMonths(1).minusDays(2).minus(3, MILLIS), localDateTime.minus(complexTimePeriod));
    assertEquals(localDate.minusMonths(1).minusDays(2), localDate.minus(monthAndTwoDays));
    assertEquals(localTime.minus(3, MILLIS), localTime.minus(threeMillis));
  }
  /** equals/hashCode contract: equal components match, any differing component doesn't. */
  @Test
  void checkEqualityTests() {
    TimePeriod timePeriod1a = TimePeriod.of(1, 2, 3);
    TimePeriod timePeriod1b = TimePeriod.of(1, 2, 3);
    TimePeriod timePeriod2 = TimePeriod.of(9, 9, 9);
    TimePeriod timePeriod3 = TimePeriod.of(1, 9, 9);
    TimePeriod timePeriod4 = TimePeriod.of(1, 2, 9);
    // noinspection EqualsWithItself
    assertEquals(timePeriod1a, timePeriod1a);
    assertEquals(timePeriod1a, timePeriod1b);
    assertEquals(timePeriod1a.hashCode(), timePeriod1b.hashCode());
    assertNotEquals(timePeriod1a, null);
    // noinspection AssertBetweenInconvertibleTypes
    assertNotEquals(timePeriod1a, "not equal");
    assertNotEquals(timePeriod1a, timePeriod2);
    assertNotEquals(timePeriod1a.hashCode(), timePeriod2.hashCode());
    assertNotEquals(timePeriod1a, timePeriod3);
    assertNotEquals(timePeriod1a.hashCode(), timePeriod3.hashCode());
    assertNotEquals(timePeriod1a, timePeriod4);
    assertNotEquals(timePeriod1a.hashCode(), timePeriod4.hashCode());
  }
  /** ISO-8601-like toString(): zero components are omitted per component group. */
  @Test
  void checkStringRepresentation() {
    assertEquals("P0", TimePeriod.of(0, 0, 0).toString());
    assertEquals("P1Y", TimePeriod.of(12, 0, 0).toString());
    assertEquals("P2M", TimePeriod.of(2, 0, 0).toString());
    assertEquals("P3", TimePeriod.of(0, 3, 0).toString());
    assertEquals("P1Y2M3", TimePeriod.of(14, 3, 0).toString());
    assertEquals("PT04", TimePeriod.of(0, 0, 14400000).toString());
    assertEquals("PT00:05", TimePeriod.of(0, 0, 300000).toString());
    assertEquals("PT00:00:06", TimePeriod.of(0, 0, 6000).toString());
    assertEquals("PT00:00:00.007", TimePeriod.of(0, 0, 7).toString());
    assertEquals("P1Y2M3T04:05:06.007", TimePeriod.of(14, 3, 14706007).toString());
    // Days and millis will never overflow to months/days, to respect differences
    // in months and days (daylight saving).
    assertEquals("P123T1193:02:47.295", TimePeriod.of(0, 123, 4294967295L).toString());
  }
  /**
   * Asserts that the TimePeriod has exactly the given components, both via the
   * dedicated getters and via get(TemporalUnit), and that getUnits() lists only
   * the non-zero units (in MONTHS, DAYS, MILLIS order).
   */
  private void assertComponents(long months, long days, long millis, TimePeriod timePeriod) {
    List<TemporalUnit> expectedUnits = new ArrayList<>(Arrays.asList(MONTHS, DAYS, MILLIS));
    if (months == 0) {
      expectedUnits.remove(MONTHS);
    }
    if (days == 0) {
      expectedUnits.remove(DAYS);
    }
    if (millis == 0) {
      expectedUnits.remove(MILLIS);
    }
    assertEquals(expectedUnits, timePeriod.getUnits());
    assertEquals(months, timePeriod.getMonths());
    assertEquals(months, timePeriod.get(MONTHS));
    assertEquals(days, timePeriod.getDays());
    assertEquals(days, timePeriod.get(DAYS));
    assertEquals(millis, timePeriod.getMillis());
    assertEquals(millis, timePeriod.get(MILLIS));
  }
  /**
   * Minimal TemporalAmount stub holding explicit per-unit amounts, used to feed
   * arbitrary units into TimePeriod.from(). addTo/subtractFrom are unsupported
   * because from() never calls them.
   */
  private static class TimeAmount implements TemporalAmount {
    private final Map<TemporalUnit, Long> amountsPerUnit = new LinkedHashMap<>();
    static TimeAmount of(TemporalUnit unit, long amount) {
      return new TimeAmount().with(unit, amount);
    }
    TimeAmount with(TemporalUnit unit, long amount) {
      amountsPerUnit.put(unit, amount);
      return this;
    }
    @Override
    public long get(TemporalUnit unit) {
      return amountsPerUnit.get(unit);
    }
    @Override
    public List<TemporalUnit> getUnits() {
      return new ArrayList<>(amountsPerUnit.keySet());
    }
    @Override
    public Temporal addTo(Temporal temporal) {
      throw new UnsupportedOperationException();
    }
    @Override
    public Temporal subtractFrom(Temporal temporal) {
      throw new UnsupportedOperationException();
    }
  }
  /** A TemporalUnit that is not a ChronoUnit, to exercise the rejection path. */
  private static class DummyUnit implements TemporalUnit {
    private static final DummyUnit INSTANCE = new DummyUnit();
    @Override
    public Duration getDuration() {
      return null;
    }
    @Override
    public boolean isDurationEstimated() {
      return false;
    }
    @Override
    public boolean isDateBased() {
      return false;
    }
    @Override
    public boolean isTimeBased() {
      return false;
    }
    @Override
    public <R extends Temporal> R addTo(R temporal, long amount) {
      return null;
    }
    @Override
    public long between(Temporal temporal1Inclusive, Temporal temporal2Exclusive) {
      return 0;
    }
  }
}
| 7,164 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/CaseFinder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util;
import java.io.BufferedReader;
import java.io.IOException;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Parser for files containing test cases consisting of
* <code><String,String></code> pairs, where the first string is the input
* to the test case, and the second string is the expected output of the test
* case.
*
* <p>
* A test-case file is a sequence of
* <a href="en.wikipedia.org/wiki/Here_document">here documents</a>
* ("heredocs"), very similar in syntax to Unix Shell heredocs. Heredocs labeled
* "INPUT" indicate the start of a new case, and these INPUT heredocs the inputs
* of test cases. Following an "INPUT" heredoc can more zero or more
* "expected-output" heredocs. Each of these expected-output heredocs defines
* what we call a <dfn>subcase</dfn>. The assumption here is that for each
* interesting test input, there are often multiple different tests one could
* run, each with different expected outputs.
*
* <p>
* Consumers of this class call the {@link #find} method to find all subcases
* marked with a given label. For example, imagine the following test-case file:
* <blockquote>
*
* <pre>
* <<INPUT 0
* <<VALUE 0
* <<PPRINT 0
* <<INPUT 1+1
* <<VALUE 2
* <<PPRINT 1 + 1
* <<SEXP (+ 1 1)
* SEXP
* </pre>
*
* </blockquote> Calling {@link #find} on the label "VALUE" will return two test
* cases, the pair <code><"0","0"></code> and
* <code><"1+1","2"></code>. Calling it on the label "PPRINT" will return
* <code><"0","0"></code> and <code><"1+1","1 +
* 1"></code>. Notice that there need not be a subcase for every INPUT. In
* the case of "SEXP", for example, {@link #find} will return only the single
* pair <code><"1+1","(+ 1 1)"></code>.
*
* <p>
* There are two forms of heredocs, single-line and multi-line. The examples
* above (except "SEXP") are single-line heredocs. The general syntax for these
* is: <blockquote>
*
* <pre>
* ^<<([a-zA-Z][_a-zA-Z0-9]*) (.*)$
* </pre>
*
* </blockquote> The first group in this regex is the label of the heredoc, and
* the second group is the text of the heredoc. A single space separates the two
 * groups and is not part of the heredoc (subsequent spaces <em>will</em> be
 * included in the heredoc). A "line terminator" as defined by the Java language
 * (i.e., CR, LF, or CR followed by LF) terminates a single-line heredoc but
 * is not included in the text of the heredoc.
*
* <p>
* As the name implies, multi-line heredocs are spread across multiple lines, as
* in this example: <blockquote>
*
* <pre>
* <<INPUT
* 1
* +1 +
* 1
* INPUT
* <<VALUE 3
* <<PPRINT 1 + 1 + 1
* </pre>
*
* </blockquote> In this case, the input to the test case is spread across
* multiple lines (the line terminators in these documents are preserved as part
* of the document text). Multi-line heredocs can be used for both the inputs of
* text cases and the expected outputs of them.
*
* <p>
* The syntax of multi-line heredocs obey the following pseudo-regex:
* <blockquote>
*
* <pre>
* ^<<([a-zA-Z][_a-zA-Z0-9]*)$(.*)$^\1$
* </pre>
*
* </blockquote> That is, as illustrated by the example, a multi-line heredoc
 * named "LABEL" consists of the text <code>&lt;&lt;LABEL</code> on a line by
* itself, followed by the text of the heredoc, followed by the text
* <code>LABEL</code> on a line by itself (if LABEL starts a line but is not the
* <em>only</em> text on that line, then that entire line is part of the
* heredoc, and the heredoc is not terminated by that line).
*
* <p>
* In multi-line heredocs, neither the line terminator that terminates the start
* of the document, nor the one just before the label that ends the heredoc, are
* part of the text of the heredoc. Thus, for example, the text of the
* multi-line input from above would be exactly <code>"1\n+1 +\n1"</code>.
* If you want a new line at the end of a multi-line heredoc, put a blank line
* before the label ending the heredoc.
*
* <p>
* Also in multi-line heredocs, line-terminators within the heredoc are
* normalized to line-feeds ('\n'). Thus, for example, when a test file written
* on a Windows machine is parsed on any machine, the Windows-style line
* terminators within heredocs will be translated to Unix-style line
* terminators, no matter what platform the tests are run on.
*
* <p>
* Note that lines between heredocs are ignored, and can be used to provide
* spacing between and/or commentary on the test cases.
*/
public class CaseFinder {
  /**
   * Scan test-case file <code>in</code> looking for test subcases marked with
   * <code>label</code>, appending each <input, expected-output> pair found
   * (in order) to <code>cases</code>. If <code>label</code> equals the string
   * <code>"INPUT"</code>, then returns the list of <<i>input</i>,
   * <code>null</code>> pairs for <i>input</i> equal to all heredoc's named
   * INPUT's found in the input stream.
   *
   * @param in    reader over a test-case file; it is closed before returning
   * @param label the subcase label to collect; must match the label syntax
   *              <code>[a-zA-Z][_a-zA-Z0-9]*</code>
   * @param cases accumulator the found pairs are appended to
   * @return the <code>cases</code> parameter, for call chaining
   * @throws IOException              on read errors, an unterminated heredoc, or
   *                                  a malformed single-line heredoc
   * @throws IllegalArgumentException if <code>label</code> is not a legal label
   */
  public static List<Object[]> find(BufferedReader in, String label, List<Object[]> cases) throws IOException {
    if (!Pattern.matches(LABEL_REGEX, label))
      throw new IllegalArgumentException("Bad case subcase label: " + label);
    final String subcaseMarker = "<<" + label;
    for (String line = in.readLine();;) {
      // Find next new case
      while (line != null && !line.startsWith(NEW_CASE_MARKER))
        line = in.readLine();
      if (line == null)
        break;
      String input = processHereDoc(in, line);
      if (label.equals(NEW_CASE_NAME)) {
        // Caller asked for the inputs themselves; there is no output to pair up.
        cases.add(new Object[] { input, null });
        line = in.readLine();
        continue;
      }
      // Check to see if there's a subcase named "label" for that case
      do {
        line = in.readLine();
      } while (line != null && (!line.startsWith(NEW_CASE_MARKER) && !line.startsWith(subcaseMarker)));
      if (line == null || line.startsWith(NEW_CASE_MARKER))
        continue; // this input has no subcase with the requested label
      String expectedOutput = processHereDoc(in, line);
      cases.add(new Object[] { input, expectedOutput });
    }
    in.close();
    return cases;
  }
  private static final String NEW_CASE_NAME = "INPUT";
  private static final String NEW_CASE_MARKER = "<<" + NEW_CASE_NAME;
  private static final String LABEL_REGEX = "[a-zA-Z][_a-zA-Z0-9]*";
  // Group 1 is the heredoc label; group 2 is the rest of the start line
  // (empty for a multi-line heredoc, " text" for a single-line one).
  private static final Pattern START_LINE_PATTERN = Pattern.compile("^<<(" + LABEL_REGEX + ")(.*)$");
  /**
   * Reads and returns the content of a heredoc. Assumes <code>docStart</code> is
   * the start-of-here-doc marker line just consumed from <code>in</code>. Line
   * terminators within a multi-line heredoc are normalized to line-feeds ('\n'),
   * and the newline just before the terminating label is stripped. Throws
   * IOException if EOF is reached before the heredoc is terminated.
   */
  private static String processHereDoc(BufferedReader in, String docStart) throws IOException {
    Matcher m = START_LINE_PATTERN.matcher(docStart);
    if (!m.matches())
      throw new IllegalArgumentException("Wasn't given the start of a heredoc (\"" + docStart + "\")");
    String docName = m.group(1);
    // Determine if this is a single-line heredoc, and process if it is
    String singleLineText = m.group(2);
    if (!singleLineText.isEmpty()) {
      if (!singleLineText.startsWith(" "))
        throw new IOException("Single-line heredoc missing initial space (\"" + docStart + "\")");
      return singleLineText.substring(1);
    }
    // Process multi-line heredocs. The previous line is buffered so that the
    // newline before the terminating label can be left out of the result.
    StringBuilder result = new StringBuilder();
    String line = in.readLine();
    String prevLine = "";
    boolean firstTime = true;
    while (line != null && !line.equals(docName)) {
      if (!firstTime)
        result.append(prevLine).append('\n');
      else
        firstTime = false;
      prevLine = line;
      line = in.readLine();
    }
    if (line == null)
      throw new IOException("Here document (" + docName + ") terminated by end-of-file.");
    return result.append(prevLine).toString();
  }
}
| 7,165 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/TestUtf8.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.avro.SystemLimitException;
import org.apache.avro.TestSystemLimitException;
import org.junit.jupiter.api.Test;
public class TestUtf8 {
  /** A Utf8 built from raw bytes reports their length and stores them verbatim. */
  @Test
  void byteConstructor() throws Exception {
    byte[] data = "Foo".getBytes(StandardCharsets.UTF_8);
    Utf8 utf8 = new Utf8(data);
    assertEquals(data.length, utf8.getByteLength());
    byte[] stored = utf8.getBytes();
    for (int i = 0; i < data.length; i++) {
      assertEquals(data[i], stored[i]);
    }
  }
  /** Shrinking and re-growing within capacity keeps the same backing array. */
  @Test
  void arrayReusedWhenLargerThanRequestedSize() {
    Utf8 utf8 = new Utf8("55555".getBytes(StandardCharsets.UTF_8));
    assertEquals(5, utf8.getByteLength());
    byte[] backing = utf8.getBytes();
    for (int newLength : new int[] { 3, 4 }) {
      utf8.setByteLength(newLength);
      assertEquals(newLength, utf8.getByteLength());
      assertSame(backing, utf8.getBytes());
    }
  }
  /** The cached hash code is recomputed whenever content or length changes. */
  @Test
  void hashCodeReused() {
    // Reference hash values for the strings used below.
    assertEquals(97, new Utf8("a").hashCode());
    assertEquals(3904, new Utf8("zz").hashCode());
    assertEquals(122, new Utf8("z").hashCode());
    assertEquals(99162322, new Utf8("hello").hashCode());
    assertEquals(3198781, new Utf8("hell").hashCode());
    Utf8 utf8 = new Utf8("a");
    assertEquals(97, utf8.hashCode());
    assertEquals(97, utf8.hashCode()); // repeated call yields the same value
    utf8.set("a");
    assertEquals(97, utf8.hashCode());
    utf8.setByteLength(1);
    assertEquals(97, utf8.hashCode());
    utf8.setByteLength(2);
    assertNotEquals(97, utf8.hashCode());
    // Mutation via set(String) followed by truncation.
    utf8.set("zz");
    assertEquals(3904, utf8.hashCode());
    utf8.setByteLength(1);
    assertEquals(122, utf8.hashCode());
    utf8.set("hello");
    assertEquals(99162322, utf8.hashCode());
    utf8.setByteLength(4);
    assertEquals(3198781, utf8.hashCode());
    // Same again, but mutating via set(Utf8).
    utf8.set(new Utf8("zz"));
    assertEquals(3904, utf8.hashCode());
    utf8.setByteLength(1);
    assertEquals(122, utf8.hashCode());
    utf8.set(new Utf8("hello"));
    assertEquals(99162322, utf8.hashCode());
    utf8.setByteLength(4);
    assertEquals(3198781, utf8.hashCode());
  }
  /** Length limits: the VM array cap always applies; the configurable cap is honored. */
  @Test
  void oversizeUtf8() {
    Utf8 utf8 = new Utf8();
    utf8.setByteLength(1024);
    assertEquals(1024, utf8.getByteLength());
    assertThrows(UnsupportedOperationException.class,
        () -> utf8.setByteLength(TestSystemLimitException.MAX_ARRAY_VM_LIMIT + 1));
    try {
      System.setProperty(SystemLimitException.MAX_STRING_LENGTH_PROPERTY, Long.toString(1000L));
      TestSystemLimitException.resetLimits();
      Exception thrown = assertThrows(SystemLimitException.class, () -> utf8.setByteLength(1024));
      assertEquals("String length 1024 exceeds maximum allowed", thrown.getMessage());
    } finally {
      // Always restore the global limit so other tests are unaffected.
      System.clearProperty(SystemLimitException.MAX_STRING_LENGTH_PROPERTY);
      TestSystemLimitException.resetLimits();
    }
  }
  /** Java serialization round-trips empty, byte-built and string-built instances. */
  @Test
  void serialization() throws IOException, ClassNotFoundException {
    Utf8 empty = new Utf8();
    Utf8 fromBytes = new Utf8("originalBytes".getBytes(StandardCharsets.UTF_8));
    Utf8 fromString = new Utf8("originalString");
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(buffer)) {
      out.writeObject(empty);
      out.writeObject(fromBytes);
      out.writeObject(fromString);
      out.flush();
      try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray()))) {
        assertThat(in.readObject(), is(empty));
        assertThat(in.readObject(), is(fromBytes));
        assertThat(in.readObject(), is(fromString));
      }
    }
  }
}
| 7,166 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/TestCaseFinder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
public class TestCaseFinder {
  @Nested
  public static class SimpleCases {
    /** Builds one data row: { input text, heredoc label, expected (input, output) pairs }. */
    private static Object[] row(String input, String label, Object[][] expected) {
      return new Object[] { input, label, expected };
    }
    public static List<Object[]> cases() {
      List<Object[]> data = new ArrayList<>();
      data.add(row("", "foo", new Object[][] {}));
      data.add(row("<<INPUT a\n<<OUTPUT b", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\n<<OUTPUT b\n", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\n<<OUTPUT b\n\n", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\r<<OUTPUT b", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("// This is a test\n<<INPUT a\n\n\n<<OUTPUT b", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\n<<OUTPUT\nb\nOUTPUT", "OUTPUT", new Object[][] { { "a", "b" } }));
      data.add(row("<<INPUT a\n<<OUTPUT\nb\n\nOUTPUT", "OUTPUT", new Object[][] { { "a", "b\n" } }));
      data.add(row("<<INPUT a\n<<OUTPUT\n\n b \n\nOUTPUT", "OUTPUT", new Object[][] { { "a", "\n b \n" } }));
      data.add(row("<<INPUT a\n<<O b\n<<INPUT c\n<<O d", "O", new Object[][] { { "a", "b" }, { "c", "d" } }));
      data.add(row("<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d", "O", new Object[][] { { "a", "b" }, { "c", "d" } }));
      data.add(row("<<INPUT a\n<<O b\n<<F z\n<<INPUT c\n<<O d", "F", new Object[][] { { "a", "z" } }));
      data.add(row("<<INPUT a\n<<O b\n<<F z\n<<INPUT\nc\nINPUT\n<<O d\n<<INPUT e", "INPUT",
          new Object[][] { { "a", null }, { "c", null }, { "e", null } }));
      return data;
    }
    @ParameterizedTest
    @MethodSource("cases")
    void output(String input, String label, Object[][] ex) throws Exception {
      List<Object[]> actual = new ArrayList<>();
      CaseFinder.find(mk(input), label, actual);
      List<Object[]> expected = Arrays.asList(ex);
      assertTrue(eq(actual, expected), pr(actual));
    }
  }
  @Nested
  public class NonParameterized {
    @Test
    void badDocLabel1() throws Exception {
      // An empty label is rejected before any parsing happens.
      List<Object[]> sink = new ArrayList<>();
      assertThrows(java.lang.IllegalArgumentException.class, () -> CaseFinder.find(mk("<<INPUT blah"), "", sink));
    }
    @Test
    void badDocLabel2() throws Exception {
      // '-' is not allowed in labels.
      List<Object[]> sink = new ArrayList<>();
      assertThrows(java.lang.IllegalArgumentException.class,
          () -> CaseFinder.find(mk("<<INPUT blah"), "kill-er", sink));
    }
    @Test
    void badSingleLineHeredoc() throws Exception {
      // Missing the space between the label and the single-line text.
      List<Object[]> sink = new ArrayList<>();
      assertThrows(java.io.IOException.class, () -> CaseFinder.find(mk("<<INPUTblah"), "foo", sink));
    }
    @Test
    void unterminatedHeredoc() throws Exception {
      // Multi-line heredoc that hits EOF before its terminating label.
      List<Object[]> sink = new ArrayList<>();
      assertThrows(java.io.IOException.class, () -> CaseFinder.find(mk("<<INPUT"), "foo", sink));
    }
  }
  /** Wraps a string in a buffered reader suitable for CaseFinder.find. */
  private static BufferedReader mk(String s) {
    return new BufferedReader(new StringReader(s));
  }
  /** Renders a list of pairs as '{ { "in", "out" }, ... }' for failure messages. */
  private static String pr(List<Object[]> pairs) {
    StringBuilder b = new StringBuilder("{ ");
    String separator = "";
    for (Object[] pair : pairs) {
      b.append(separator).append("{ \"").append(pair[0]).append("\", \"").append(pair[1]).append("\" }");
      separator = ", ";
    }
    return b.append("}").toString();
  }
  /** Null-safe, element-wise equality of two pair lists. */
  private static boolean eq(List<Object[]> left, List<Object[]> right) {
    if (left == null || right == null)
      return left == right;
    if (left.size() != right.size())
      return false;
    for (int i = 0, n = left.size(); i < n; i++) {
      if (!Arrays.equals(left.get(i), right.get(i)))
        return false;
    }
    return true;
  }
}
| 7,167 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/TestRandomData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Objects;
import java.util.Random;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificRecordBase;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Round-trip tests for {@link RandomData}: each test writes randomly generated
 * records with one Avro data model (generic, specific or reflected) and reads
 * them back with another, covering every model combination. Write and read
 * passes share the same seed so the expected stream can be regenerated.
 */
public class TestRandomData {
  // Seed shared by the write and read passes so both generate identical data.
  private long seed;
  // Number of random records per test run: 75..124.
  private int count;
  // Temporary Avro data file written and then re-read by each test.
  private File file;
  private GenericData genericData;
  private SpecificData specificData;
  private Schema specificSchema;
  private ReflectData reflectData;
  private Schema reflectedSchema;

  @Before
  public void setUp() throws Exception {
    file = Files.createTempFile("randomData", ".avro").toFile();
    // Fix: the temp file was never cleaned up; remove it when the JVM exits.
    file.deleteOnExit();
    seed = System.currentTimeMillis();
    count = new Random().nextInt(50) + 75;
    genericData = GenericData.get();
    specificData = SpecificData.get();
    specificSchema = specificData.getSchema(SpecificTestRecord.class);
    reflectData = ReflectData.get();
    reflectedSchema = reflectData.getSchema(ReflectTestRecord.class);
  }

  @Test
  public void testRandomDataFromGenericToGeneric() throws IOException {
    checkWrite(genericData, TEST_SCHEMA);
    checkRead(genericData, TEST_SCHEMA);
  }

  @Test
  public void testRandomDataFromGenericToSpecific() throws IOException {
    checkWrite(genericData, TEST_SCHEMA);
    checkRead(specificData, specificSchema);
  }

  @Test
  public void testRandomDataFromGenericToReflected() throws IOException {
    checkWrite(genericData, TEST_SCHEMA);
    checkRead(reflectData, reflectedSchema);
  }

  @Test
  public void testRandomDataFromSpecificToGeneric() throws IOException {
    checkWrite(specificData, specificSchema);
    checkRead(genericData, TEST_SCHEMA);
  }

  @Test
  public void testRandomDataFromSpecificToSpecific() throws IOException {
    checkWrite(specificData, specificSchema);
    checkRead(specificData, specificSchema);
  }

  @Test
  public void testRandomDataFromSpecificToReflected() throws IOException {
    checkWrite(specificData, specificSchema);
    checkRead(reflectData, reflectedSchema);
  }

  @Test
  public void testRandomDataFromReflectedToGeneric() throws IOException {
    checkWrite(reflectData, reflectedSchema);
    checkRead(genericData, TEST_SCHEMA);
  }

  @Test
  public void testRandomDataFromReflectedToSpecific() throws IOException {
    checkWrite(reflectData, reflectedSchema);
    checkRead(specificData, specificSchema);
  }

  @Test
  public void testRandomDataFromReflectedToReflected() throws IOException {
    checkWrite(reflectData, reflectedSchema);
    checkRead(reflectData, reflectedSchema);
  }

  /**
   * Writes {@code count} random records of {@code schema} to the temp file
   * using the supplied data model. (Parameter renamed from {@code genericData}
   * to avoid shadowing the field of the same name.)
   */
  private void checkWrite(GenericData model, Schema schema) throws IOException {
    // noinspection unchecked
    try (DataFileWriter<Object> writer = new DataFileWriter<Object>(model.createDatumWriter(schema))) {
      writer.create(schema, file);
      for (Object datum : new RandomData(model, schema, this.count, seed)) {
        writer.append(datum);
      }
    }
  }

  /**
   * Reads the temp file back with the supplied data model, comparing every
   * record against a stream regenerated from the same seed and count.
   */
  private void checkRead(GenericData model, Schema schema) throws IOException {
    // noinspection unchecked
    try (DataFileReader<Object> reader = new DataFileReader<Object>(file, model.createDatumReader(schema))) {
      for (Object expected : new RandomData(model, schema, this.count, seed)) {
        assertEquals(expected, reader.next());
      }
    }
  }

  /*
   * Test classes: they implement the same schema, but one is a SpecificRecord and
   * the other uses a reflected schema.
   */
  public static final String TEST_SCHEMA_JSON = "{\"type\":\"record\",\"name\":\"Record\",\"fields\":[{\"name\":\"x\",\"type\":\"int\"},{\"name\":\"y\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}";

  public static final Schema TEST_SCHEMA = new Schema.Parser().parse(TEST_SCHEMA_JSON);

  /** SpecificRecord flavor of the test schema (record name swapped to this class). */
  public static class SpecificTestRecord extends SpecificRecordBase {
    public static final Schema SCHEMA$ = new Schema.Parser().parse(TEST_SCHEMA_JSON.replace("\"name\":\"Record\"",
        "\"name\":\"" + SpecificTestRecord.class.getCanonicalName() + "\""));

    private int x;
    private String y;

    @Override
    public Schema getSchema() {
      return SCHEMA$;
    }

    @Override
    public void put(int i, Object v) {
      switch (i) {
      case 0:
        x = (Integer) v;
        break;
      case 1:
        y = (String) v;
        break;
      default:
        throw new RuntimeException();
      }
    }

    @Override
    public Object get(int i) {
      switch (i) {
      case 0:
        return x;
      case 1:
        return y;
      }
      throw new RuntimeException();
    }
  }

  /** Reflected (POJO) flavor of the test schema; equality is value-based. */
  public static class ReflectTestRecord {
    private int x;
    private String y;

    public int getX() {
      return x;
    }

    public void setX(int x) {
      this.x = x;
    }

    public String getY() {
      return y;
    }

    public void setY(String y) {
      this.y = y;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      ReflectTestRecord that = (ReflectTestRecord) o;
      return x == that.x && Objects.equals(y, that.y);
    }

    @Override
    public int hashCode() {
      return Objects.hash(x, y);
    }

    @Override
    public String toString() {
      return String.format("{\"x\": %d, \"y\": \"%s\"}", x, y);
    }
  }
}
| 7,168 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/WeakIdentityHashMapTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.avro.util;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
/**
* This test aims to stress WeakIdentityHashMap class in multithread env.
*/
/**
 * Stress-tests {@link WeakIdentityHashMap} under concurrent access: 81 worker
 * threads repeatedly put and remove overlapping keys, and any runtime failure
 * observed by a worker fails the test.
 */
class WeakIdentityHashMapTest {

  private static final int TEST_SIZE = 4001;

  // Key/value pool; fully populated before the workers start (Thread.start
  // establishes a happens-before, so workers may read it without locking).
  List<String> data = new ArrayList<>(TEST_SIZE);

  final WeakIdentityHashMap<String, String> map = new WeakIdentityHashMap<>();

  // Failures recorded by worker threads. ArrayList is not thread-safe, so all
  // concurrent adds are guarded by the list's own monitor (see rundata).
  List<RuntimeException> exceptions = new ArrayList<>(TEST_SIZE);

  @Test
  void stressMap() {
    for (int i = 1; i <= TEST_SIZE; i++) {
      data.add("Data_" + i);
    }

    // 81 workers (i runs 0..80 inclusive), each with a distinct seed so their
    // key sequences overlap without being identical.
    List<Thread> threads = new ArrayList<>(81);
    for (int i = 0; i <= 80; i++) {
      final int seed = (i + 1) * 100;
      Runnable runnable = () -> rundata(seed);
      Thread t = new Thread(runnable);
      threads.add(t);
    }
    threads.forEach(Thread::start);
    threads.forEach((Thread t) -> {
      try {
        t.join();
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    });
    // Thread.join establishes a happens-before, so reading here is safe.
    Assertions.assertTrue(exceptions.isEmpty());
  }

  /** Worker body: interleaved puts and removes over the shared map. */
  void rundata(int seed) {
    try {
      for (int i = 1; i <= TEST_SIZE; i++) {
        String keyValue = data.get((i + seed) % TEST_SIZE);
        map.put(keyValue, keyValue);
        if (i % 200 == 0) {
          // Pause periodically to increase interleaving between workers.
          sleep();
        }
        String keyValueRemove = data.get(((i + seed) * 3) % TEST_SIZE);
        map.remove(keyValueRemove);
      }
    } catch (RuntimeException ex) {
      // Fix: multiple workers may fail concurrently; serialize the adds so the
      // non-thread-safe ArrayList is not corrupted and no failure is lost.
      synchronized (exceptions) {
        exceptions.add(ex);
      }
    }
  }

  /** Sleeps briefly, restoring the interrupt flag if interrupted. */
  void sleep() {
    try {
      Thread.sleep(5);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
    }
  }
}
| 7,169 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/internal/TestJacksonUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.internal;
import static org.apache.avro.util.internal.JacksonUtils.toJsonNode;
import static org.apache.avro.util.internal.JacksonUtils.toObject;
import static org.junit.jupiter.api.Assertions.*;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.BigIntegerNode;
import com.fasterxml.jackson.databind.node.BinaryNode;
import com.fasterxml.jackson.databind.node.BooleanNode;
import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import com.fasterxml.jackson.databind.node.FloatNode;
import com.fasterxml.jackson.databind.node.IntNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.LongNode;
import com.fasterxml.jackson.databind.node.NullNode;
import com.fasterxml.jackson.databind.node.NumericNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.stream.Stream;
import org.apache.avro.JsonProperties;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
/**
 * Tests for {@link JacksonUtils}: conversion between plain Java objects and
 * Jackson {@link JsonNode} trees, optionally guided by an Avro {@link Schema},
 * plus object-to-node round-trip stability.
 */
public class TestJacksonUtils {

  // Sample enum: enums convert to a TextNode carrying the constant's name.
  enum Direction {
    UP, DOWN;
  }

  /** Object-to-JsonNode direction: each Java type maps to its node type. */
  @Test
  void testToJsonNode() {
    assertNull(toJsonNode(null));
    // JsonProperties.NULL_VALUE is Avro's marker object for an explicit JSON null.
    assertEquals(NullNode.getInstance(), toJsonNode(JsonProperties.NULL_VALUE));
    assertEquals(BooleanNode.TRUE, toJsonNode(true));
    assertEquals(IntNode.valueOf(1), toJsonNode(1));
    assertEquals(LongNode.valueOf(2), toJsonNode(2L));
    assertEquals(FloatNode.valueOf(1.0f), toJsonNode(1.0f));
    // A float without a short decimal form must stay a FloatNode (no widening).
    assertEquals(FloatNode.valueOf(33.33000183105469f), toJsonNode(33.33000183105469f));
    assertEquals(DoubleNode.valueOf(2.0), toJsonNode(2.0d));
    assertEquals(BinaryNode.valueOf(new byte[] { 1, 2 }), toJsonNode(new byte[] { 1, 2 }));
    assertEquals(TextNode.valueOf("a"), toJsonNode("a"));
    assertEquals(TextNode.valueOf("UP"), toJsonNode(Direction.UP));
    assertEquals(BigIntegerNode.valueOf(BigInteger.ONE), toJsonNode(BigInteger.ONE));
    assertEquals(DecimalNode.valueOf(BigDecimal.ONE), toJsonNode(BigDecimal.ONE));
    ArrayNode an = JsonNodeFactory.instance.arrayNode();
    an.add(1);
    assertEquals(an, toJsonNode(Collections.singletonList(1)));
    ObjectNode on = JsonNodeFactory.instance.objectNode();
    on.put("a", 1);
    assertEquals(on, toJsonNode(Collections.singletonMap("a", 1)));
  }

  /**
   * JsonNode-to-object direction; when a Schema is supplied it steers the
   * target Java type (e.g. an IntNode read as a long, text read as bytes).
   */
  @Test
  void testToObject() {
    assertNull(toObject(null));
    assertEquals(JsonProperties.NULL_VALUE, toObject(NullNode.getInstance()));
    assertEquals(true, toObject(BooleanNode.TRUE));
    assertEquals(1, toObject(IntNode.valueOf(1)));
    assertEquals(2L, toObject(IntNode.valueOf(2), Schema.create(Schema.Type.LONG)));
    assertEquals(1.0f, toObject(DoubleNode.valueOf(1.0), Schema.create(Schema.Type.FLOAT)));
    assertEquals(2.0, toObject(DoubleNode.valueOf(2.0)));
    // NOTE(review): this asserts toJsonNode, not toObject — looks like a
    // copy-paste leftover from testToJsonNode; confirm intent before removing.
    assertEquals(BinaryNode.valueOf(new byte[] { 1, 2 }), toJsonNode(new byte[] { 1, 2 }));
    assertArrayEquals(new byte[] { 1, 2 },
        (byte[]) toObject(TextNode.valueOf("\u0001\u0002"), Schema.create(Schema.Type.BYTES)));
    assertEquals("a", toObject(TextNode.valueOf("a")));
    assertEquals("UP", toObject(TextNode.valueOf("UP"), SchemaBuilder.enumeration("Direction").symbols("UP", "DOWN")));
    ArrayNode an = JsonNodeFactory.instance.arrayNode();
    an.add(1);
    assertEquals(Collections.singletonList(1), toObject(an));
    ObjectNode on = JsonNodeFactory.instance.objectNode();
    on.put("a", 1);
    assertEquals(Collections.singletonMap("a", 1), toObject(on));
    // A record schema promotes the int field value to Long.
    assertEquals(Collections.singletonMap("a", 1L),
        toObject(on, SchemaBuilder.record("r").fields().requiredLong("a").endRecord()));
    // Union schemas: the branch matching the node's type is selected.
    assertEquals(JsonProperties.NULL_VALUE,
        toObject(NullNode.getInstance(), SchemaBuilder.unionOf().nullType().and().intType().endUnion()));
    assertEquals("a", toObject(TextNode.valueOf("a"), SchemaBuilder.unionOf().stringType().and().intType().endUnion()));
  }

  /** Round trip: toObject then toJsonNode must reproduce the input node. */
  @ParameterizedTest
  @MethodSource("nodes")
  void cycle(JsonNode input) {
    Object object = JacksonUtils.toObject(input);
    JsonNode node = JacksonUtils.toJsonNode(object);
    Assertions.assertEquals(input, node);
  }

  /** Inputs for {@link #cycle}: a float node, a binary node, a mixed array. */
  public static Stream<Arguments> nodes() {
    ObjectNode o1 = JsonNodeFactory.instance.objectNode();
    o1.put("intField", 123);
    o1.put("floatField", 33.33000183105469f);
    o1.put("doubleField", 33.33000183105469245d);
    return Stream.of(JsonNodeFactory.instance.numberNode(33.33000183105469f),
        JsonNodeFactory.instance.binaryNode("Hello".getBytes(StandardCharsets.ISO_8859_1)),
        JsonNodeFactory.instance.arrayNode().add(1).add("Hello").add(o1)).map(Arguments::of);
  }
}
| 7,170 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/internal/TestClassValueCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.internal;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
/** Tests for {@link ClassValueCache}: values are computed once and cached per class. */
public class TestClassValueCache {

  @Test
  void basic() {
    // The cache applies Class::toString on first lookup and must hand back the
    // very same instance on subsequent lookups of the same class.
    ClassValueCache<String> cache = new ClassValueCache<>(Class::toString);
    String first = cache.apply(String.class);
    assertThat(first, is("class java.lang.String"));
    String second = cache.apply(String.class);
    assertThat(second, sameInstance(first));
  }
}
| 7,171 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/springframework/TestConcurrentReferenceHashMap.java | /*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.springframework;
import org.apache.avro.reflect.Nullable;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.apache.avro.util.springframework.ConcurrentReferenceHashMap.Entry;
import org.apache.avro.util.springframework.ConcurrentReferenceHashMap.Reference;
import org.apache.avro.util.springframework.ConcurrentReferenceHashMap.Restructure;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* Tests for {@link ConcurrentReferenceHashMap}.
*
* @author Phillip Webb
* @author Juergen Hoeller
*/
class TestConcurrentReferenceHashMap {
private static final Comparator<? super String> NULL_SAFE_STRING_SORT = new NullSafeComparator<>(
new ComparableComparator<String>(), true);
private TestWeakConcurrentCache<Integer, String> map = new TestWeakConcurrentCache<>();
@Test
void shouldCreateWithDefaults() {
ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>();
assertThat(map.getSegmentsSize(), equalTo(16));
assertThat(map.getSegment(0).getSize(), equalTo(1));
assertThat(map.getLoadFactor(), equalTo(0.75f));
}
@Test
void shouldCreateWithInitialCapacity() {
ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(32);
assertThat(map.getSegmentsSize(), equalTo(16));
assertThat(map.getSegment(0).getSize(), equalTo(2));
assertThat(map.getLoadFactor(), equalTo(0.75f));
}
@Test
void shouldCreateWithInitialCapacityAndLoadFactor() {
ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(32, 0.5f);
assertThat(map.getSegmentsSize(), equalTo(16));
assertThat(map.getSegment(0).getSize(), equalTo(2));
assertThat(map.getLoadFactor(), equalTo(0.5f));
}
@Test
void shouldCreateWithInitialCapacityAndConcurrentLevel() {
ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(16, 2);
assertThat(map.getSegmentsSize(), equalTo(2));
assertThat(map.getSegment(0).getSize(), equalTo(8));
assertThat(map.getLoadFactor(), equalTo(0.75f));
}
@Test
void shouldCreateFullyCustom() {
ConcurrentReferenceHashMap<Integer, String> map = new ConcurrentReferenceHashMap<>(5, 0.5f, 3);
// concurrencyLevel of 3 ends up as 4 (nearest power of 2)
assertThat(map.getSegmentsSize(), equalTo(4));
// initialCapacity is 5/4 (rounded up, to nearest power of 2)
assertThat(map.getSegment(0).getSize(), equalTo(2));
assertThat(map.getLoadFactor(), equalTo(0.5f));
}
@Test
void shouldNeedNonNegativeInitialCapacity() {
new ConcurrentReferenceHashMap<Integer, String>(0, 1);
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> new TestWeakConcurrentCache<Integer, String>(-1, 1));
assertTrue(e.getMessage().contains("Initial capacity must not be negative"));
}
@Test
void shouldNeedPositiveLoadFactor() {
new ConcurrentReferenceHashMap<Integer, String>(0, 0.1f, 1);
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> new TestWeakConcurrentCache<Integer, String>(0, 0.0f, 1));
assertTrue(e.getMessage().contains("Load factor must be positive"));
}
@Test
void shouldNeedPositiveConcurrencyLevel() {
new ConcurrentReferenceHashMap<Integer, String>(1, 1);
IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
() -> new TestWeakConcurrentCache<Integer, String>(1, 0));
assertTrue(e.getMessage().contains("Concurrency level must be positive"));
}
@Test
void shouldPutAndGet() {
// NOTE we are using mock references so we don't need to worry about GC
assertEquals(0, this.map.size());
this.map.put(123, "123");
assertThat(this.map.get(123), equalTo("123"));
assertEquals(1, this.map.size());
this.map.put(123, "123b");
assertEquals(1, this.map.size());
this.map.put(123, null);
assertEquals(1, this.map.size());
}
@Test
void shouldReplaceOnDoublePut() {
this.map.put(123, "321");
this.map.put(123, "123");
assertThat(this.map.get(123), equalTo("123"));
}
@Test
void shouldPutNullKey() {
assertNull(this.map.get(null));
assertThat(this.map.getOrDefault(null, "456"), equalTo("456"));
this.map.put(null, "123");
assertThat(this.map.get(null), equalTo("123"));
assertThat(this.map.getOrDefault(null, "456"), equalTo("123"));
}
@Test
void shouldPutNullValue() {
assertNull(this.map.get(123));
assertThat(this.map.getOrDefault(123, "456"), equalTo("456"));
this.map.put(123, "321");
assertThat(this.map.get(123), equalTo("321"));
assertThat(this.map.getOrDefault(123, "456"), equalTo("321"));
this.map.put(123, null);
assertNull(this.map.get(123));
assertNull(this.map.getOrDefault(123, "456"));
}
@Test
void shouldGetWithNoItems() {
assertNull(this.map.get(123));
}
@Test
void shouldApplySupplementalHash() {
Integer key = 123;
this.map.put(key, "123");
assertNotEquals(this.map.getSupplementalHash(), key.hashCode());
assertNotEquals(this.map.getSupplementalHash() >> 30 & 0xFF, 0);
}
@Test
void shouldGetFollowingNexts() {
// Use loadFactor to disable resize
this.map = new TestWeakConcurrentCache<>(1, 10.0f, 1);
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
assertThat(this.map.getSegment(0).getSize(), equalTo(1));
assertThat(this.map.get(1), equalTo("1"));
assertThat(this.map.get(2), equalTo("2"));
assertThat(this.map.get(3), equalTo("3"));
assertNull(this.map.get(4));
}
@Test
void shouldResize() {
this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1);
this.map.put(1, "1");
assertThat(this.map.getSegment(0).getSize(), equalTo(1));
assertThat(this.map.get(1), equalTo("1"));
this.map.put(2, "2");
assertThat(this.map.getSegment(0).getSize(), equalTo(2));
assertThat(this.map.get(1), equalTo("1"));
assertThat(this.map.get(2), equalTo("2"));
this.map.put(3, "3");
assertThat(this.map.getSegment(0).getSize(), equalTo(4));
assertThat(this.map.get(1), equalTo("1"));
assertThat(this.map.get(2), equalTo("2"));
assertThat(this.map.get(3), equalTo("3"));
this.map.put(4, "4");
assertThat(this.map.getSegment(0).getSize(), equalTo(8));
assertThat(this.map.get(4), equalTo("4"));
// Putting again should not increase the count
for (int i = 1; i <= 5; i++) {
this.map.put(i, String.valueOf(i));
}
assertThat(this.map.getSegment(0).getSize(), equalTo(8));
assertThat(this.map.get(5), equalTo("5"));
}
@Test
void shouldPurgeOnGet() {
this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1);
for (int i = 1; i <= 5; i++) {
this.map.put(i, String.valueOf(i));
}
this.map.getMockReference(1, Restructure.NEVER).queueForPurge();
this.map.getMockReference(3, Restructure.NEVER).queueForPurge();
assertNull(this.map.getReference(1, Restructure.WHEN_NECESSARY));
assertThat(this.map.get(2), equalTo("2"));
assertNull(this.map.getReference(3, Restructure.WHEN_NECESSARY));
assertThat(this.map.get(4), equalTo("4"));
assertThat(this.map.get(5), equalTo("5"));
}
@Test
void shouldPurgeOnPut() {
this.map = new TestWeakConcurrentCache<>(1, 0.75f, 1);
for (int i = 1; i <= 5; i++) {
this.map.put(i, String.valueOf(i));
}
this.map.getMockReference(1, Restructure.NEVER).queueForPurge();
this.map.getMockReference(3, Restructure.NEVER).queueForPurge();
this.map.put(1, "1");
assertThat(this.map.get(1), equalTo("1"));
assertThat(this.map.get(2), equalTo("2"));
assertNull(this.map.getReference(3, Restructure.WHEN_NECESSARY));
assertThat(this.map.get(4), equalTo("4"));
assertThat(this.map.get(5), equalTo("5"));
}
@Test
void shouldPutIfAbsent() {
assertNull(this.map.putIfAbsent(123, "123"));
assertThat(this.map.putIfAbsent(123, "123b"), equalTo("123"));
assertThat(this.map.get(123), equalTo("123"));
}
@Test
void shouldPutIfAbsentWithNullValue() {
assertNull(this.map.putIfAbsent(123, null));
assertNull(this.map.putIfAbsent(123, "123"));
assertNull(this.map.get(123));
}
@Test
void shouldPutIfAbsentWithNullKey() {
assertNull(this.map.putIfAbsent(null, "123"));
assertThat(this.map.putIfAbsent(null, "123b"), equalTo("123"));
assertThat(this.map.get(null), equalTo("123"));
}
@Test
void shouldRemoveKeyAndValue() {
this.map.put(123, "123");
assertFalse(this.map.remove(123, "456"));
assertThat(this.map.get(123), equalTo("123"));
assertTrue(this.map.remove(123, "123"));
assertFalse(this.map.containsKey(123));
assertTrue(this.map.isEmpty());
}
@Test
void shouldRemoveKeyAndValueWithExistingNull() {
this.map.put(123, null);
assertFalse(this.map.remove(123, "456"));
assertNull(this.map.get(123));
assertTrue(this.map.remove(123, null));
assertFalse(this.map.containsKey(123));
assertTrue(this.map.isEmpty());
}
@Test
void shouldReplaceOldValueWithNewValue() {
this.map.put(123, "123");
assertFalse(this.map.replace(123, "456", "789"));
assertThat(this.map.get(123), equalTo("123"));
assertTrue(this.map.replace(123, "123", "789"));
assertThat(this.map.get(123), equalTo("789"));
}
@Test
void shouldReplaceOldNullValueWithNewValue() {
this.map.put(123, null);
assertFalse(this.map.replace(123, "456", "789"));
assertNull(this.map.get(123));
assertTrue(this.map.replace(123, null, "789"));
assertThat(this.map.get(123), equalTo("789"));
}
@Test
void shouldReplaceValue() {
this.map.put(123, "123");
assertThat(this.map.replace(123, "456"), equalTo("123"));
assertThat(this.map.get(123), equalTo("456"));
}
@Test
void shouldReplaceNullValue() {
this.map.put(123, null);
assertNull(this.map.replace(123, "456"));
assertThat(this.map.get(123), equalTo("456"));
}
@Test
void shouldGetSize() {
assertEquals(0, this.map.size());
this.map.put(123, "123");
this.map.put(123, null);
this.map.put(456, "456");
assertEquals(2, this.map.size());
}
@Test
void shouldSupportIsEmpty() {
assertTrue(this.map.isEmpty());
this.map.put(123, "123");
this.map.put(123, null);
this.map.put(456, "456");
assertFalse(this.map.isEmpty());
}
@Test
void shouldContainKey() {
assertFalse(this.map.containsKey(123));
assertFalse(this.map.containsKey(456));
this.map.put(123, "123");
this.map.put(456, null);
assertTrue(this.map.containsKey(123));
assertTrue(this.map.containsKey(456));
}
@Test
void shouldContainValue() {
assertFalse(this.map.containsValue("123"));
assertFalse(this.map.containsValue(null));
this.map.put(123, "123");
this.map.put(456, null);
assertTrue(this.map.containsValue("123"));
assertTrue(this.map.containsValue(null));
}
@Test
void shouldRemoveWhenKeyIsInMap() {
this.map.put(123, null);
this.map.put(456, "456");
this.map.put(null, "789");
assertNull(this.map.remove(123));
assertThat(this.map.remove(456), equalTo("456"));
assertThat(this.map.remove(null), equalTo("789"));
assertTrue(this.map.isEmpty());
}
@Test
void shouldRemoveWhenKeyIsNotInMap() {
assertNull(this.map.remove(123));
assertNull(this.map.remove(null));
assertTrue(this.map.isEmpty());
}
@Test
void shouldPutAll() {
Map<Integer, String> m = new HashMap<>();
m.put(123, "123");
m.put(456, null);
m.put(null, "789");
this.map.putAll(m);
assertEquals(3, this.map.size());
assertThat(this.map.get(123), equalTo("123"));
assertNull(this.map.get(456));
assertThat(this.map.get(null), equalTo("789"));
}
@Test
void shouldClear() {
this.map.put(123, "123");
this.map.put(456, null);
this.map.put(null, "789");
this.map.clear();
assertEquals(0, this.map.size());
assertFalse(this.map.containsKey(123));
assertFalse(this.map.containsKey(456));
assertFalse(this.map.containsKey(null));
}
@Test
void shouldGetKeySet() {
this.map.put(123, "123");
this.map.put(456, null);
this.map.put(null, "789");
Set<Integer> expected = new HashSet<>();
expected.add(123);
expected.add(456);
expected.add(null);
assertThat(this.map.keySet(), equalTo(expected));
}
@Test
void shouldGetValues() {
this.map.put(123, "123");
this.map.put(456, null);
this.map.put(null, "789");
List<String> actual = new ArrayList<>(this.map.values());
List<String> expected = new ArrayList<>();
expected.add("123");
expected.add(null);
expected.add("789");
actual.sort(NULL_SAFE_STRING_SORT);
expected.sort(NULL_SAFE_STRING_SORT);
assertThat(actual, equalTo(expected));
}
@Test
void shouldGetEntrySet() {
this.map.put(123, "123");
this.map.put(456, null);
this.map.put(null, "789");
HashMap<Integer, String> expected = new HashMap<>();
expected.put(123, "123");
expected.put(456, null);
expected.put(null, "789");
assertThat(this.map.entrySet(), equalTo(expected.entrySet()));
}
@Test
void shouldGetEntrySetFollowingNext() {
// Use loadFactor to disable resize
this.map = new TestWeakConcurrentCache<>(1, 10.0f, 1);
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
HashMap<Integer, String> expected = new HashMap<>();
expected.put(1, "1");
expected.put(2, "2");
expected.put(3, "3");
assertThat(this.map.entrySet(), equalTo(expected.entrySet()));
}
@Test
void shouldRemoveViaEntrySet() {
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
Iterator<Map.Entry<Integer, String>> iterator = this.map.entrySet().iterator();
iterator.next();
iterator.next();
iterator.remove();
assertThrows(IllegalStateException.class, iterator::remove);
iterator.next();
assertFalse(iterator.hasNext());
assertEquals(2, this.map.size());
assertFalse(this.map.containsKey(2));
}
@Test
void shouldSetViaEntrySet() {
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
Iterator<Map.Entry<Integer, String>> iterator = this.map.entrySet().iterator();
iterator.next();
iterator.next().setValue("2b");
iterator.next();
assertFalse(iterator.hasNext());
assertEquals(3, this.map.size());
assertThat(this.map.get(2), equalTo("2b"));
}
@Test
void containsViaEntrySet() {
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
Set<Map.Entry<Integer, String>> entrySet = this.map.entrySet();
Set<Map.Entry<Integer, String>> copy = new HashMap<>(this.map).entrySet();
copy.forEach(entry -> assertTrue(entrySet.contains(entry)));
this.map.put(1, "A");
this.map.put(2, "B");
this.map.put(3, "C");
copy.forEach(entry -> assertFalse(entrySet.contains(entry)));
this.map.put(1, "1");
this.map.put(2, "2");
this.map.put(3, "3");
copy.forEach(entry -> assertTrue(entrySet.contains(entry)));
entrySet.clear();
copy.forEach(entry -> assertFalse(entrySet.contains(entry)));
}
@Test
@Disabled("Intended for use during development only")
void shouldBeFasterThanSynchronizedMap() throws InterruptedException {
Map<Integer, WeakReference<String>> synchronizedMap = Collections
.synchronizedMap(new WeakHashMap<Integer, WeakReference<String>>());
StopWatch mapTime = timeMultiThreaded("SynchronizedMap", synchronizedMap,
v -> new WeakReference<>(String.valueOf(v)));
System.out.println(mapTime.prettyPrint());
this.map.setDisableTestHooks(true);
StopWatch cacheTime = timeMultiThreaded("WeakConcurrentCache", this.map, String::valueOf);
System.out.println(cacheTime.prettyPrint());
// We should be at least 4 time faster
assertTrue(cacheTime.getTotalTimeSeconds() < (mapTime.getTotalTimeSeconds() / 4.0));
}
@Test
void shouldSupportNullReference() {
// GC could happen during restructure so we must be able to create a reference
// for a null entry
map.createReferenceManager().createReference(null, 1234, null);
}
/**
* Time a multi-threaded access to a cache.
*
* @return the timing stopwatch
*/
private <V> StopWatch timeMultiThreaded(String id, final Map<Integer, V> map, ValueFactory<V> factory)
throws InterruptedException {
StopWatch stopWatch = new StopWatch(id);
for (int i = 0; i < 500; i++) {
map.put(i, factory.newValue(i));
}
Thread[] threads = new Thread[30];
stopWatch.start("Running threads");
for (int threadIndex = 0; threadIndex < threads.length; threadIndex++) {
threads[threadIndex] = new Thread("Cache access thread " + threadIndex) {
@Override
public void run() {
for (int j = 0; j < 1000; j++) {
for (int i = 0; i < 1000; i++) {
map.get(i);
}
}
}
};
}
for (Thread thread : threads) {
thread.start();
}
for (Thread thread : threads) {
if (thread.isAlive()) {
thread.join(2000);
}
}
stopWatch.stop();
return stopWatch;
}
private interface ValueFactory<V> {
V newValue(int k);
}
/**
 * Subclass of the map under test that adds hooks for deterministic testing: it
 * records the supplemental hash, substitutes strongly-referenced
 * {@link MockReference} instances for real weak/soft references, and drives
 * purging from an in-memory queue instead of a real reference queue.
 */
private static class TestWeakConcurrentCache<K, V> extends ConcurrentReferenceHashMap<K, V> {

  // Last supplemental hash computed by getHash(); exposed for assertions.
  private int supplementalHash;

  // Stands in for a ReferenceQueue: released/queued references land here.
  private final LinkedList<MockReference<K, V>> queue = new LinkedList<>();

  // When true, all test hooks are bypassed and the superclass behavior is used
  // (e.g. for the performance comparison test).
  private boolean disableTestHooks;

  public TestWeakConcurrentCache() {
    super();
  }

  public void setDisableTestHooks(boolean disableTestHooks) {
    this.disableTestHooks = disableTestHooks;
  }

  public TestWeakConcurrentCache(int initialCapacity, float loadFactor, int concurrencyLevel) {
    super(initialCapacity, loadFactor, concurrencyLevel);
  }

  public TestWeakConcurrentCache(int initialCapacity, int concurrencyLevel) {
    super(initialCapacity, concurrencyLevel);
  }

  @Override
  protected int getHash(@Nullable Object o) {
    if (this.disableTestHooks) {
      return super.getHash(o);
    }
    // For testing we want more control of the hash: record the supplemental
    // hash the superclass would have used, but return the raw hashCode so the
    // hash used by the map stays predictable for the tests.
    this.supplementalHash = super.getHash(o);
    return (o != null ? o.hashCode() : 0);
  }

  public int getSupplementalHash() {
    return this.supplementalHash;
  }

  @Override
  protected ReferenceManager createReferenceManager() {
    return new ReferenceManager() {
      @Override
      public Reference<K, V> createReference(Entry<K, V> entry, int hash, @Nullable Reference<K, V> next) {
        if (TestWeakConcurrentCache.this.disableTestHooks) {
          return super.createReference(entry, hash, next);
        }
        // Use a strongly-referenced mock so GC cannot interfere with tests.
        return new MockReference<>(entry, hash, next, TestWeakConcurrentCache.this.queue);
      }

      @Override
      public Reference<K, V> pollForPurge() {
        if (TestWeakConcurrentCache.this.disableTestHooks) {
          return super.pollForPurge();
        }
        // Drain the in-memory queue instead of a real reference queue.
        return TestWeakConcurrentCache.this.queue.isEmpty() ? null : TestWeakConcurrentCache.this.queue.removeFirst();
      }
    };
  }

  public MockReference<K, V> getMockReference(K key, Restructure restructure) {
    return (MockReference<K, V>) super.getReference(key, restructure);
  }
}
/**
 * {@link Reference} implementation backed by a strong reference and an
 * explicit queue, letting tests simulate GC-driven release and purge
 * deterministically.
 */
private static class MockReference<K, V> implements Reference<K, V> {

  private final int hash;

  // Strong reference to the entry; cleared to null by release() to mimic a
  // garbage-collected weak/soft reference.
  private Entry<K, V> entry;

  private final Reference<K, V> next;

  // Shared queue standing in for a ReferenceQueue.
  private final LinkedList<MockReference<K, V>> queue;

  public MockReference(Entry<K, V> entry, int hash, Reference<K, V> next, LinkedList<MockReference<K, V>> queue) {
    this.hash = hash;
    this.entry = entry;
    this.next = next;
    this.queue = queue;
  }

  @Override
  public Entry<K, V> get() {
    return this.entry;
  }

  @Override
  public int getHash() {
    return this.hash;
  }

  @Override
  public Reference<K, V> getNext() {
    return this.next;
  }

  @Override
  public void release() {
    // Simulate GC: enqueue for purge and clear the referent.
    this.queue.add(this);
    this.entry = null;
  }

  // Enqueue without clearing the entry — simulates a reference that has been
  // queued but whose referent is still reachable.
  public void queueForPurge() {
    this.queue.add(this);
  }
}
}
| 7,172 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/springframework/StopWatch.java | /*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.springframework;
import org.apache.avro.reflect.Nullable;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Simple stop watch, allowing for timing of a number of tasks, exposing total
* running time and running time for each named task.
*
* <p>
* Conceals use of {@link System#nanoTime()}, improving the readability of
* application code and reducing the likelihood of calculation errors.
*
* <p>
* Note that this object is not designed to be thread-safe and does not use
* synchronization.
*
* <p>
* This class is normally used to verify performance during proof-of-concept
* work and in development, rather than as part of production applications.
*
* <p>
* As of Spring Framework 5.2, running time is tracked and reported in
* nanoseconds.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Sam Brannen
* @since May 2, 2001
*/
class StopWatch {

  /** Identifier used to tell several stop watches apart in log/console output. */
  private final String id;

  /** Whether completed tasks are recorded for later reporting. */
  private boolean keepTaskList = true;

  /** Tasks completed so far, in completion order (only if {@link #keepTaskList}). */
  private final List<TaskInfo> completedTasks = new ArrayList<>(1);

  /** Nano timestamp taken when the current task was started. */
  private long currentTaskStartNanos;

  /** Name of the task in progress, or {@code null} when idle. */
  @Nullable
  private String currentTaskName;

  /** Most recently completed task, or {@code null} if none has finished yet. */
  @Nullable
  private TaskInfo lastCompletedTask;

  /** Number of tasks completed so far. */
  private int completedTaskCount;

  /** Sum of the running times of all completed tasks, in nanoseconds. */
  private long totalTimeNanos;

  /**
   * Construct a new {@code StopWatch} with an empty ID. Does not start any task.
   */
  public StopWatch() {
    this("");
  }

  /**
   * Construct a new {@code StopWatch} with the given ID. Does not start any
   * task.
   *
   * @param id identifier for this stop watch
   */
  public StopWatch(String id) {
    this.id = id;
  }

  /**
   * Get the ID of this {@code StopWatch}, as specified on construction.
   *
   * @return the ID (empty String by default)
   * @see #StopWatch(String)
   */
  public String getId() {
    return this.id;
  }

  /**
   * Configure whether completed tasks are recorded. Set to {@code false} when
   * timing millions of intervals to avoid excessive memory use. Default is
   * {@code true}.
   */
  public void setKeepTaskList(boolean keepTaskList) {
    this.keepTaskList = keepTaskList;
  }

  /**
   * Start an unnamed task.
   *
   * @see #start(String)
   * @see #stop()
   */
  public void start() throws IllegalStateException {
    start("");
  }

  /**
   * Start a named task.
   *
   * @param taskName the name of the task to start
   * @throws IllegalStateException if a task is already running
   * @see #stop()
   */
  public void start(String taskName) throws IllegalStateException {
    if (isRunning()) {
      throw new IllegalStateException("Can't start StopWatch: it's already running");
    }
    this.currentTaskName = taskName;
    this.currentTaskStartNanos = System.nanoTime();
  }

  /**
   * Stop the current task and record its elapsed time.
   *
   * @throws IllegalStateException if no task is running
   * @see #start(String)
   */
  public void stop() throws IllegalStateException {
    if (!isRunning()) {
      throw new IllegalStateException("Can't stop StopWatch: it's not running");
    }
    long elapsedNanos = System.nanoTime() - this.currentTaskStartNanos;
    this.totalTimeNanos += elapsedNanos;
    this.lastCompletedTask = new TaskInfo(this.currentTaskName, elapsedNanos);
    if (this.keepTaskList) {
      this.completedTasks.add(this.lastCompletedTask);
    }
    this.completedTaskCount++;
    this.currentTaskName = null;
  }

  /**
   * Determine whether this {@code StopWatch} is currently running.
   *
   * @see #currentTaskName()
   */
  public boolean isRunning() {
    return (this.currentTaskName != null);
  }

  /**
   * Get the name of the currently running task, if any.
   *
   * @see #isRunning()
   */
  @Nullable
  public String currentTaskName() {
    return this.currentTaskName;
  }

  /**
   * Get the time taken by the last task in nanoseconds.
   *
   * @throws IllegalStateException if no task has completed yet
   * @see #getLastTaskTimeMillis()
   */
  public long getLastTaskTimeNanos() throws IllegalStateException {
    return lastTaskOrThrow("interval").getTimeNanos();
  }

  /**
   * Get the time taken by the last task in milliseconds.
   *
   * @throws IllegalStateException if no task has completed yet
   * @see #getLastTaskTimeNanos()
   */
  public long getLastTaskTimeMillis() throws IllegalStateException {
    return lastTaskOrThrow("interval").getTimeMillis();
  }

  /**
   * Get the name of the last task.
   *
   * @throws IllegalStateException if no task has completed yet
   */
  public String getLastTaskName() throws IllegalStateException {
    return lastTaskOrThrow("name").getTaskName();
  }

  /**
   * Get the last task as a {@link TaskInfo} object.
   *
   * @throws IllegalStateException if no task has completed yet
   */
  public TaskInfo getLastTaskInfo() throws IllegalStateException {
    return lastTaskOrThrow("info");
  }

  /** Return the last completed task, or throw if nothing has run yet. */
  private TaskInfo lastTaskOrThrow(String what) {
    if (this.lastCompletedTask == null) {
      throw new IllegalStateException("No tasks run: can't get last task " + what);
    }
    return this.lastCompletedTask;
  }

  /**
   * Get the total time in nanoseconds for all tasks.
   *
   * @see #getTotalTimeMillis()
   * @see #getTotalTimeSeconds()
   */
  public long getTotalTimeNanos() {
    return this.totalTimeNanos;
  }

  /**
   * Get the total time in milliseconds for all tasks.
   *
   * @see #getTotalTimeNanos()
   * @see #getTotalTimeSeconds()
   */
  public long getTotalTimeMillis() {
    return nanosToMillis(this.totalTimeNanos);
  }

  /**
   * Get the total time in seconds for all tasks.
   *
   * @see #getTotalTimeNanos()
   * @see #getTotalTimeMillis()
   */
  public double getTotalTimeSeconds() {
    return nanosToSeconds(this.totalTimeNanos);
  }

  /**
   * Get the number of tasks timed.
   */
  public int getTaskCount() {
    return this.completedTaskCount;
  }

  /**
   * Get an array of the data for tasks performed.
   *
   * @throws UnsupportedOperationException if task recording has been disabled
   */
  public TaskInfo[] getTaskInfo() {
    if (!this.keepTaskList) {
      throw new UnsupportedOperationException("Task info is not being kept!");
    }
    return this.completedTasks.toArray(new TaskInfo[0]);
  }

  /**
   * Get a short description of the total running time.
   */
  public String shortSummary() {
    return "StopWatch '" + getId() + "': running time = " + getTotalTimeNanos() + " ns";
  }

  /**
   * Generate a string with a table describing all tasks performed. For custom
   * reporting, call {@link #getTaskInfo()} and use the task info directly.
   */
  public String prettyPrint() {
    StringBuilder sb = new StringBuilder(shortSummary());
    sb.append('\n');
    if (!this.keepTaskList) {
      sb.append("No task info kept");
      return sb.toString();
    }
    sb.append("---------------------------------------------\n");
    sb.append("ns % Task name\n");
    sb.append("---------------------------------------------\n");
    NumberFormat nanosFormat = NumberFormat.getNumberInstance();
    nanosFormat.setMinimumIntegerDigits(9);
    nanosFormat.setGroupingUsed(false);
    NumberFormat percentFormat = NumberFormat.getPercentInstance();
    percentFormat.setMinimumIntegerDigits(3);
    percentFormat.setGroupingUsed(false);
    for (TaskInfo task : getTaskInfo()) {
      sb.append(nanosFormat.format(task.getTimeNanos())).append(" ");
      sb.append(percentFormat.format((double) task.getTimeNanos() / getTotalTimeNanos())).append(" ");
      sb.append(task.getTaskName()).append('\n');
    }
    return sb.toString();
  }

  /**
   * Generate an informative string describing all tasks performed. For custom
   * reporting, call {@link #getTaskInfo()} and use the task info directly.
   */
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder(shortSummary());
    if (!this.keepTaskList) {
      sb.append("; no task info kept");
      return sb.toString();
    }
    for (TaskInfo task : getTaskInfo()) {
      sb.append("; [").append(task.getTaskName()).append("] took ").append(task.getTimeNanos()).append(" ns");
      long percent = Math.round(100.0 * task.getTimeNanos() / getTotalTimeNanos());
      sb.append(" = ").append(percent).append('%');
    }
    return sb.toString();
  }

  private static long nanosToMillis(long duration) {
    return TimeUnit.NANOSECONDS.toMillis(duration);
  }

  private static double nanosToSeconds(long duration) {
    return duration / 1_000_000_000.0;
  }

  /**
   * Immutable data about one task executed within the {@code StopWatch}.
   */
  public static final class TaskInfo {

    private final String taskName;

    private final long timeNanos;

    TaskInfo(String taskName, long timeNanos) {
      this.taskName = taskName;
      this.timeNanos = timeNanos;
    }

    /**
     * Get the name of this task.
     */
    public String getTaskName() {
      return this.taskName;
    }

    /**
     * Get the time in nanoseconds this task took.
     *
     * @see #getTimeMillis()
     * @see #getTimeSeconds()
     */
    public long getTimeNanos() {
      return this.timeNanos;
    }

    /**
     * Get the time in milliseconds this task took.
     *
     * @see #getTimeNanos()
     * @see #getTimeSeconds()
     */
    public long getTimeMillis() {
      return nanosToMillis(this.timeNanos);
    }

    /**
     * Get the time in seconds this task took.
     *
     * @see #getTimeMillis()
     * @see #getTimeNanos()
     */
    public double getTimeSeconds() {
      return nanosToSeconds(this.timeNanos);
    }
  }
}
| 7,173 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/springframework/ComparableComparator.java | /*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.springframework;
import java.util.Comparator;
/**
* Comparator that adapts Comparables to the Comparator interface. Mainly for
* internal use in other Comparators, when supposed to work on Comparables.
*
* @author Keith Donald
* @since 1.2.2
* @param <T> the type of comparable objects that may be compared by this
* comparator
* @see Comparable
*/
class ComparableComparator<T extends Comparable<T>> implements Comparator<T> {

  /**
   * A shared instance of this comparator; safe to reuse because the comparator
   * is stateless. (Mirrors Spring's {@code Comparators#comparable()}.)
   */
  @SuppressWarnings("rawtypes")
  public static final ComparableComparator INSTANCE = new ComparableComparator();

  /**
   * Delegates directly to {@link Comparable#compareTo}; neither argument may be
   * {@code null} (wrap in a null-safe comparator if nulls are possible).
   */
  @Override
  public int compare(T o1, T o2) {
    return o1.compareTo(o2);
  }
}
| 7,174 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/util/springframework/NullSafeComparator.java | /*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.springframework;
import org.apache.avro.reflect.Nullable;
import java.util.Comparator;
/**
* A Comparator that will safely compare nulls to be lower or higher than other
* objects. Can decorate a given Comparator or work on Comparables.
*
* @author Keith Donald
* @author Juergen Hoeller
* @since 1.2.2
* @param <T> the type of objects that may be compared by this comparator
* @see Comparable
*/
class NullSafeComparator<T> implements Comparator<T> {

  /**
   * Shared instance that treats nulls as lower than non-null objects. see
   * Comparators#nullsLow()
   */
  @SuppressWarnings("rawtypes")
  public static final NullSafeComparator NULLS_LOW = new NullSafeComparator<>(true);

  /**
   * Shared instance that treats nulls as higher than non-null objects. see
   * Comparators#nullsHigh()
   */
  @SuppressWarnings("rawtypes")
  public static final NullSafeComparator NULLS_HIGH = new NullSafeComparator<>(false);

  /** Comparator applied when both arguments are non-null. */
  private final Comparator<T> nonNullComparator;

  /** Whether {@code null} sorts lower ({@code true}) or higher ({@code false}). */
  private final boolean nullsLow;

  /**
   * Create a NullSafeComparator that sorts {@code null} according to the given
   * flag and compares non-null elements via their {@link Comparable}
   * implementation. Prefer the shared {@link #NULLS_LOW} / {@link #NULLS_HIGH}
   * instances.
   *
   * @param nullsLow whether to treat nulls lower or higher than non-null objects
   */
  @SuppressWarnings("unchecked")
  private NullSafeComparator(boolean nullsLow) {
    this(ComparableComparator.INSTANCE, nullsLow);
  }

  /**
   * Create a NullSafeComparator that sorts {@code null} according to the given
   * flag, decorating the given Comparator for non-null elements. The decorated
   * Comparator must be able to handle the elements this comparator is applied
   * to.
   *
   * @param comparator the comparator to use when comparing two non-null objects
   * @param nullsLow   whether to treat nulls lower or higher than non-null
   *                   objects
   */
  public NullSafeComparator(Comparator<T> comparator, boolean nullsLow) {
    this.nonNullComparator = comparator;
    this.nullsLow = nullsLow;
  }

  @Override
  public int compare(@Nullable T o1, @Nullable T o2) {
    if (o1 == o2) {
      return 0;
    }
    // Rank assigned to a null on the left-hand side; mirrored for the right.
    int nullRank = (this.nullsLow ? -1 : 1);
    if (o1 == null) {
      return nullRank;
    }
    if (o2 == null) {
      return -nullRank;
    }
    return this.nonNullComparator.compare(o1, o2);
  }

  @Override
  @SuppressWarnings("unchecked")
  public boolean equals(@Nullable Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof NullSafeComparator)) {
      return false;
    }
    NullSafeComparator<T> that = (NullSafeComparator<T>) other;
    return (this.nullsLow == that.nullsLow && this.nonNullComparator.equals(that.nonNullComparator));
  }

  @Override
  public int hashCode() {
    return this.nonNullComparator.hashCode() * (this.nullsLow ? -1 : 1);
  }

  @Override
  public String toString() {
    return "NullSafeComparator: non-null comparator [" + this.nonNullComparator + "]; "
        + (this.nullsLow ? "nulls low" : "nulls high");
  }
}
| 7,175 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/TestZstandardCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import org.junit.jupiter.api.Test;
public class TestZstandardCodec {

  @Test
  void zstandardToStringAndName() throws IOException {
    // A factory created with compression level 3 must produce a ZstandardCodec
    // whose name is the codec id and whose toString includes the level.
    Codec codec = CodecFactory.zstandardCodec(3).createInstance();
    assertTrue(codec instanceof ZstandardCodec);
    assertEquals(codec.getName(), "zstandard");
    assertEquals(codec.toString(), "zstandard[3]");
  }
}
| 7,176 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/TestAllCodecs.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.stream.Stream;
/**
 * Round-trip compression tests run against every codec shipped with Avro.
 */
public class TestAllCodecs {

  /**
   * Compresses highly compressible data with the given codec and verifies the
   * decompressed output equals the input.
   */
  @ParameterizedTest
  @MethodSource("codecTypes")
  void codec(String codec, Class<? extends Codec> codecClass) throws IOException {
    int inputSize = 500_000;
    byte[] input = generateTestData(inputSize);
    Codec codecInstance = CodecFactory.fromString(codec).createInstance();
    Assertions.assertTrue(codecClass.isInstance(codecInstance));
    Assertions.assertTrue(codecInstance.getName().equals(codec));
    ByteBuffer inputByteBuffer = ByteBuffer.wrap(input);
    ByteBuffer compressedBuffer = codecInstance.compress(inputByteBuffer);
    int compressedSize = compressedBuffer.remaining();
    // Make sure something returned
    Assertions.assertTrue(compressedSize > 0);
    // While the compressed size could in many real cases
    // *increase* compared to the input size, our input data
    // is extremely easy to compress and all Avro's compression algorithms
    // should have a compression ratio greater than 1 (except 'null').
    Assertions.assertTrue(compressedSize < inputSize || codec.equals("null"));
    // Decompress the data
    ByteBuffer decompressedBuffer = codecInstance.decompress(compressedBuffer);
    // Validate that the input and output are equal.
    inputByteBuffer.rewind();
    Assertions.assertEquals(inputByteBuffer, decompressedBuffer);
  }

  /**
   * Same round trip as {@link #codec}, but feeds the codec sliced buffers with
   * non-zero positions/offsets so codecs that assume data starts at array index
   * 0 (instead of honoring position/limit) are caught.
   */
  @ParameterizedTest
  @MethodSource("codecTypes")
  void codecSlice(String codec, Class<? extends Codec> codecClass) throws IOException {
    int inputSize = 500_000;
    byte[] input = generateTestData(inputSize);
    Codec codecInstance = CodecFactory.fromString(codec).createInstance();
    Assertions.assertTrue(codecClass.isInstance(codecInstance));
    ByteBuffer partialBuffer = ByteBuffer.wrap(input);
    partialBuffer.position(17);
    ByteBuffer inputByteBuffer = partialBuffer.slice();
    ByteBuffer compressedBuffer = codecInstance.compress(inputByteBuffer);
    int compressedSize = compressedBuffer.remaining();
    // Make sure something returned
    Assertions.assertTrue(compressedSize > 0);
    // Create a slice from the compressed buffer
    ByteBuffer sliceBuffer = ByteBuffer.allocate(compressedSize + 100);
    sliceBuffer.position(50);
    sliceBuffer.put(compressedBuffer);
    sliceBuffer.limit(compressedSize + 50);
    sliceBuffer.position(50);
    // Decompress the data
    ByteBuffer decompressedBuffer = codecInstance.decompress(sliceBuffer.slice());
    // Validate that the input and output are equal.
    inputByteBuffer.rewind();
    Assertions.assertEquals(inputByteBuffer, decompressedBuffer);
  }

  /** Supplies (codec name, expected implementation class) pairs. */
  public static Stream<Arguments> codecTypes() {
    return Stream.of(Arguments.of("bzip2", BZip2Codec.class), Arguments.of("zstandard", ZstandardCodec.class),
        Arguments.of("null", NullCodec.class), Arguments.of("xz", XZCodec.class),
        Arguments.of("snappy", SnappyCodec.class), Arguments.of("deflate", DeflateCodec.class));
  }

  // Generate some test data that will compress easily: a repeating 'A'..'J'
  // pattern of the requested size.
  public static byte[] generateTestData(int inputSize) {
    byte[] arr = new byte[inputSize];
    for (int i = 0; i < arr.length; i++) {
      arr[i] = (byte) (65 + i % 10);
    }
    return arr;
  }
}
| 7,177 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/TestSeekableByteArrayInput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
public class TestSeekableByteArrayInput {

  /** Writes a single record to an in-memory Avro data file and returns its bytes. */
  private byte[] getSerializedMessage(IndexedRecord message, Schema schema) throws Exception {
    ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
    SpecificDatumWriter<IndexedRecord> writer = new SpecificDatumWriter<>();
    try (DataFileWriter<IndexedRecord> dfw = new DataFileWriter<>(writer).create(schema, baos)) {
      dfw.append(message);
    }
    return baos.toByteArray();
  }

  /** Builds a one-field record schema: {@code TestRecord { string name }}. */
  private Schema getTestSchema() throws Exception {
    Schema schema = Schema.createRecord("TestRecord", "this is a test record", "org.apache.avro.file", false);
    List<Field> fields = new ArrayList<>();
    fields.add(new Field("name", Schema.create(Type.STRING), "this is a test field"));
    schema.setFields(fields);
    return schema;
  }

  /**
   * Round-trips a record through an in-memory data file read back via
   * {@link SeekableByteArrayInput}, and verifies the field value survives.
   */
  @Test
  void serialization() throws Exception {
    Schema testSchema = getTestSchema();
    GenericRecord message = new Record(testSchema);
    message.put("name", "testValue");
    byte[] data = getSerializedMessage(message, testSchema);
    GenericDatumReader<IndexedRecord> reader = new GenericDatumReader<>(testSchema);
    final IndexedRecord result;
    try (SeekableInput in = new SeekableByteArrayInput(data);
        FileReader<IndexedRecord> dfr = DataFileReader.openReader(in, reader)) {
      result = dfr.next();
    }
    assertNotNull(result);
    assertTrue(result instanceof GenericRecord);
    // Avro reads string fields back as Utf8, not java.lang.String.
    assertEquals(new Utf8("testValue"), ((GenericRecord) result).get("name"));
  }
}
| 7,178 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/TestCustomCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.avro.file.codec.CustomCodec;
import org.junit.jupiter.api.Test;
public class TestCustomCodec {

  /**
   * Round-trips a small payload through {@link CustomCodec} and checks codec
   * equality semantics against an unrelated codec.
   *
   * @throws IOException if compression or decompression fails. The original
   *                     version swallowed this exception (printStackTrace) and
   *                     then dereferenced the still-null {@code decompressed}
   *                     buffer, turning any codec failure into a misleading
   *                     NullPointerException; letting the exception propagate
   *                     fails the test with the real cause instead.
   */
  @Test
  void customCodec() throws IOException {
    CustomCodec customCodec = new CustomCodec();
    Codec snappyCodec = new SnappyCodec.Option().createInstance();
    assertEquals(customCodec, new CustomCodec());
    assertNotEquals(customCodec, snappyCodec);

    String testString = "Testing 123";
    ByteBuffer original = ByteBuffer.allocate(testString.getBytes(UTF_8).length);
    original.put(testString.getBytes(UTF_8));
    original.rewind();

    ByteBuffer compressed = customCodec.compress(original);
    compressed.rewind();
    ByteBuffer decompressed = customCodec.decompress(compressed);

    assertEquals(testString, new String(decompressed.array(), UTF_8));
  }
}
| 7,179 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/TestIOExceptionDuringWrite.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.io.OutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.RandomData;
import org.junit.jupiter.api.Test;
/*
 * Tests that no garbage is written to the end of the file after an exception
 * has occurred during writing.
 */
public class TestIOExceptionDuringWrite {

  /**
   * OutputStream that accepts a fixed number of bytes, then throws a single
   * {@link IOException}, and fails the test if any byte is written after that
   * exception has fired.
   */
  private static class FailingOutputStream extends OutputStream {

    // Remaining bytes before the artificial failure; negative once it fired.
    private int byteCnt;

    public FailingOutputStream(int failAfter) {
      byteCnt = failAfter;
    }

    @Override
    public void write(int b) throws IOException {
      if (byteCnt > 0) {
        --byteCnt;
      } else if (byteCnt == 0) {
        // Fire the failure exactly once, then go negative so any further
        // write is detected below.
        --byteCnt;
        throw new IOException("Artificial failure from FailingOutputStream");
      } else {
        fail("No bytes should have been written after IOException");
      }
    }
  }

  private static final String SCHEMA_JSON = "{\"type\": \"record\", \"name\": \"Test\", \"fields\": ["
      + "{\"name\":\"stringField\", \"type\":\"string\"}," + "{\"name\":\"longField\", \"type\":\"long\"}]}";

  private static final Schema SCHEMA = new Schema.Parser().parse(SCHEMA_JSON);

  /**
   * Writes random records until the underlying stream fails and verifies the
   * writer surfaces the IOException; FailingOutputStream itself fails the test
   * if anything is written after the failure.
   */
  @Test
  void noWritingAfterException() throws IOException {
    try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      writer.create(SCHEMA, new FailingOutputStream(100000));
      int recordCnt = 0;
      for (Object datum : new RandomData(SCHEMA, 100000, 42)) {
        writer.append(datum);
        // Flush periodically so buffered bytes actually reach the failing
        // stream while appending.
        if (++recordCnt % 17 == 0) {
          writer.flush();
        }
      }
    } catch (IOException e) {
      // Expected: the artificial failure propagated out of the writer.
      return;
    }
    fail("IOException should have been thrown");
  }
}
| 7,180 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/file/codec/CustomCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.file.codec;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.avro.file.Codec;
/**
 * Simple custom codec used to validate that {@link Codec} can be extended
 * publicly. The compress and decompress operations are just a bitwise NOT of
 * the data, so the transform is its own inverse.
 */
public class CustomCodec extends Codec {
  private static final String CODECNAME = "CUSTOMCODEC";

  @Override
  public String getName() {
    return CODECNAME;
  }

  /** "Compresses" by inverting every remaining byte of {@code in}. */
  @Override
  public ByteBuffer compress(ByteBuffer in) throws IOException {
    return invert(in);
  }

  /** "Decompresses" by inverting every remaining byte of {@code in}. */
  @Override
  public ByteBuffer decompress(ByteBuffer in) throws IOException {
    return invert(in);
  }

  /**
   * Returns a new buffer holding the bitwise NOT of the remaining bytes of
   * {@code in}. The loop runs while bytes remain (position < limit): the
   * previous version iterated up to capacity(), which overruns the output
   * buffer and throws BufferUnderflowException whenever the input's limit is
   * below its capacity. For the full buffers used by these tests the behavior
   * is unchanged.
   */
  private static ByteBuffer invert(ByteBuffer in) {
    ByteBuffer out = ByteBuffer.allocate(in.remaining());
    while (in.hasRemaining())
      out.put((byte) ~in.get());
    return out;
  }

  @Override
  public boolean equals(Object other) {
    if (this == other)
      return true;
    if (other instanceof Codec) {
      // Probe equality behaviorally: compare round-trip results on a small
      // sample payload (the codec name bytes).
      ByteBuffer original = ByteBuffer.allocate(getName().getBytes(UTF_8).length);
      original.put(getName().getBytes(UTF_8));
      original.rewind();
      try {
        return compareDecompress((Codec) other, original);
      } catch (IOException e) {
        return false;
      }
    } else
      return false;
  }

  /**
   * Codecs must implement an equals() method. Two codecs, A and B are equal if:
   * the result of A and B decompressing content compressed by A is the same AND
   * the result of A and B decompressing content compressed by B is the same
   */
  private boolean compareDecompress(Codec other, ByteBuffer original) throws IOException {
    ByteBuffer compressedA = this.compress(original);
    original.rewind();
    ByteBuffer compressedB = other.compress(original);
    return this.decompress(compressedA).equals(other.decompress((ByteBuffer) compressedA.rewind()))
        && this.decompress(compressedB).equals(other.decompress((ByteBuffer) compressedB.rewind()));
  }

  @Override
  public int hashCode() {
    return getName().hashCode();
  }
}
| 7,181 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestNonStringMapKeys.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.SeekableByteArrayInput;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
/**
* Test serialization and de-serialization of non-string map-keys
*/
public class TestNonStringMapKeys {
  @Test
  void nonStringMapKeys() throws Exception {
    Company entityObj1 = buildCompany();
    Company entityObj2 = buildCompany();
    String testType = "NonStringKeysTest";
    Company[] entityObjs = { entityObj1, entityObj2 };
    byte[] bytes = testSerialization(testType, entityObj1, entityObj2);
    // A generic read sees the non-string-keyed map as an array of {key, value} records.
    List<GenericRecord> records = testGenericDatumRead(testType, bytes, entityObjs);
    GenericRecord record = records.get(0);
    Object employees = record.get("employees");
    assertTrue(employees instanceof GenericArray, "Unable to read 'employees' map");
    GenericArray arrayEmployees = ((GenericArray) employees);
    Object employeeRecord = arrayEmployees.get(0);
    assertTrue(employeeRecord instanceof GenericRecord);
    Object key = ((GenericRecord) employeeRecord).get(ReflectData.NS_MAP_KEY);
    Object value = ((GenericRecord) employeeRecord).get(ReflectData.NS_MAP_VALUE);
    assertTrue(key instanceof GenericRecord);
    assertTrue(value instanceof GenericRecord);
    // Map stored: 1:foo, 2:bar
    Object id = ((GenericRecord) key).get("id");
    Object name = ((GenericRecord) value).get("name").toString();
    assertTrue((id.equals(1) && name.equals("Foo")) || (id.equals(2) && name.equals("Bar")));
    // A reflect read reconstructs the original map.
    List<Company> records2 = testReflectDatumRead(testType, bytes, entityObjs);
    Company co = records2.get(0);
    log("Read: " + co);
    assertNotNull(co.getEmployees());
    assertEquals(2, co.getEmployees().size());
    for (Entry<EmployeeId, EmployeeInfo> e : co.getEmployees().entrySet()) {
      id = e.getKey().getId();
      name = e.getValue().getName();
      assertTrue((id.equals(1) && name.equals("Foo")) || (id.equals(2) && name.equals("Bar")));
    }
    byte[] jsonBytes = testJsonEncoder(testType, entityObj1);
    assertNotNull(jsonBytes, "Unable to serialize using jsonEncoder");
    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
    assertEquals(record, jsonRecord, "JSON decoder output not same as Binary Decoder");
  }

  @Test
  void nonStringMapKeysInNestedMaps() throws Exception {
    Company2 entityObj1 = buildCompany2();
    String testType = "NestedMapsTest";
    Company2[] entityObjs = { entityObj1 };
    byte[] bytes = testSerialization(testType, entityObj1);
    List<GenericRecord> records = testGenericDatumRead(testType, bytes, entityObjs);
    GenericRecord record = records.get(0);
    Object employees = record.get("employees");
    assertTrue(employees instanceof GenericArray, "Unable to read 'employees' map");
    GenericArray employeesMapArray = ((GenericArray) employees);
    Object employeeMapElement = employeesMapArray.get(0);
    assertTrue(employeeMapElement instanceof GenericRecord);
    Object key = ((GenericRecord) employeeMapElement).get(ReflectData.NS_MAP_KEY);
    Object value = ((GenericRecord) employeeMapElement).get(ReflectData.NS_MAP_VALUE);
    assertEquals(11, key);
    assertTrue(value instanceof GenericRecord);
    GenericRecord employeeInfo = (GenericRecord) value;
    Object name = employeeInfo.get("name").toString();
    assertEquals("Foo", name);
    // The nested companyMap is likewise represented as a key/value array.
    Object companyMap = employeeInfo.get("companyMap");
    assertTrue(companyMap instanceof GenericArray);
    GenericArray companyMapArray = (GenericArray) companyMap;
    Object companyMapElement = companyMapArray.get(0);
    assertTrue(companyMapElement instanceof GenericRecord);
    key = ((GenericRecord) companyMapElement).get(ReflectData.NS_MAP_KEY);
    value = ((GenericRecord) companyMapElement).get(ReflectData.NS_MAP_VALUE);
    assertEquals(14, key);
    if (value instanceof Utf8)
      value = ((Utf8) value).toString();
    assertEquals("CompanyFoo", value);
    List<Company2> records2 = testReflectDatumRead(testType, bytes, entityObjs);
    Company2 co = records2.get(0);
    log("Read: " + co);
    assertNotNull(co.getEmployees());
    assertEquals(1, co.getEmployees().size());
    for (Entry<Integer, EmployeeInfo2> e : co.getEmployees().entrySet()) {
      Integer id = e.getKey();
      name = e.getValue().getName();
      assertTrue(id.equals(11) && name.equals("Foo"));
      assertEquals("CompanyFoo", e.getValue().companyMap.values().iterator().next());
    }
    byte[] jsonBytes = testJsonEncoder(testType, entityObj1);
    assertNotNull(jsonBytes, "Unable to serialize using jsonEncoder");
    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
    assertEquals(record, jsonRecord, "JSON decoder output not same as Binary Decoder");
  }

  @Test
  void recordNameInvariance() throws Exception {
    // Maps with the same key/value signature must map to the same generated
    // Pair record schema regardless of the concrete Map implementation.
    SameMapSignature entityObj1 = buildSameMapSignature();
    String testType = "RecordNameInvariance";
    SameMapSignature[] entityObjs = { entityObj1 };
    byte[] bytes = testSerialization(testType, entityObj1);
    List<GenericRecord> records = testGenericDatumRead(testType, bytes, entityObjs);
    GenericRecord record = records.get(0);
    Object map1obj = record.get("map1");
    assertTrue(map1obj instanceof GenericArray, "Unable to read map1");
    GenericArray map1array = ((GenericArray) map1obj);
    Object map1element = map1array.get(0);
    assertTrue(map1element instanceof GenericRecord);
    Object key = ((GenericRecord) map1element).get(ReflectData.NS_MAP_KEY);
    Object value = ((GenericRecord) map1element).get(ReflectData.NS_MAP_VALUE);
    assertEquals(1, key);
    assertEquals("Foo", value.toString());
    Object map2obj = record.get("map2");
    assertEquals(map1obj, map2obj);
    List<SameMapSignature> records2 = testReflectDatumRead(testType, bytes, entityObjs);
    SameMapSignature entity = records2.get(0);
    log("Read: " + entity);
    assertNotNull(entity.getMap1());
    assertEquals(1, entity.getMap1().size());
    for (Entry<Integer, String> e : entity.getMap1().entrySet()) {
      key = e.getKey();
      value = e.getValue();
      assertEquals(1, key);
      assertEquals("Foo", value.toString());
    }
    assertEquals(entity.getMap1(), entity.getMap2());
    assertEquals(entity.getMap1(), entity.getMap3());
    assertEquals(entity.getMap1(), entity.getMap4());
    ReflectData rdata = ReflectData.get();
    Schema schema = rdata.getSchema(SameMapSignature.class);
    Schema map1schema = schema.getField("map1").schema().getElementType();
    Schema map2schema = schema.getField("map2").schema().getElementType();
    Schema map3schema = schema.getField("map3").schema().getElementType();
    Schema map4schema = schema.getField("map4").schema().getElementType();
    log("Schema for map1 = " + map1schema);
    log("Schema for map2 = " + map2schema);
    log("Schema for map3 = " + map3schema);
    log("Schema for map4 = " + map4schema);
    assertEquals(map1schema.getFullName(), "org.apache.avro.reflect.PairIntegerString");
    assertEquals(map1schema, map2schema);
    assertEquals(map1schema, map3schema);
    assertEquals(map1schema, map4schema);
    byte[] jsonBytes = testJsonEncoder(testType, entityObj1);
    assertNotNull(jsonBytes, "Unable to serialize using jsonEncoder");
    GenericRecord jsonRecord = testJsonDecoder(testType, jsonBytes, entityObj1);
    assertEquals(record.get("map1"), jsonRecord.get("map1"), "JSON decoder output not same as Binary Decoder");
    assertEquals(record.get("map2"), jsonRecord.get("map2"), "JSON decoder output not same as Binary Decoder");
  }

  /**
   * Test serialization of non-string map-key POJOs.
   */
  public <T> byte[] testSerialization(String testType, T... entityObjs) throws Exception {
    log("---- Beginning " + testType + " ----");
    T entityObj1 = entityObjs[0];
    ReflectData rdata = ReflectData.AllowNull.get();
    Schema schema = rdata.getSchema(entityObj1.getClass());
    assertNotNull(schema, "Unable to get schema for " + testType);
    log(schema.toString(true));
    ReflectDatumWriter<T> datumWriter = new ReflectDatumWriter(entityObj1.getClass(), rdata);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // try-with-resources: the previous version leaked the writer when an
    // append threw before the explicit close().
    try (DataFileWriter<T> fileWriter = new DataFileWriter<>(datumWriter)) {
      fileWriter.create(schema, baos);
      for (T entityObj : entityObjs) {
        fileWriter.append(entityObj);
      }
    }
    return baos.toByteArray();
  }

  /**
   * Test that non-string map-keys are readable through GenericDatumReader This
   * method should read as array of {key, value} and not as a map
   */
  private <T> List<GenericRecord> testGenericDatumRead(String testType, byte[] bytes, T... entityObjs)
      throws IOException {
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
    List<GenericRecord> records = new ArrayList<>();
    try (DataFileReader<GenericRecord> fileReader = new DataFileReader<>(avroInputStream, datumReader)) {
      Schema schema = fileReader.getSchema();
      assertNotNull(schema, "Unable to get schema for " + testType);
      GenericRecord record = null;
      while (fileReader.hasNext()) {
        try {
          records.add(fileReader.next(record));
        } catch (Exception e) {
          // Surface the schema in the failure message to ease debugging.
          fail("Fail with schema: " + schema);
        }
      }
    }
    return records;
  }

  /**
   * Test that non-string map-keys are readable through ReflectDatumReader This
   * method should form the original map and should not return any array of {key,
   * value} as done by {@link #testGenericDatumRead()}
   */
  private <T> List<T> testReflectDatumRead(String testType, byte[] bytes, T... entityObjs) throws IOException {
    ReflectDatumReader<T> datumReader = new ReflectDatumReader<>();
    SeekableByteArrayInput avroInputStream = new SeekableByteArrayInput(bytes);
    List<T> records = new ArrayList<>();
    try (DataFileReader<T> fileReader = new DataFileReader<>(avroInputStream, datumReader)) {
      Schema schema = fileReader.getSchema();
      T record = null;
      while (fileReader.hasNext()) {
        records.add(fileReader.next(record));
      }
    }
    return records;
  }

  /** Serializes {@code entityObj} with the JSON encoder and returns the bytes. */
  private <T> byte[] testJsonEncoder(String testType, T entityObj) throws IOException {
    ReflectData rdata = ReflectData.AllowNull.get();
    Schema schema = rdata.getSchema(entityObj.getClass());
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, os);
    ReflectDatumWriter<T> datumWriter = new ReflectDatumWriter<>(schema, rdata);
    datumWriter.write(entityObj, encoder);
    encoder.flush();
    byte[] bytes = os.toByteArray();
    System.out.println("JSON encoder output:\n" + new String(bytes, UTF_8));
    return bytes;
  }

  /** Decodes JSON-encoded {@code bytes} back into a generic record. */
  private <T> GenericRecord testJsonDecoder(String testType, byte[] bytes, T entityObj) throws IOException {
    ReflectData rdata = ReflectData.AllowNull.get();
    Schema schema = rdata.getSchema(entityObj.getClass());
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(schema);
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new String(bytes, UTF_8));
    return datumReader.read(null, decoder);
  }

  /**
   * Create a POJO having non-string map-keys
   */
  private Company buildCompany() {
    Company co = new Company();
    HashMap<EmployeeId, EmployeeInfo> employees = new HashMap<>();
    co.setEmployees(employees);
    employees.put(new EmployeeId(1), new EmployeeInfo("Foo"));
    employees.put(new EmployeeId(2), new EmployeeInfo("Bar"));
    return co;
  }

  /**
   * Create a POJO having non-string map-keys The objects inside that map should
   * also have non-string map-keys
   */
  private Company2 buildCompany2() {
    Company2 co = new Company2();
    HashMap<Integer, EmployeeInfo2> employees = new HashMap<>();
    co.setEmployees(employees);
    // (an unused EmployeeId2 local was removed here)
    EmployeeInfo2 empInfo = new EmployeeInfo2("Foo");
    HashMap<Integer, String> companyMap = new HashMap<>();
    empInfo.setCompanyMap(companyMap);
    companyMap.put(14, "CompanyFoo");
    employees.put(11, empInfo);
    return co;
  }

  /** Builds identical {1 -> "Foo"} maps across four Map implementations. */
  private SameMapSignature buildSameMapSignature() {
    SameMapSignature obj = new SameMapSignature();
    obj.setMap1(new HashMap<>());
    obj.getMap1().put(1, "Foo");
    obj.setMap2(new ConcurrentHashMap<>());
    obj.getMap2().put(1, "Foo");
    obj.setMap3(new LinkedHashMap<>());
    obj.getMap3().put(1, "Foo");
    obj.setMap4(new TreeMap<>());
    obj.getMap4().put(1, "Foo");
    return obj;
  }

  private void log(String msg) {
    System.out.println(msg);
  }
}
/** Test bean whose employee map is keyed by a non-string type. */
class Company {
  HashMap<EmployeeId, EmployeeInfo> employees;

  public HashMap<EmployeeId, EmployeeInfo> getEmployees() {
    return this.employees;
  }

  public void setEmployees(HashMap<EmployeeId, EmployeeInfo> employees) {
    this.employees = employees;
  }

  @Override
  public String toString() {
    return String.format("Company [employees=%s]", employees);
  }
}
/** Non-string map key wrapping a single integer id. */
class EmployeeId {
  Integer id;

  /** No-arg constructor required for reflect-based deserialization. */
  public EmployeeId() {
  }

  public EmployeeId(Integer id) {
    this.id = id;
  }

  public Integer getId() {
    return id;
  }

  /** @param id the employee id (parameter was previously mis-named {@code zip}). */
  public void setId(Integer id) {
    this.id = id;
  }

  @Override
  public String toString() {
    return "EmployeeId [id=" + id + "]";
  }
}
/** Map value paired with {@link EmployeeId} in the Company tests. */
class EmployeeInfo {
  String name;

  /** No-arg constructor required for reflect-based deserialization. */
  public EmployeeInfo() {
  }

  public EmployeeInfo(String name) {
    this.name = name;
  }

  public String getName() {
    return this.name;
  }

  public void setName(String name) {
    this.name = name;
  }

  @Override
  public String toString() {
    return String.format("EmployeeInfo [name=%s]", name);
  }
}
/** Test bean whose map values themselves contain a non-string-keyed map. */
class Company2 {
  HashMap<Integer, EmployeeInfo2> employees;

  public Company2() {
  }

  public HashMap<Integer, EmployeeInfo2> getEmployees() {
    return this.employees;
  }

  public void setEmployees(HashMap<Integer, EmployeeInfo2> employees) {
    this.employees = employees;
  }

  @Override
  public String toString() {
    return String.format("Company2 [employees=%s]", employees);
  }
}
/** Non-string key type for the nested-map test. */
class EmployeeId2 {
  Integer id;

  /** No-arg constructor required for reflect-based deserialization. */
  public EmployeeId2() {
  }

  public EmployeeId2(Integer id) {
    this.id = id;
  }

  public Integer getId() {
    return id;
  }

  /** @param id the employee id (parameter was previously mis-named {@code zip}). */
  public void setId(Integer id) {
    this.id = id;
  }

  @Override
  public String toString() {
    return "EmployeeId2 [id=" + id + "]";
  }
}
/**
 * Map value that itself carries an Integer-keyed map, used to exercise
 * nested non-string map keys. Note: toString() intentionally reports only
 * the name, mirroring the original implementation.
 */
class EmployeeInfo2 {
  String name;
  HashMap<Integer, String> companyMap;

  /** No-arg constructor required for reflect-based deserialization. */
  public EmployeeInfo2() {
  }

  public EmployeeInfo2(String name) {
    this.name = name;
  }

  public String getName() {
    return this.name;
  }

  public void setName(String name) {
    this.name = name;
  }

  public HashMap<Integer, String> getCompanyMap() {
    return this.companyMap;
  }

  public void setCompanyMap(HashMap<Integer, String> companyMap) {
    this.companyMap = companyMap;
  }

  @Override
  public String toString() {
    return String.format("EmployeeInfo2 [name=%s]", name);
  }
}
/**
 * Four maps with identical key/value signatures but different concrete
 * implementations; used to check that all of them share one generated
 * Pair record schema.
 */
class SameMapSignature {
  HashMap<Integer, String> map1;
  ConcurrentHashMap<Integer, String> map2;
  LinkedHashMap<Integer, String> map3;
  TreeMap<Integer, String> map4;

  public Map<Integer, String> getMap1() {
    return this.map1;
  }

  public void setMap1(HashMap<Integer, String> map1) {
    this.map1 = map1;
  }

  public Map<Integer, String> getMap2() {
    return this.map2;
  }

  public void setMap2(ConcurrentHashMap<Integer, String> map2) {
    this.map2 = map2;
  }

  public Map<Integer, String> getMap3() {
    return this.map3;
  }

  public void setMap3(LinkedHashMap<Integer, String> map3) {
    this.map3 = map3;
  }

  public Map<Integer, String> getMap4() {
    return this.map4;
  }

  public void setMap4(TreeMap<Integer, String> map4) {
    this.map4 = map4;
  }
}
| 7,182 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.Map;
import java.util.Optional;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
public class TestReflectDatumReader {
/**
 * Serializes {@code toSerialize} with a ReflectDatumWriter over a binary
 * encoder and returns the raw bytes.
 */
private static <T> byte[] serializeWithReflectDatumWriter(T toSerialize, Class<T> toSerializeClass)
    throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  ReflectDatumWriter<T> writer = new ReflectDatumWriter<>(toSerializeClass);
  Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(buffer, null);
  writer.write(toSerialize, binaryEncoder);
  binaryEncoder.flush();
  return buffer.toByteArray();
}
/** Round-trips a POJO holding a List field through reflect writer/reader. */
@Test
void read_PojoWithList() throws IOException {
  PojoWithList original = new PojoWithList();
  original.setId(42);
  original.setRelatedIds(Arrays.asList(1, 2, 3));
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithList.class);
  ReflectDatumReader<PojoWithList> reader = new ReflectDatumReader<>(PojoWithList.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithList roundTripped = new PojoWithList();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/** Round-trips a POJO holding a primitive int[] field. */
@Test
void read_PojoWithArray() throws IOException {
  PojoWithArray original = new PojoWithArray();
  original.setId(42);
  original.setRelatedIds(new int[] { 1, 2, 3 });
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithArray.class);
  ReflectDatumReader<PojoWithArray> reader = new ReflectDatumReader<>(PojoWithArray.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithArray roundTripped = new PojoWithArray();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/** Round-trips a POJO holding a Set field. */
@Test
public void testRead_PojoWithSet() throws IOException {
  PojoWithSet original = new PojoWithSet();
  original.setId(42);
  Set<Integer> ids = new HashSet<>();
  ids.add(1);
  ids.add(2);
  ids.add(3);
  original.setRelatedIds(ids);
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithSet.class);
  ReflectDatumReader<PojoWithSet> reader = new ReflectDatumReader<>(PojoWithSet.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithSet roundTripped = new PojoWithSet();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/** Round-trips a POJO holding a Map field with non-string (Integer) keys. */
@Test
public void testRead_PojoWithMap() throws IOException {
  PojoWithMap original = new PojoWithMap();
  original.setId(42);
  Map<Integer, Integer> ids = new HashMap<>();
  ids.put(1, 11);
  ids.put(2, 22);
  ids.put(3, 33);
  original.setRelatedIds(ids);
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithMap.class);
  ReflectDatumReader<PojoWithMap> reader = new ReflectDatumReader<>(PojoWithMap.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithMap roundTripped = new PojoWithMap();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/** Round-trips a POJO holding a present Optional field. */
@Test
public void testRead_PojoWithOptional() throws IOException {
  PojoWithOptional original = new PojoWithOptional();
  original.setId(42);
  original.setRelatedId(Optional.of(13));
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithOptional.class);
  ReflectDatumReader<PojoWithOptional> reader = new ReflectDatumReader<>(PojoWithOptional.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithOptional roundTripped = new PojoWithOptional();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/** Round-trips a POJO holding an empty Optional field. */
@Test
public void testRead_PojoWithEmptyOptional() throws IOException {
  PojoWithOptional original = new PojoWithOptional();
  original.setId(42);
  original.setRelatedId(Optional.empty());
  byte[] encoded = serializeWithReflectDatumWriter(original, PojoWithOptional.class);
  ReflectDatumReader<PojoWithOptional> reader = new ReflectDatumReader<>(PojoWithOptional.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(encoded, null);
  PojoWithOptional roundTripped = new PojoWithOptional();
  reader.read(roundTripped, decoder);
  assertEquals(original, roundTripped);
}
/**
 * Schema-evolution check: a record written with schema V1 (only {@code id})
 * is read with schema V2, which adds several {@code @Nullable} primitive
 * fields. The new fields must come back as the FieldAccess default values.
 */
@Test
public void testRead_PojoWithNullableAnnotation() throws IOException {
  PojoWithBasicTypeNullableAnnotationV1 v1Pojo = new PojoWithBasicTypeNullableAnnotationV1();
  int idValue = 1;
  v1Pojo.setId(idValue);
  byte[] serializedBytes = serializeWithReflectDatumWriter(v1Pojo, PojoWithBasicTypeNullableAnnotationV1.class);
  Decoder decoder = DecoderFactory.get().binaryDecoder(serializedBytes, null);
  ReflectData reflectData = ReflectData.get();
  Schema schemaV1 = reflectData.getSchema(PojoWithBasicTypeNullableAnnotationV1.class);
  Schema schemaV2 = reflectData.getSchema(PojoWithBasicTypeNullableAnnotationV2.class);
  // Reader configured with writer schema V1 and reader schema V2.
  ReflectDatumReader<PojoWithBasicTypeNullableAnnotationV2> reflectDatumReader = new ReflectDatumReader<>(schemaV1,
      schemaV2);
  PojoWithBasicTypeNullableAnnotationV2 v2Pojo = new PojoWithBasicTypeNullableAnnotationV2();
  reflectDatumReader.read(v2Pojo, decoder);
  // NOTE(review): arguments below are (actual, expected) — reversed relative
  // to the JUnit convention; harmless for equality checks but worth confirming.
  assertEquals(v1Pojo.id, v2Pojo.id);
  assertEquals(v2Pojo.id, idValue);
  assertEquals(v2Pojo.intId, FieldAccess.INT_DEFAULT_VALUE);
  assertEquals(v2Pojo.floatId, FieldAccess.FLOAT_DEFAULT_VALUE);
  assertEquals(v2Pojo.shortId, FieldAccess.SHORT_DEFAULT_VALUE);
  assertEquals(v2Pojo.byteId, FieldAccess.BYTE_DEFAULT_VALUE);
  assertEquals(v2Pojo.booleanId, FieldAccess.BOOLEAN_DEFAULT_VALUE);
  assertEquals(v2Pojo.charId, FieldAccess.CHAR_DEFAULT_VALUE);
  assertEquals(v2Pojo.longId, FieldAccess.LONG_DEFAULT_VALUE);
  assertEquals(v2Pojo.doubleId, FieldAccess.DOUBLE_DEFAULT_VALUE);
}
/** Fixture: POJO with an int id and a List field. */
public static class PojoWithList {
  private int id;
  private List<Integer> relatedIds;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public List<Integer> getRelatedIds() {
    return relatedIds;
  }

  public void setRelatedIds(List<Integer> relatedIds) {
    this.relatedIds = relatedIds;
  }

  @Override
  public int hashCode() {
    // Same value as the classic 31-based two-field accumulation.
    int hash = 31 + id;
    return 31 * hash + (relatedIds == null ? 0 : relatedIds.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    PojoWithList other = (PojoWithList) obj;
    return id == other.id
        && (relatedIds == null ? other.relatedIds == null : relatedIds.equals(other.relatedIds));
  }
}
/** Fixture: POJO with an int id and a primitive array field. */
public static class PojoWithArray {
  private int id;
  private int[] relatedIds;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public int[] getRelatedIds() {
    return relatedIds;
  }

  public void setRelatedIds(int[] relatedIds) {
    this.relatedIds = relatedIds;
  }

  @Override
  public int hashCode() {
    // Same value as the classic 31-based two-field accumulation.
    int hash = 31 + id;
    return 31 * hash + Arrays.hashCode(relatedIds);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    PojoWithArray other = (PojoWithArray) obj;
    return id == other.id && Arrays.equals(relatedIds, other.relatedIds);
  }
}
/** Fixture: POJO with an int id and a Set field. */
public static class PojoWithSet {
  private int id;
  private Set<Integer> relatedIds;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public Set<Integer> getRelatedIds() {
    return relatedIds;
  }

  public void setRelatedIds(Set<Integer> relatedIds) {
    this.relatedIds = relatedIds;
  }

  @Override
  public int hashCode() {
    // Same value as the classic 31-based two-field accumulation.
    int hash = 31 + id;
    return 31 * hash + (relatedIds == null ? 0 : relatedIds.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    PojoWithSet other = (PojoWithSet) obj;
    return id == other.id
        && (relatedIds == null ? other.relatedIds == null : relatedIds.equals(other.relatedIds));
  }
}
/** Fixture: POJO with an int id and an Integer-keyed Map field. */
public static class PojoWithMap {
  private int id;
  private Map<Integer, Integer> relatedIds;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public Map<Integer, Integer> getRelatedIds() {
    return relatedIds;
  }

  public void setRelatedIds(Map<Integer, Integer> relatedIds) {
    this.relatedIds = relatedIds;
  }

  @Override
  public int hashCode() {
    // Same value as the classic 31-based two-field accumulation.
    int hash = 31 + id;
    return 31 * hash + (relatedIds == null ? 0 : relatedIds.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    PojoWithMap other = (PojoWithMap) obj;
    return id == other.id
        && (relatedIds == null ? other.relatedIds == null : relatedIds.equals(other.relatedIds));
  }
}
/**
 * Fixture: POJO with an Optional field. (Note: Optional as a field is
 * non-idiomatic Java, but it is the point of this fixture.)
 */
public static class PojoWithOptional {
  private int id;
  private Optional<Integer> relatedId;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public Optional<Integer> getRelatedId() {
    return relatedId;
  }

  public void setRelatedId(Optional<Integer> relatedId) {
    this.relatedId = relatedId;
  }

  @Override
  public int hashCode() {
    // Same value as the classic 31-based two-field accumulation.
    int hash = 31 + id;
    return 31 * hash + (relatedId == null ? 0 : relatedId.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    PojoWithOptional other = (PojoWithOptional) obj;
    return id == other.id
        && (relatedId == null ? other.relatedId == null : relatedId.equals(other.relatedId));
  }
}
/** Writer-side ("V1") schema fixture: carries only an int id. */
public static class PojoWithBasicTypeNullableAnnotationV1 {
  private int id;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  @Override
  public int hashCode() {
    // Identical to: result = 31 * 1 + id.
    return 31 + id;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    return id == ((PojoWithBasicTypeNullableAnnotationV1) obj).id;
  }
}
/**
 * Reader-side ("V2") schema fixture: extends V1's {@code id} with one
 * {@code @Nullable} field of every Java primitive type, so the evolution
 * test can check that missing fields come back as default values.
 */
public static class PojoWithBasicTypeNullableAnnotationV2 {
  private int id;
  @Nullable
  private int intId;
  @Nullable
  private float floatId;
  @Nullable
  private short shortId;
  @Nullable
  private byte byteId;
  @Nullable
  private boolean booleanId;
  @Nullable
  private char charId;
  @Nullable
  private long longId;
  @Nullable
  private double doubleId;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public int getIntId() {
    return intId;
  }

  public void setIntId(int intId) {
    this.intId = intId;
  }

  public float getFloatId() {
    return floatId;
  }

  public void setFloatId(float floatId) {
    this.floatId = floatId;
  }

  public short getShortId() {
    return shortId;
  }

  public void setShortId(short shortId) {
    this.shortId = shortId;
  }

  public byte getByteId() {
    return byteId;
  }

  public void setByteId(byte byteId) {
    this.byteId = byteId;
  }

  public boolean isBooleanId() {
    return booleanId;
  }

  public void setBooleanId(boolean booleanId) {
    this.booleanId = booleanId;
  }

  public char getCharId() {
    return charId;
  }

  public void setCharId(char charId) {
    this.charId = charId;
  }

  public long getLongId() {
    return longId;
  }

  public void setLongId(long longId) {
    this.longId = longId;
  }

  public double getDoubleId() {
    return doubleId;
  }

  public void setDoubleId(double doubleId) {
    this.doubleId = doubleId;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    long temp;
    int result = 1;
    result = prime * result + id;
    result = prime * result + intId;
    // NOTE(review): 0.0f maps to 0 here while floatToIntBits(-0.0f) would
    // not — a quirk preserved from the generated implementation.
    result = prime * result + (floatId != 0.0f ? Float.floatToIntBits(floatId) : 0);
    result = prime * result + (int) shortId;
    result = prime * result + (int) byteId;
    result = prime * result + (booleanId ? 1 : 0);
    result = prime * result + (int) charId;
    result = prime * result + (int) (longId ^ (longId >>> 32));
    temp = Double.doubleToLongBits(doubleId);
    result = 31 * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o)
      return true;
    if (o == null || getClass() != o.getClass())
      return false;
    PojoWithBasicTypeNullableAnnotationV2 that = (PojoWithBasicTypeNullableAnnotationV2) o;
    if (id != that.id)
      return false;
    if (intId != that.intId)
      return false;
    // Float/double fields use compare() so NaN values compare equal.
    if (Float.compare(that.floatId, floatId) != 0)
      return false;
    if (shortId != that.shortId)
      return false;
    if (byteId != that.byteId)
      return false;
    if (booleanId != that.booleanId)
      return false;
    if (charId != that.charId)
      return false;
    if (longId != that.longId)
      return false;
    return Double.compare(that.doubleId, doubleId) == 0;
  }
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.AvroTypeException;
import org.apache.avro.JsonProperties;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.reflect.TestReflect.SampleRecord.AnotherSampleRecord;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
public class TestReflect {
EncoderFactory factory = new EncoderFactory();
// test primitive type inference: each Java primitive (and its boxed wrapper)
// must induce the expected Avro schema JSON
@Test
void testVoid() {
check(Void.TYPE, "\"null\"");
check(Void.class, "\"null\"");
}
@Test
void testBoolean() {
check(Boolean.TYPE, "\"boolean\"");
check(Boolean.class, "\"boolean\"");
}
@Test
void testInt() {
check(Integer.TYPE, "\"int\"");
check(Integer.class, "\"int\"");
}
// byte/short/char have no Avro primitive of their own: they map to "int"
// tagged with the originating Java class so reflection can restore the type
@Test
void testByte() {
check(Byte.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Byte\"}");
check(Byte.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Byte\"}");
}
@Test
void testShort() {
check(Short.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Short\"}");
check(Short.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Short\"}");
}
@Test
void testChar() {
check(Character.TYPE, "{\"type\":\"int\",\"java-class\":\"java.lang.Character\"}");
check(Character.class, "{\"type\":\"int\",\"java-class\":\"java.lang.Character\"}");
}
@Test
void testLong() {
check(Long.TYPE, "\"long\"");
check(Long.class, "\"long\"");
}
@Test
void testFloat() {
check(Float.TYPE, "\"float\"");
check(Float.class, "\"float\"");
}
@Test
void testDouble() {
check(Double.TYPE, "\"double\"");
check(Double.class, "\"double\"");
}
@Test
void string() {
check("Foo", "\"string\"");
}
@Test
void bytes() {
// ByteBuffer maps to plain "bytes"; byte[] additionally records its Java class
check(ByteBuffer.allocate(0), "\"bytes\"");
check(new byte[0], "{\"type\":\"bytes\",\"java-class\":\"[B\"}");
}
// the following tests assert resolveUnion picks branch index 1 (the
// non-null branch) for each kind of datum
@Test
void unionWithCollection() {
Schema s = new Schema.Parser().parse("[\"null\", {\"type\":\"array\",\"items\":\"float\"}]");
GenericData data = ReflectData.get();
assertEquals(1, data.resolveUnion(s, new ArrayList<Float>()));
}
@Test
void unionWithMap() {
Schema s = new Schema.Parser().parse("[\"null\", {\"type\":\"map\",\"values\":\"float\"}]");
GenericData data = ReflectData.get();
assertEquals(1, data.resolveUnion(s, new HashMap<String, Float>()));
}
@Test
void unionWithMapWithUtf8Keys() {
Schema s = new Schema.Parser().parse("[\"null\", {\"type\":\"map\",\"values\":\"float\"}]");
GenericData data = ReflectData.get();
HashMap<Utf8, Float> map = new HashMap<>();
map.put(new Utf8("foo"), 1.0f);
assertEquals(1, data.resolveUnion(s, map));
}
@Test
void unionWithFixed() {
Schema s = new Schema.Parser().parse("[\"null\", {\"type\":\"fixed\",\"name\":\"f\",\"size\":1}]");
Schema f = new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"f\",\"size\":1}");
GenericData data = ReflectData.get();
assertEquals(1, data.resolveUnion(s, new GenericData.Fixed(f)));
}
@Test
void unionWithEnum() {
Schema s = new Schema.Parser().parse("[\"null\", {\"type\":\"enum\",\"name\":\"E\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"symbols\":[\"A\",\"B\"]}]");
GenericData data = ReflectData.get();
assertEquals(1, data.resolveUnion(s, E.A));
}
@Test
void unionWithBytes() {
Schema s = new Schema.Parser().parse("[\"null\", \"bytes\"]");
GenericData data = ReflectData.get();
assertEquals(1, data.resolveUnion(s, ByteBuffer.wrap(new byte[] { 1 })));
}
// test map, array and list type inference; R1 is initialized with one entry
// per container so round-trips compare non-empty data
public static class R1 {
private Map<String, String> mapField = new HashMap<>();
private String[] arrayField = new String[] { "foo" };
private List<String> listField = new ArrayList<>();
{
mapField.put("foo", "bar");
listField.add("foo");
}
@Override
public boolean equals(Object o) {
if (!(o instanceof R1))
return false;
R1 that = (R1) o;
return mapField.equals(that.mapField) && Arrays.equals(this.arrayField, that.arrayField)
&& listField.equals(that.listField);
}
}
@Test
void map() throws Exception {
check(R1.class.getDeclaredField("mapField").getGenericType(), "{\"type\":\"map\",\"values\":\"string\"}");
}
// arrays and Lists both become Avro arrays, distinguished by "java-class"
@Test
void array() throws Exception {
check(R1.class.getDeclaredField("arrayField").getGenericType(),
"{\"type\":\"array\",\"items\":\"string\",\"java-class\":\"[Ljava.lang.String;\"}");
}
@Test
void list() throws Exception {
check(R1.class.getDeclaredField("listField").getGenericType(),
"{\"type\":\"array\",\"items\":\"string\"" + ",\"java-class\":\"java.util.List\"}");
}
@Test
void r1() throws Exception {
checkReadWrite(new R1());
}
// test record, array and list i/o
public static class R2 {
private String[] arrayField;
private Collection<String> collectionField;
@Override
public boolean equals(Object o) {
if (!(o instanceof R2))
return false;
R2 that = (R2) o;
return Arrays.equals(this.arrayField, that.arrayField) && collectionField.equals(that.collectionField);
}
}
@Test
void r2() throws Exception {
R2 r2 = new R2();
r2.arrayField = new String[] { "foo" };
r2.collectionField = new ArrayList<>();
r2.collectionField.add("foo");
checkReadWrite(r2);
}
// test array i/o of unboxed type
public static class R3 {
private int[] intArray;
@Override
public boolean equals(Object o) {
if (!(o instanceof R3))
return false;
R3 that = (R3) o;
return Arrays.equals(this.intArray, that.intArray);
}
}
@Test
void r3() throws Exception {
R3 r3 = new R3();
r3.intArray = new int[] { 1 };
checkReadWrite(r3);
}
// test inherited fields & short datatype
public static class R4 {
public short value;
public short[] shorts;
public byte b;
public char c;
@Override
public boolean equals(Object o) {
if (!(o instanceof R4))
return false;
R4 that = (R4) o;
return this.value == that.value && Arrays.equals(this.shorts, that.shorts) && this.b == that.b
&& this.c == that.c;
}
}
// R5 declares nothing itself, so its schema must come entirely from the
// fields inherited from R4
public static class R5 extends R4 {
}
@Test
void r5() throws Exception {
R5 r5 = new R5();
r5.value = 1;
// boundary values exercise the full short range through the int encoding
r5.shorts = new short[] { 3, 255, 256, Short.MAX_VALUE, Short.MIN_VALUE };
r5.b = 99;
r5.c = 'a';
checkReadWrite(r5);
}
// test union annotation on a class: R6's schema is the union [R7, R8]
@Union({ R7.class, R8.class })
public static class R6 {
}
public static class R7 extends R6 {
public int value;
@Override
public boolean equals(Object o) {
if (!(o instanceof R7))
return false;
return this.value == ((R7) o).value;
}
}
public static class R8 extends R6 {
public float value;
@Override
public boolean equals(Object o) {
if (!(o instanceof R8))
return false;
return this.value == ((R8) o).value;
}
}
// test arrays of union annotated class
public static class R9 {
public R6[] r6s;
@Override
public boolean equals(Object o) {
if (!(o instanceof R9))
return false;
return Arrays.equals(this.r6s, ((R9) o).r6s);
}
}
@Test
void r6() throws Exception {
// both subtypes must round-trip through the union schema of the base class
R7 r7 = new R7();
r7.value = 1;
checkReadWrite(r7, ReflectData.get().getSchema(R6.class));
R8 r8 = new R8();
r8.value = 1;
checkReadWrite(r8, ReflectData.get().getSchema(R6.class));
R9 r9 = new R9();
r9.r6s = new R6[] { r7, r8 };
checkReadWrite(r9, ReflectData.get().getSchema(R9.class));
}
// test union in fields; Void.class adds a null branch to the union
public static class R9_1 {
@Union({ Void.class, R7.class, R8.class })
public Object value;
@Override
public boolean equals(Object o) {
if (!(o instanceof R9_1))
return false;
if (this.value == null)
return ((R9_1) o).value == null;
return this.value.equals(((R9_1) o).value);
}
}
@Test
void r6_1() throws Exception {
R7 r7 = new R7();
r7.value = 1;
checkReadWrite(r7, ReflectData.get().getSchema(R6.class));
R8 r8 = new R8();
r8.value = 1;
checkReadWrite(r8, ReflectData.get().getSchema(R6.class));
// the field union must accept null as well as both record branches
R9_1 r9_1 = new R9_1();
r9_1.value = null;
checkReadWrite(r9_1, ReflectData.get().getSchema(R9_1.class));
r9_1.value = r7;
checkReadWrite(r9_1, ReflectData.get().getSchema(R9_1.class));
r9_1.value = r8;
checkReadWrite(r9_1, ReflectData.get().getSchema(R9_1.class));
}
// test union annotation on methods and parameters; Void.class contributes
// the null branch of each union
public static interface P0 {
@Union({ Void.class, String.class })
String foo(@Union({ Void.class, String.class }) String s);
}
@Test
void p0() throws Exception {
Protocol p0 = ReflectData.get().getProtocol(P0.class);
Protocol.Message message = p0.getMessages().get("foo");
// check response schema is union
Schema response = message.getResponse();
assertEquals(Schema.Type.UNION, response.getType());
assertEquals(Schema.Type.NULL, response.getTypes().get(0).getType());
assertEquals(Schema.Type.STRING, response.getTypes().get(1).getType());
// check request schema is union
Schema request = message.getRequest();
Field field = request.getField("s");
assertNotNull(field, "field 's' should not be null");
Schema param = field.schema();
assertEquals(Schema.Type.UNION, param.getType());
assertEquals(Schema.Type.NULL, param.getTypes().get(0).getType());
assertEquals(Schema.Type.STRING, param.getTypes().get(1).getType());
// check union erasure: the [null, string] union erases to String
assertEquals(String.class, ReflectData.get().getClass(response));
assertEquals(String.class, ReflectData.get().getClass(param));
}
// test Stringable annotation: the class serializes as its toString() value
// and is reconstructed through its single-String constructor
@Stringable
public static class R10 {
private String text;
public R10(String text) {
this.text = text;
}
@Override
public String toString() {
return text;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof R10))
return false;
return this.text.equals(((R10) o).text);
}
}
@Test
void r10() throws Exception {
Schema r10Schema = ReflectData.get().getSchema(R10.class);
assertEquals(Schema.Type.STRING, r10Schema.getType());
assertEquals(R10.class.getName(), r10Schema.getProp("java-class"));
checkReadWrite(new R10("foo"), r10Schema);
}
// test Nullable annotation on field
public static class R11 {
@Nullable
private String text;
@Override
public boolean equals(Object o) {
if (!(o instanceof R11))
return false;
R11 that = (R11) o;
if (this.text == null)
return that.text == null;
return this.text.equals(that.text);
}
}
@Test
void r11() throws Exception {
Schema r11Record = ReflectData.get().getSchema(R11.class);
assertEquals(Schema.Type.RECORD, r11Record.getType());
Field r11Field = r11Record.getField("text");
// @Nullable yields a [null, string] union with a null default value
assertEquals(JsonProperties.NULL_VALUE, r11Field.defaultVal());
Schema r11FieldSchema = r11Field.schema();
assertEquals(Schema.Type.UNION, r11FieldSchema.getType());
assertEquals(Schema.Type.NULL, r11FieldSchema.getTypes().get(0).getType());
Schema r11String = r11FieldSchema.getTypes().get(1);
assertEquals(Schema.Type.STRING, r11String.getType());
// round-trip both the null and the non-null case
R11 r11 = new R11();
checkReadWrite(r11, r11Record);
r11.text = "foo";
checkReadWrite(r11, r11Record);
}
// test nullable annotation on methods and parameters
public static interface P1 {
@Nullable
String foo(@Nullable String s);
}
@Test
void p1() throws Exception {
Protocol p1 = ReflectData.get().getProtocol(P1.class);
Protocol.Message message = p1.getMessages().get("foo");
// check response schema is union
Schema response = message.getResponse();
assertEquals(Schema.Type.UNION, response.getType());
assertEquals(Schema.Type.NULL, response.getTypes().get(0).getType());
assertEquals(Schema.Type.STRING, response.getTypes().get(1).getType());
// check request schema is union
Schema request = message.getRequest();
Field field = request.getField("s");
assertNotNull(field, "field 's' should not be null");
Schema param = field.schema();
assertEquals(Schema.Type.UNION, param.getType());
assertEquals(Schema.Type.NULL, param.getTypes().get(0).getType());
assertEquals(Schema.Type.STRING, param.getTypes().get(1).getType());
// check union erasure: the [null, string] union erases to String
assertEquals(String.class, ReflectData.get().getClass(response));
assertEquals(String.class, ReflectData.get().getClass(param));
}
// test AvroSchema annotation: an explicit schema string overrides inference
public static class R12 { // fields
@AvroSchema("\"int\"")
Object x;
@AvroSchema("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}")
List<String> strings;
}
@Test
void r12() throws Exception {
Schema s = ReflectData.get().getSchema(R12.class);
assertEquals(Schema.Type.INT, s.getField("x").schema().getType());
assertEquals(new Schema.Parser().parse("{\"type\":\"array\",\"items\":[\"null\",\"string\"]}"),
s.getField("strings").schema());
}
@AvroSchema("\"null\"") // record
public static class R13 {
}
@Test
void r13() throws Exception {
Schema s = ReflectData.get().getSchema(R13.class);
assertEquals(Schema.Type.NULL, s.getType());
}
public interface P4 {
@AvroSchema("\"int\"") // message value
Object foo(@AvroSchema("\"int\"") Object x); // message param
}
@Test
void p4() throws Exception {
Protocol p = ReflectData.get().getProtocol(P4.class);
Protocol.Message message = p.getMessages().get("foo");
assertEquals(Schema.Type.INT, message.getResponse().getType());
Field field = message.getRequest().getField("x");
assertEquals(Schema.Type.INT, field.schema().getType());
}
// test error: an Exception subclass maps to an Avro error record
@SuppressWarnings("serial")
public static class E1 extends Exception {
}
public static interface P2 {
void error() throws E1;
}
// @Nullable combined with @AvroDefault must keep the declared default
private static class NullableDefaultTest {
@Nullable
@AvroDefault("1")
int foo;
}
@Test
public void testAvroNullableDefault() {
check(NullableDefaultTest.class,
"{\"type\":\"record\",\"name\":\"NullableDefaultTest\","
+ "\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":["
+ "{\"name\":\"foo\",\"type\":[\"null\",\"int\"],\"default\":1}]}");
}
// @Union combined with @AvroDefault must keep the declared default
private static class UnionDefaultTest {
@Union({ Integer.class, String.class })
@AvroDefault("1")
Object foo;
}
@Test
public void testAvroUnionDefault() {
check(UnionDefaultTest.class,
"{\"type\":\"record\",\"name\":\"UnionDefaultTest\","
+ "\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":["
+ "{\"name\":\"foo\",\"type\":[\"int\",\"string\"],\"default\":1}]}");
}
@Test
void p2() throws Exception {
// the exception class reflects to an error record with a nullable message
Schema e1 = ReflectData.get().getSchema(E1.class);
assertEquals(Schema.Type.RECORD, e1.getType());
assertTrue(e1.isError());
Field message = e1.getField("detailMessage");
assertNotNull(message, "field 'detailMessage' should not be null");
Schema messageSchema = message.schema();
assertEquals(Schema.Type.UNION, messageSchema.getType());
assertEquals(Schema.Type.NULL, messageSchema.getTypes().get(0).getType());
assertEquals(Schema.Type.STRING, messageSchema.getTypes().get(1).getType());
Protocol p2 = ReflectData.get().getProtocol(P2.class);
Protocol.Message m = p2.getMessages().get("error");
// check error schema is union (string branch first, then the declared error)
Schema response = m.getErrors();
assertEquals(Schema.Type.UNION, response.getType());
assertEquals(Schema.Type.STRING, response.getTypes().get(0).getType());
assertEquals(e1, response.getTypes().get(1));
}
@Test
void noPackage() throws Exception {
// classes in the default package must still yield a resolvable class name
Class<?> noPackage = Class.forName("NoPackage");
Schema s = ReflectData.get().getSchema(noPackage);
assertEquals(noPackage.getName(), ReflectData.getClassName(s));
}
/**
 * Round-trips {@code object} through binary encode/decode using the schema
 * reflected from its class and asserts the result equals the original.
 */
void checkReadWrite(Object object) throws Exception {
checkReadWrite(object, ReflectData.get().getSchema(object.getClass()));
}
/**
 * Round-trips {@code object} through binary encode/decode with the given
 * schema; for record schemas additionally copies every field reflectively via
 * getField/setField and asserts the copy equals the original.
 */
void checkReadWrite(Object object, Schema s) throws Exception {
ReflectDatumWriter<Object> writer = new ReflectDatumWriter<>(s);
ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.write(object, factory.directBinaryEncoder(out, null));
ReflectDatumReader<Object> reader = new ReflectDatumReader<>(s);
Object after = reader.read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
assertEquals(object, after);
// check reflective setField works for records
if (s.getType().equals(Schema.Type.RECORD)) {
// relies on the record class having an accessible no-arg constructor
Object copy = object.getClass().getDeclaredConstructor().newInstance();
for (Field f : s.getFields()) {
Object val = ReflectData.get().getField(object, f.name(), f.pos());
ReflectData.get().setField(copy, f.name(), f.pos(), val);
}
assertEquals(object, copy, "setField");
}
}
// enum used by testEnum and the union tests above
public static enum E {
A, B
}
@Test
void testEnum() throws Exception {
check(E.class, "{\"type\":\"enum\",\"name\":\"E\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"symbols\":[\"A\",\"B\"]}");
}
public static class R {
int a;
long b;
}
@Test
void record() throws Exception {
check(R.class,
"{\"type\":\"record\",\"name\":\"R\",\"namespace\":" + "\"org.apache.avro.reflect.TestReflect\",\"fields\":["
+ "{\"name\":\"a\",\"type\":\"int\"}," + "{\"name\":\"b\",\"type\":\"long\"}]}");
}
// an @AvroIgnore'd field must be absent from the induced schema
public static class RAvroIgnore {
@AvroIgnore
int a;
}
@Test
void annotationAvroIgnore() throws Exception {
check(RAvroIgnore.class, "{\"type\":\"record\",\"name\":\"RAvroIgnore\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":[]}");
}
// @AvroMeta on a type and a field becomes extra JSON properties in the schema
@AvroMeta(key = "X", value = "Y")
public static class RAvroMeta {
@AvroMeta(key = "K", value = "V")
int a;
}
@Test
void annotationAvroMeta() throws Exception {
check(RAvroMeta.class,
"{\"type\":\"record\",\"name\":\"RAvroMeta\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":[" + "{\"name\":\"a\",\"type\":\"int\",\"K\":\"V\"}]"
+ ",\"X\":\"Y\"}");
}
// repeated @AvroMeta annotations with distinct keys all appear
@AvroMeta(key = "X", value = "Y")
@AvroMeta(key = "A", value = "B")
public static class RAvroMultiMeta {
@AvroMeta(key = "K", value = "V")
@AvroMeta(key = "L", value = "W")
int a;
}
@Test
void annotationMultiAvroMeta() {
check(RAvroMultiMeta.class,
"{\"type\":\"record\",\"name\":\"RAvroMultiMeta\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":["
+ "{\"name\":\"a\",\"type\":\"int\",\"K\":\"V\",\"L\":\"W\"}]" + ",\"X\":\"Y\",\"A\":\"B\"}");
}
// duplicate @AvroMeta keys on a field or type are rejected
public static class RAvroDuplicateFieldMeta {
@AvroMeta(key = "K", value = "V")
@AvroMeta(key = "K", value = "W")
int a;
}
@Test
void annotationDuplicateFieldAvroMeta() {
assertThrows(AvroTypeException.class, () -> {
ReflectData.get().getSchema(RAvroDuplicateFieldMeta.class);
});
}
@AvroMeta(key = "K", value = "V")
@AvroMeta(key = "K", value = "W")
public static class RAvroDuplicateTypeMeta {
int a;
}
@Test
void annotationDuplicateTypeAvroMeta() {
assertThrows(AvroTypeException.class, () -> {
ReflectData.get().getSchema(RAvroDuplicateTypeMeta.class);
});
}
// @AvroName renames the field in the schema
public static class RAvroName {
@AvroName("b")
int a;
}
@Test
void annotationAvroName() throws Exception {
check(RAvroName.class, "{\"type\":\"record\",\"name\":\"RAvroName\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":[" + "{\"name\":\"b\",\"type\":\"int\"}]}");
}
// a renamed field colliding with an existing field name must fail
public static class RAvroNameCollide {
@AvroName("b")
int a;
int b;
}
@Test
void annotationAvroNameCollide() throws Exception {
assertThrows(Exception.class, () -> {
check(RAvroNameCollide.class,
"{\"type\":\"record\",\"name\":\"RAvroNameCollide\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":[" + "{\"name\":\"b\",\"type\":\"int\"},"
+ "{\"name\":\"b\",\"type\":\"int\"}]}");
});
}
// @Stringable on a field forces a string schema for that field
public static class RAvroStringableField {
@Stringable
int a;
}
@Test
void annotationAvroStringableFields() throws Exception {
check(RAvroStringableField.class, "{\"type\":\"record\",\"name\":\"RAvroStringableField\",\"namespace\":"
+ "\"org.apache.avro.reflect.TestReflect\",\"fields\":[" + "{\"name\":\"a\",\"type\":\"string\"}]}");
}
// asserts that the schema reflected from o's class matches schemaJson
private void check(Object o, String schemaJson) {
check(o.getClass(), schemaJson);
}
// asserts that the schema reflected from type serializes exactly to schemaJson
private void check(java.lang.reflect.Type type, String schemaJson) {
assertEquals(schemaJson, ReflectData.get().getSchema(type).toString());
}
@Test
void recordIO() throws IOException {
// round-trip a simple record (public and private fields) through binary i/o
Schema schm = ReflectData.get().getSchema(SampleRecord.class);
ReflectDatumWriter<SampleRecord> writer = new ReflectDatumWriter<>(schm);
ByteArrayOutputStream out = new ByteArrayOutputStream();
SampleRecord record = new SampleRecord();
record.x = 5;
record.y = 10;
writer.write(record, factory.directBinaryEncoder(out, null));
ReflectDatumReader<SampleRecord> reader = new ReflectDatumReader<>(schm);
SampleRecord decoded = reader.read(null, DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
assertEquals(record, decoded);
}
// fixture whose Date field is serialized as a long via a custom encoding
public static class AvroEncRecord {
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date date;
@Override
public boolean equals(Object o) {
if (!(o instanceof AvroEncRecord))
return false;
return date.equals(((AvroEncRecord) o).date);
}
}
/**
 * Fixture combining two or more reflect annotations per field: i1-i4 each
 * pair {@code @AvroIgnore} with another annotation, while i5-i11 combine
 * {@code @Stringable}/{@code @Nullable}/{@code @AvroName}/{@code @AvroEncode}.
 * The companion multipleAnnotations() test verifies how the combinations
 * behave on a round-trip.
 */
public static class multipleAnnotationRecord {
@AvroIgnore
@Stringable
Integer i1;
@AvroIgnore
@Nullable
Integer i2;
@AvroIgnore
@AvroName("j")
Integer i3;
@AvroIgnore
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date i4;
@Stringable
@Nullable
Integer i5;
@Stringable
@AvroName("j6")
Integer i6 = 6;
@Stringable
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date i7 = new java.util.Date(7L);
@Nullable
@AvroName("j8")
Integer i8;
@Nullable
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date i9;
@AvroName("j10")
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date i10 = new java.util.Date(10L);
@Stringable
@Nullable
@AvroName("j11")
@AvroEncode(using = DateAsLongEncoding.class)
java.util.Date i11;
}
/**
 * Round-trips {@link multipleAnnotationRecord} and verifies annotation
 * combinations: fields i1-i4 (all {@code @AvroIgnore}'d) decode to null,
 * while the remaining annotated fields keep their written values.
 */
@Test
void multipleAnnotations() throws IOException {
Schema schm = ReflectData.get().getSchema(multipleAnnotationRecord.class);
ReflectDatumWriter<multipleAnnotationRecord> writer = new ReflectDatumWriter<>(schm);
ByteArrayOutputStream out = new ByteArrayOutputStream();
multipleAnnotationRecord record = new multipleAnnotationRecord();
record.i1 = 1;
record.i2 = 2;
record.i3 = 3;
record.i4 = new java.util.Date(4L);
record.i5 = 5;
record.i6 = 6;
record.i7 = new java.util.Date(7L);
record.i8 = 8;
record.i9 = new java.util.Date(9L);
record.i10 = new java.util.Date(10L);
record.i11 = new java.util.Date(11L);
writer.write(record, factory.directBinaryEncoder(out, null));
ReflectDatumReader<multipleAnnotationRecord> reader = new ReflectDatumReader<>(schm);
multipleAnnotationRecord decoded = reader.read(new multipleAnnotationRecord(),
DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
// @AvroIgnore'd fields are not part of the schema, so they read back as null
assertNull(decoded.i1);
assertNull(decoded.i2);
assertNull(decoded.i3);
assertNull(decoded.i4);
// JUnit's assertEquals takes (expected, actual); the arguments were
// previously swapped, which made failure messages misleading.
assertEquals(5, decoded.i5);
assertEquals(6, decoded.i6);
assertEquals(7, decoded.i7.getTime());
assertEquals(8, decoded.i8);
assertEquals(9, decoded.i9.getTime());
assertEquals(10, decoded.i10.getTime());
assertEquals(11, decoded.i11.getTime());
}
/**
 * Verifies that {@code @AvroEncode} surfaces in the induced schema as a
 * "CustomEncoding" property on the encoded field's type.
 */
@Test
void avroEncodeInducing() throws IOException {
Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
// assertEquals takes (expected, actual); the expected JSON goes first
// (previously the arguments were swapped, inverting failure messages).
assertEquals(
"{\"type\":\"record\",\"name\":\"AvroEncRecord\",\"namespace"
+ "\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[{\"name\":\"date\","
+ "\"type\":{\"type\":\"long\",\"CustomEncoding\":\"DateAsLongEncoding\"}}]}",
schm.toString());
}
@Test
void avroEncodeIO() throws IOException {
// the custom-encoded Date field must survive a binary round-trip
Schema schm = ReflectData.get().getSchema(AvroEncRecord.class);
ReflectDatumWriter<AvroEncRecord> writer = new ReflectDatumWriter<>(schm);
ByteArrayOutputStream out = new ByteArrayOutputStream();
AvroEncRecord record = new AvroEncRecord();
record.date = new java.util.Date(948833323L);
writer.write(record, factory.directBinaryEncoder(out, null));
ReflectDatumReader<AvroEncRecord> reader = new ReflectDatumReader<>(schm);
AvroEncRecord decoded = reader.read(new AvroEncRecord(),
DecoderFactory.get().binaryDecoder(out.toByteArray(), null));
assertEquals(record, decoded);
}
@Test
void recordWithNullIO() throws IOException {
// AllowNull makes every field nullable without explicit annotations
ReflectData reflectData = ReflectData.AllowNull.get();
Schema schm = reflectData.getSchema(AnotherSampleRecord.class);
ReflectDatumWriter<AnotherSampleRecord> writer = new ReflectDatumWriter<>(schm);
ByteArrayOutputStream out = new ByteArrayOutputStream();
// keep record.a null and see if that works
Encoder e = factory.directBinaryEncoder(out, null);
AnotherSampleRecord a = new AnotherSampleRecord();
writer.write(a, e);
AnotherSampleRecord b = new AnotherSampleRecord(10);
writer.write(b, e);
e.flush();
// read both records back sequentially from the same stream
ReflectDatumReader<AnotherSampleRecord> reader = new ReflectDatumReader<>(schm);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
Decoder d = DecoderFactory.get().binaryDecoder(in, null);
AnotherSampleRecord decoded = reader.read(null, d);
assertEquals(a, decoded);
decoded = reader.read(null, d);
assertEquals(b, decoded);
}
@Test
void disableUnsafe() throws Exception {
// re-runs two round-trip tests with avro.disable.unsafe=true, presumably to
// exercise the non-Unsafe field accessors — the property is only consulted
// when accessors are (re)built, hence the cache eviction below
String saved = System.getProperty("avro.disable.unsafe");
try {
System.setProperty("avro.disable.unsafe", "true");
// drop cached accessors so the new property value takes effect
ReflectData.ACCESSOR_CACHE.remove(multipleAnnotationRecord.class);
ReflectData.ACCESSOR_CACHE.remove(AnotherSampleRecord.class);
ReflectionUtil.resetFieldAccess();
multipleAnnotations();
recordWithNullIO();
} finally {
// restore the original property and reset the caches for later tests
if (saved == null)
System.clearProperty("avro.disable.unsafe");
else
System.setProperty("avro.disable.unsafe", saved);
ReflectData.ACCESSOR_CACHE.remove(multipleAnnotationRecord.class);
ReflectData.ACCESSOR_CACHE.remove(AnotherSampleRecord.class);
ReflectionUtil.resetFieldAccess();
}
}
/**
 * Two-field fixture (one public, one private) used by the record i/o tests;
 * its nested {@link AnotherSampleRecord} adds nullable and nested-record
 * fields for the AllowNull tests.
 */
public static class SampleRecord {
  public int x = 1;
  private int y = 2;

  @Override
  public int hashCode() {
    return x + y;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj == null || obj.getClass() != getClass()) {
      return false;
    }
    SampleRecord that = (SampleRecord) obj;
    return this.x == that.x && this.y == that.y;
  }

  public static class AnotherSampleRecord {
    private Integer a = null;
    private SampleRecord s = null;

    public AnotherSampleRecord() {
    }

    AnotherSampleRecord(Integer a) {
      this.a = a;
      this.s = new SampleRecord();
    }

    @Override
    public int hashCode() {
      // sum of the component hashes, treating null as 0
      return (a == null ? 0 : a.hashCode()) + (s == null ? 0 : s.hashCode());
    }

    @Override
    public boolean equals(Object other) {
      if (!(other instanceof AnotherSampleRecord)) {
        return false;
      }
      AnotherSampleRecord that = (AnotherSampleRecord) other;
      // null-safe comparison of both fields
      return java.util.Objects.equals(this.a, that.a) && java.util.Objects.equals(this.s, that.s);
    }
  }
}
// record graph used by forwardReference(): B1 and B2 both reference X, so a
// protocol containing A must define X once and reference it thereafter
public static class X {
int i;
}
public static class B1 {
X x;
}
public static class B2 {
X x;
}
public static class A {
B1 b1;
B2 b2;
}
public static interface C {
void foo(A a);
}
/**
 * Round-trips a reflected protocol through its JSON form and verifies that
 * the record types referenced more than once (X via B1 and B2) survive
 * parsing of the forward/backward references.
 */
@Test
void forwardReference() {
ReflectData data = ReflectData.get();
Protocol reflected = data.getProtocol(C.class);
String ref = reflected.toString();
Protocol reparsed = Protocol.parse(ref);
assertEquals(reflected, reparsed);
// Use JUnit assertions instead of the `assert` keyword: bare asserts are
// silently skipped unless the JVM runs with -ea.
assertTrue(reparsed.getTypes().contains(data.getSchema(A.class)));
assertTrue(reparsed.getTypes().contains(data.getSchema(B1.class)));
assertTrue(reparsed.getTypes().contains(data.getSchema(B2.class)));
assertTrue(reparsed.getTypes().contains(data.getSchema(X.class)));
}
// overloaded messages are not representable in an Avro protocol
public static interface P3 {
void m1();
void m1(int x);
}
@Test
void overloadedMethod() {
assertThrows(AvroTypeException.class, () -> {
ReflectData.get().getProtocol(P3.class);
});
}
// classes in the default package must be reflectable (no assertions: these
// just must not throw)
@Test
void noPackageSchema() throws Exception {
ReflectData.get().getSchema(Class.forName("NoPackage"));
}
@Test
void noPackageProtocol() throws Exception {
ReflectData.get().getProtocol(Class.forName("NoPackage"));
}
public static class Y {
int i;
}
/** Test nesting of reflect data within generic. */
@Test
void reflectWithinGeneric() throws Exception {
ReflectData data = ReflectData.get();
// define a record with a field that's a specific Y
Schema schema = Schema.createRecord("Foo", "", "x.y.z", false);
List<Schema.Field> fields = new ArrayList<>();
fields.add(new Schema.Field("f", data.getSchema(Y.class), "", null));
schema.setFields(fields);
// create a generic instance of this record
Y y = new Y();
y.i = 1;
GenericData.Record record = new GenericData.Record(schema);
record.put("f", y);
// test that this instance can be written & re-read
checkBinary(schema, record);
}
// primitive-array round-trips with the direct (non-blocking) encoder
@Test
void primitiveArray() throws Exception {
testPrimitiveArrays(false);
}
// same round-trips with the blocking binary encoder
@Test
void primitiveArrayBlocking() throws Exception {
testPrimitiveArrays(true);
}
/** Runs the primitive-array round-trip for every Java primitive type. */
private void testPrimitiveArrays(boolean blocking) throws Exception {
  Class<?>[] primitiveTypes = { boolean.class, byte.class, short.class, char.class, int.class, long.class,
      float.class, double.class };
  for (Class<?> primitive : primitiveTypes) {
    testPrimitiveArray(primitive, blocking);
  }
}
// builds a 200-element array of c, fills it with random values, and checks
// that it survives a binary round-trip under the reflected array schema
private void testPrimitiveArray(Class<?> c, boolean blocking) throws Exception {
ReflectData data = new ReflectData();
Random r = new Random();
int size = 200;
Object array = Array.newInstance(c, size);
Schema s = data.getSchema(array.getClass());
for (int i = 0; i < size; i++) {
Array.set(array, i, randomFor(c, r));
}
checkBinary(data, s, array, false, blocking);
}
/**
 * Returns a pseudo-random boxed value of the given primitive type, drawing
 * exactly one value from {@code rng}, or null for an unrecognized class.
 */
private Object randomFor(Class<?> type, Random rng) {
  if (type == boolean.class) {
    return rng.nextBoolean();
  }
  if (type == int.class) {
    return rng.nextInt();
  }
  if (type == long.class) {
    return rng.nextLong();
  }
  if (type == byte.class) {
    return (byte) rng.nextInt();
  }
  if (type == float.class) {
    return rng.nextFloat();
  }
  if (type == double.class) {
    return rng.nextDouble();
  }
  if (type == char.class) {
    return (char) rng.nextInt();
  }
  if (type == short.class) {
    return (short) rng.nextInt();
  }
  return null; // not a supported primitive
}
/** Test union of null and an array. */
@Test
void nullArray() throws Exception {
String json = "[{\"type\":\"array\", \"items\": \"long\"}, \"null\"]";
Schema schema = new Schema.Parser().parse(json);
checkBinary(schema, null);
}
/** Test stringable classes. */
@Test
void stringables() throws Exception {
checkStringable(java.math.BigDecimal.class, "10");
checkStringable(java.math.BigInteger.class, "20");
checkStringable(java.net.URI.class, "foo://bar:9000/baz");
checkStringable(java.net.URL.class, "http://bar:9000/baz");
checkStringable(java.io.File.class, "foo.bar");
}
// asserts c reflects to a string schema tagged with its class name, then
// round-trips an instance built via c's single-String constructor
@SuppressWarnings({ "unchecked", "rawtypes" })
public void checkStringable(Class c, String value) throws Exception {
ReflectData data = new ReflectData();
Schema schema = data.getSchema(c);
assertEquals("{\"type\":\"string\",\"java-class\":\"" + c.getName() + "\"}", schema.toString());
checkBinary(schema, c.getConstructor(String.class).newInstance(value));
}
public static class M1 {
Map<Integer, String> integerKeyMap;
Map<java.math.BigInteger, String> bigIntegerKeyMap;
Map<java.math.BigDecimal, String> bigDecimalKeyMap;
Map<java.io.File, String> fileKeyMap;
}
/** Test Map with stringable key classes. */
@Test
void stringableMapKeys() throws Exception {
M1 record = new M1();
record.integerKeyMap = new HashMap<>(1);
record.integerKeyMap.put(10, "foo");
record.bigIntegerKeyMap = new HashMap<>(1);
record.bigIntegerKeyMap.put(java.math.BigInteger.TEN, "bar");
record.bigDecimalKeyMap = new HashMap<>(1);
record.bigDecimalKeyMap.put(java.math.BigDecimal.ONE, "bigDecimal");
record.fileKeyMap = new HashMap<>(1);
record.fileKeyMap.put(new java.io.File("foo.bar"), "file");
ReflectData data = new ReflectData().addStringable(Integer.class);
checkBinary(data, data.getSchema(M1.class), record, true);
}
  /** Record with a stringable field that may be null (see nullableStringableField). */
  public static class NullableStringable {
    java.math.BigDecimal number;
  }
@Test
void nullableStringableField() throws Exception {
NullableStringable datum = new NullableStringable();
datum.number = java.math.BigDecimal.TEN;
Schema schema = ReflectData.AllowNull.get().getSchema(NullableStringable.class);
checkBinary(schema, datum);
}
  /** Round-trips {@code datum} using a direct (non-blocking) binary encoder. */
  public static void checkBinary(ReflectData reflectData, Schema schema, Object datum, boolean equals)
      throws IOException {
    checkBinary(reflectData, schema, datum, equals, false);
  }
private static void checkBinary(ReflectData reflectData, Schema schema, Object datum, boolean equals,
boolean blocking) throws IOException {
ReflectDatumWriter<Object> writer = new ReflectDatumWriter<>(schema);
ByteArrayOutputStream out = new ByteArrayOutputStream();
if (!blocking) {
writer.write(datum, EncoderFactory.get().directBinaryEncoder(out, null));
} else {
writer.write(datum, new EncoderFactory().configureBlockSize(64).blockingBinaryEncoder(out, null));
}
writer.write(datum, EncoderFactory.get().directBinaryEncoder(out, null));
byte[] data = out.toByteArray();
ReflectDatumReader<Object> reader = new ReflectDatumReader<>(schema);
Object decoded = reader.read(null, DecoderFactory.get().binaryDecoder(data, null));
assertEquals(0, reflectData.compare(datum, decoded, schema, equals));
}
  /** Round-trips {@code datum} using the default {@link ReflectData} model. */
  public static void checkBinary(Schema schema, Object datum) throws IOException {
    checkBinary(ReflectData.get(), schema, datum, false);
  }
/** Test that the error message contains the name of the class. */
@Test
void reflectFieldError() throws Exception {
Object datum = "";
try {
ReflectData.get().getField(datum, "notAFieldOfString", 0);
} catch (AvroRuntimeException e) {
assertTrue(e.getMessage().contains(datum.getClass().getName()));
}
}
  // alias with an explicit namespace: expect the qualified alias "b.a"
  @AvroAlias(alias = "a", space = "b")
  private static class AliasA {
  }
  // alias with an empty namespace: expect the unqualified alias "a"
  @AvroAlias(alias = "a", space = "")
  private static class AliasB {
  }
  // alias with no namespace attribute: expect the unqualified alias "a"
  @AvroAlias(alias = "a")
  private static class AliasC {
  }
  /** @AvroAlias with a space yields a qualified alias; otherwise the alias is bare. */
  @Test
  void avroAliasOnClass() {
    check(AliasA.class,
        "{\"type\":\"record\",\"name\":\"AliasA\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"b.a\"]}");
    check(AliasB.class,
        "{\"type\":\"record\",\"name\":\"AliasB\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"a\"]}");
    check(AliasC.class,
        "{\"type\":\"record\",\"name\":\"AliasC\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"a\"]}");
  }
  // @AvroAlias is repeatable: both aliases should appear in the schema
  @AvroAlias(alias = "alias1", space = "space1")
  @AvroAlias(alias = "alias2", space = "space2")
  private static class MultipleAliasRecord {
  }
  /** Repeated @AvroAlias annotations each contribute a qualified alias. */
  @Test
  void multipleAliasAnnotationsOnClass() {
    check(MultipleAliasRecord.class,
        "{\"type\":\"record\",\"name\":\"MultipleAliasRecord\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[],\"aliases\":[\"space1.alias1\",\"space2.alias2\"]}");
  }
  // empty record used by the dollar-terminated-namespace compatibility test
  private static class Z {
  }
@Test
void dollarTerminatedNamespaceCompatibility() {
ReflectData data = ReflectData.get();
Schema s = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).parse(
"{\"type\":\"record\",\"name\":\"Z\",\"namespace\":\"org.apache.avro.reflect.TestReflect$\",\"fields\":[]}");
assertEquals(data.getSchema(data.getClass(s)).toString(),
"{\"type\":\"record\",\"name\":\"Z\",\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":[]}");
}
  /** Legacy '$'-separated nested-record namespaces map to the '.'-separated full name. */
  @Test
  void dollarTerminatedNestedStaticClassNamespaceCompatibility() {
    ReflectData data = ReflectData.get();
    // Older versions of Avro generated this namespace on nested records.
    Schema s = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).parse(
        "{\"type\":\"record\",\"name\":\"AnotherSampleRecord\",\"namespace\":\"org.apache.avro.reflect.TestReflect$SampleRecord\",\"fields\":[]}");
    assertThat(data.getSchema(data.getClass(s)).getFullName(),
        is("org.apache.avro.reflect.TestReflect.SampleRecord.AnotherSampleRecord"));
  }
  // single field-level alias, no namespace (see avroAliasOnField)
  private static class ClassWithAliasOnField {
    @AvroAlias(alias = "aliasName")
    int primitiveField;
  }
  // repeated field-level aliases (see testMultipleFieldAliases)
  private static class ClassWithMultipleAliasesOnField {
    @AvroAlias(alias = "alias1")
    @AvroAlias(alias = "alias2")
    int primitiveField;
  }
  // field aliases may not declare a namespace; reflecting this class must fail
  private static class ClassWithAliasAndNamespaceOnField {
    @AvroAlias(alias = "aliasName", space = "forbidden.space.entry")
    int primitiveField;
  }
  /** A field-level @AvroAlias becomes an alias on the reflected field. */
  @Test
  void avroAliasOnField() {
    Schema expectedSchema = SchemaBuilder.record(ClassWithAliasOnField.class.getSimpleName())
        .namespace("org.apache.avro.reflect.TestReflect").fields().name("primitiveField").aliases("aliasName")
        .type(Schema.create(org.apache.avro.Schema.Type.INT)).noDefault().endRecord();
    check(ClassWithAliasOnField.class, expectedSchema.toString());
  }
@Test
void namespaceDefinitionOnFieldAliasMustThrowException() {
assertThrows(AvroRuntimeException.class, () -> {
ReflectData.get().getSchema(ClassWithAliasAndNamespaceOnField.class);
});
}
@Test
public void testMultipleFieldAliases() {
Field field = new Field("primitiveField", Schema.create(Schema.Type.INT));
field.addAlias("alias1");
field.addAlias("alias2");
Schema avroMultiMeta = Schema.createRecord("ClassWithMultipleAliasesOnField", null,
"org.apache.avro.reflect.TestReflect", false, Arrays.asList(field));
Schema schema = ReflectData.get().getSchema(ClassWithMultipleAliasesOnField.class);
assertEquals(avroMultiMeta, schema);
}
  // Optional<T> fields reflect to a nullable union (see testOptional)
  private static class OptionalTest {
    Optional<Integer> foo;
  }
  /** An Optional field maps to ["null", "int"] with a null default. */
  @Test
  public void testOptional() {
    check(OptionalTest.class,
        "{\"type\":\"record\",\"name\":\"OptionalTest\","
            + "\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":["
            + "{\"name\":\"foo\",\"type\":[\"null\",\"int\"],\"default\":null}]}");
  }
  // @AvroDefault supplies the field's schema default value (see avroDefault)
  private static class DefaultTest {
    @AvroDefault("1")
    int foo;
  }
  /** The @AvroDefault value appears as the field default in the schema. */
  @Test
  void avroDefault() {
    check(DefaultTest.class,
        "{\"type\":\"record\",\"name\":\"DefaultTest\","
            + "\"namespace\":\"org.apache.avro.reflect.TestReflect\",\"fields\":["
            + "{\"name\":\"foo\",\"type\":\"int\",\"default\":1}]}");
  }
public static class NullableBytesTest {
@Nullable
byte[] bytes;
NullableBytesTest() {
}
NullableBytesTest(byte[] bytes) {
this.bytes = bytes;
}
@Override
public boolean equals(Object obj) {
return obj instanceof NullableBytesTest && Arrays.equals(((NullableBytesTest) obj).bytes, this.bytes);
}
}
  /** A non-null nullable byte[] must round-trip unchanged. */
  @Test
  void nullableByteArrayNotNullValue() throws Exception {
    checkReadWrite(new NullableBytesTest("foo".getBytes(UTF_8)));
  }
  /** A null nullable byte[] must round-trip as null. */
  @Test
  void nullableByteArrayNullValue() throws Exception {
    checkReadWrite(new NullableBytesTest());
  }
  // enum referenced by DocTest to exercise docs on enum-typed fields
  private enum DocTestEnum {
    ENUM_1, ENUM_2
  }
  // @AvroDoc on the class and on each field should surface as schema "doc"
  @AvroDoc("DocTest class docs")
  private static class DocTest {
    @AvroDoc("Some Documentation")
    int foo;
    @AvroDoc("Some other Documentation")
    DocTestEnum enums;
    @AvroDoc("And again")
    DefaultTest defaultTest;
  }
  /** Class- and field-level @AvroDoc values appear as "doc" in the schema. */
  @Test
  void avroDoc() {
    // note: the expected field order (defaultTest, enums, foo) differs from the
    // declaration order in DocTest — appears sorted; derived from the JSON below
    check(DocTest.class,
        "{\"type\":\"record\",\"name\":\"DocTest\",\"namespace\":\"org.apache.avro.reflect.TestReflect\","
            + "\"doc\":\"DocTest class docs\"," + "\"fields\":["
            + "{\"name\":\"defaultTest\",\"type\":{\"type\":\"record\",\"name\":\"DefaultTest\","
            + "\"fields\":[{\"name\":\"foo\",\"type\":\"int\",\"default\":1}]},\"doc\":\"And again\"},"
            + "{\"name\":\"enums\",\"type\":{\"type\":\"enum\",\"name\":\"DocTestEnum\","
            + "\"symbols\":[\"ENUM_1\",\"ENUM_2\"]},\"doc\":\"Some other Documentation\"},"
            + "{\"name\":\"foo\",\"type\":\"int\",\"doc\":\"Some Documentation\"}" + "]}");
  }
}
| 7,184 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectionUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

import org.junit.jupiter.api.Test;
/**
 * Verifies that {@link ReflectionUtil#getFieldAccess()} works both when
 * sun.misc.Unsafe is available and when it cannot be loaded.
 */
public class TestReflectionUtil {

  @Test
  void unsafeUtil() {
    new Tester().checkUnsafe();
  }

  @Test
  void unsafeWhenNotExists() throws Exception {
    // load Tester through a class loader that hides sun.misc.Unsafe, then
    // invoke checkUnsafe() reflectively inside that isolated environment
    ClassLoader cl = new NoUnsafe();
    Class<?> testerClass = cl.loadClass(Tester.class.getName());
    testerClass.getDeclaredMethod("checkUnsafe").invoke(testerClass.getDeclaredConstructor().newInstance());
  }

  public static final class Tester {
    public Tester() {
    }

    public void checkUnsafe() {
      ReflectionUtil.getFieldAccess();
    }
  }

  /**
   * Class loader that pretends sun.misc.Unsafe does not exist and redefines
   * org.apache.avro classes itself so they resolve against this loader.
   */
  private static final class NoUnsafe extends ClassLoader {
    private final ClassLoader parent = TestReflectionUtil.class.getClassLoader();

    @Override
    public java.lang.Class<?> loadClass(String name) throws ClassNotFoundException {
      Class<?> clazz = findLoadedClass(name);
      if (clazz != null) {
        return clazz;
      }
      if ("sun.misc.Unsafe".equals(name)) {
        throw new ClassNotFoundException(name);
      }
      if (!name.startsWith("org.apache.avro.")) {
        return parent.loadClass(name);
      }
      // Read the class bytes fully. The original used a single fixed 10240-byte
      // read, which leaked the stream, could NPE on a missing resource, and
      // silently truncated larger class files.
      byte[] bytecode;
      try (InputStream data = parent.getResourceAsStream(name.replace('.', '/') + ".class")) {
        if (data == null) {
          throw new ClassNotFoundException(name);
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        byte[] chunk = new byte[8192];
        int read;
        while ((read = data.read(chunk)) != -1) {
          buffer.write(chunk, 0, read);
        }
        bytecode = buffer.toByteArray();
      } catch (IOException e) {
        throw new ClassNotFoundException(name, e); // keep the cause
      }
      clazz = defineClass(name, bytecode, 0, bytecode.length);
      resolveClass(clazz);
      return clazz;
    }
  }
}
| 7,185 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectLogicalTypes.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static org.junit.jupiter.api.Assertions.*;
import java.io.File;
import java.io.IOException;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import org.apache.avro.Conversion;
import org.apache.avro.Conversions;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.data.TimeConversions;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.specific.SpecificData;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
/**
* Tests various logical types * string => UUID * fixed and bytes => Decimal *
* record => Pair
*/
public class TestReflectLogicalTypes {
  @TempDir
  public File temp;

  // shared model; conversions are registered once in addUUID() before all tests
  public static final ReflectData REFLECT = new ReflectData();

  @BeforeAll
  public static void addUUID() {
    REFLECT.addLogicalTypeConversion(new Conversions.UUIDConversion());
    REFLECT.addLogicalTypeConversion(new Conversions.DecimalConversion());
    REFLECT.addLogicalTypeConversion(new TimeConversions.LocalTimestampMillisConversion());
  }
  /** A List&lt;UUID&gt; field reflects to an array of uuid-annotated strings. */
  @Test
  void reflectedSchema() {
    Schema expected = SchemaBuilder.record(RecordWithUUIDList.class.getName()).fields().name("uuids").type().array()
        .items().stringType().noDefault().endRecord();
    expected.getField("uuids").schema().addProp(SpecificData.CLASS_PROP, List.class.getName());
    LogicalTypes.uuid().addToSchema(expected.getField("uuids").schema().getElementType());
    Schema actual = REFLECT.getSchema(RecordWithUUIDList.class);
    assertEquals(expected, actual, "Should use the UUID logical type");
  }
  // this can be static because the schema only comes from reflection
  public static class DecimalRecordBytes {
    // scale is required and will not be set by the conversion
    @AvroSchema("{" + "\"type\": \"bytes\"," + "\"logicalType\": \"decimal\"," + "\"precision\": 9," + "\"scale\": 2"
        + "}")
    private BigDecimal decimal;

    @Override
    public boolean equals(Object other) {
      if (this == other) {
        return true;
      }
      if (other == null || getClass() != other.getClass()) {
        return false;
      }
      DecimalRecordBytes that = (DecimalRecordBytes) other;
      if (decimal == null) {
        return (that.decimal == null);
      }
      return decimal.equals(that.decimal);
    }

    @Override
    public int hashCode() {
      return decimal != null ? decimal.hashCode() : 0;
    }
  }
@Test
void decimalBytes() throws IOException {
Schema schema = REFLECT.getSchema(DecimalRecordBytes.class);
assertEquals("org.apache.avro.reflect.TestReflectLogicalTypes", schema.getNamespace(),
"Should have the correct record name");
assertEquals("DecimalRecordBytes", schema.getName(), "Should have the correct record name");
assertEquals(LogicalTypes.decimal(9, 2), LogicalTypes.fromSchema(schema.getField("decimal").schema()),
"Should have the correct logical type");
DecimalRecordBytes record = new DecimalRecordBytes();
record.decimal = new BigDecimal("3.14");
File test = write(REFLECT, schema, record);
assertEquals(Collections.singletonList(record), read(REFLECT.createDatumReader(schema), test),
"Should match the decimal after round trip");
}
  // this can be static because the schema only comes from reflection
  public static class DecimalRecordFixed {
    // scale is required and will not be set by the conversion
    @AvroSchema("{" + "\"name\": \"decimal_9\"," + "\"type\": \"fixed\"," + "\"size\": 4,"
        + "\"logicalType\": \"decimal\"," + "\"precision\": 9," + "\"scale\": 2" + "}")
    private BigDecimal decimal;

    @Override
    public boolean equals(Object other) {
      if (this == other) {
        return true;
      }
      if (other == null || getClass() != other.getClass()) {
        return false;
      }
      DecimalRecordFixed that = (DecimalRecordFixed) other;
      if (decimal == null) {
        return (that.decimal == null);
      }
      return decimal.equals(that.decimal);
    }

    @Override
    public int hashCode() {
      return decimal != null ? decimal.hashCode() : 0;
    }
  }
@Test
void decimalFixed() throws IOException {
Schema schema = REFLECT.getSchema(DecimalRecordFixed.class);
assertEquals("org.apache.avro.reflect.TestReflectLogicalTypes", schema.getNamespace(),
"Should have the correct record name");
assertEquals("DecimalRecordFixed", schema.getName(), "Should have the correct record name");
assertEquals(LogicalTypes.decimal(9, 2), LogicalTypes.fromSchema(schema.getField("decimal").schema()),
"Should have the correct logical type");
DecimalRecordFixed record = new DecimalRecordFixed();
record.decimal = new BigDecimal("3.14");
File test = write(REFLECT, schema, record);
assertEquals(Collections.singletonList(record), read(REFLECT.createDatumReader(schema), test),
"Should match the decimal after round trip");
}
public static class Pair<X, Y> {
private final X first;
private final Y second;
private Pair(X first, Y second) {
this.first = first;
this.second = second;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
Pair<?, ?> that = (Pair<?, ?>) other;
if (first == null) {
if (that.first != null) {
return false;
}
} else if (first.equals(that.first)) {
return false;
}
if (second == null) {
return that.second == null;
} else
return !second.equals(that.second);
}
@Override
public int hashCode() {
return Arrays.hashCode(new Object[] { first, second });
}
public static <X, Y> Pair<X, Y> of(X first, Y second) {
return new Pair<>(first, second);
}
}
  // record whose field schema is forced (via @AvroSchema) to a record carrying
  // the custom "pair" logical type (see pairRecord)
  public static class PairRecord {
    @AvroSchema("{" + "\"name\": \"Pair\"," + "\"type\": \"record\"," + "\"fields\": ["
        + " {\"name\": \"x\", \"type\": \"long\"}," + " {\"name\": \"y\", \"type\": \"long\"}" + " ],"
        + "\"logicalType\": \"pair\"" + "}")
    Pair<Long, Long> pair;
  }
@Test
@SuppressWarnings("unchecked")
void pairRecord() throws IOException {
ReflectData model = new ReflectData();
model.addLogicalTypeConversion(new Conversion<Pair>() {
@Override
public Class<Pair> getConvertedType() {
return Pair.class;
}
@Override
public String getLogicalTypeName() {
return "pair";
}
@Override
public Pair fromRecord(IndexedRecord value, Schema schema, LogicalType type) {
return Pair.of(value.get(0), value.get(1));
}
@Override
public IndexedRecord toRecord(Pair value, Schema schema, LogicalType type) {
GenericData.Record record = new GenericData.Record(schema);
record.put(0, value.first);
record.put(1, value.second);
return record;
}
});
LogicalTypes.register("pair", new LogicalTypes.LogicalTypeFactory() {
private final LogicalType PAIR = new LogicalType("pair");
@Override
public LogicalType fromSchema(Schema schema) {
return PAIR;
}
@Override
public String getTypeName() {
return "pair";
}
});
Schema schema = model.getSchema(PairRecord.class);
assertEquals("org.apache.avro.reflect.TestReflectLogicalTypes", schema.getNamespace(),
"Should have the correct record name");
assertEquals("PairRecord", schema.getName(), "Should have the correct record name");
assertEquals("pair", LogicalTypes.fromSchema(schema.getField("pair").schema()).getName(),
"Should have the correct logical type");
PairRecord record = new PairRecord();
record.pair = Pair.of(34L, 35L);
List<PairRecord> expected = new ArrayList<>();
expected.add(record);
File test = write(model, schema, record);
Pair<Long, Long> actual = ((PairRecord) TestReflectLogicalTypes
.<PairRecord>read(model.createDatumReader(schema), test).get(0)).pair;
assertEquals(34L, (long) actual.first, "Data should match after serialization round-trip");
assertEquals(35L, (long) actual.second, "Data should match after serialization round-trip");
}
  /** Strings written with a uuid logical type are read back as UUID objects. */
  @Test
  void readUUID() throws IOException {
    Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().requiredString("uuid")
        .endRecord();
    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    RecordWithStringUUID r1 = new RecordWithStringUUID();
    r1.uuid = u1.toString();
    RecordWithStringUUID r2 = new RecordWithStringUUID();
    r2.uuid = u2.toString();
    List<RecordWithUUID> expected = Arrays.asList(new RecordWithUUID(), new RecordWithUUID());
    expected.get(0).uuid = u1;
    expected.get(1).uuid = u2;
    File test = write(ReflectData.get().getSchema(RecordWithStringUUID.class), r1, r2);
    assertEquals(expected, read(REFLECT.createDatumReader(uuidSchema), test), "Should convert Strings to UUIDs");
    // verify that the field's type overrides the logical type
    Schema uuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields().requiredString("uuid")
        .endRecord();
    LogicalTypes.uuid().addToSchema(uuidStringSchema.getField("uuid").schema());
    assertEquals(Arrays.asList(r1, r2), read(REFLECT.createDatumReader(uuidStringSchema), test),
        "Should not convert to UUID if accessor is String");
  }
  /** UUID fields written through the conversion can be read back as plain Strings. */
  @Test
  void writeUUID() throws IOException {
    Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().requiredString("uuid")
        .endRecord();
    LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    RecordWithUUID r1 = new RecordWithUUID();
    r1.uuid = u1;
    RecordWithUUID r2 = new RecordWithUUID();
    r2.uuid = u2;
    List<RecordWithStringUUID> expected = Arrays.asList(new RecordWithStringUUID(), new RecordWithStringUUID());
    expected.get(0).uuid = u1.toString();
    expected.get(1).uuid = u2.toString();
    File test = write(REFLECT, uuidSchema, r1, r2);
    // verify that the field's type overrides the logical type
    Schema uuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields().requiredString("uuid")
        .endRecord();
    assertEquals(expected, read(REFLECT.createDatumReader(uuidStringSchema), test),
        "Should read uuid as String without UUID conversion");
    LogicalTypes.uuid().addToSchema(uuidStringSchema.getField("uuid").schema());
    assertEquals(expected, read(ReflectData.get().createDatumReader(uuidStringSchema), test),
        "Should read uuid as String without UUID logical type");
  }
  /** UUIDs written through a nullable (union) field read back as Strings. */
  @Test
  void writeNullableUUID() throws IOException {
    Schema nullableUuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().optionalString("uuid")
        .endRecord();
    // the string branch of the ["null", "string"] union carries the logical type
    LogicalTypes.uuid().addToSchema(nullableUuidSchema.getField("uuid").schema().getTypes().get(1));
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    RecordWithUUID r1 = new RecordWithUUID();
    r1.uuid = u1;
    RecordWithUUID r2 = new RecordWithUUID();
    r2.uuid = u2;
    List<RecordWithStringUUID> expected = Arrays.asList(new RecordWithStringUUID(), new RecordWithStringUUID());
    expected.get(0).uuid = u1.toString();
    expected.get(1).uuid = u2.toString();
    File test = write(REFLECT, nullableUuidSchema, r1, r2);
    // verify that the field's type overrides the logical type
    Schema nullableUuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields()
        .optionalString("uuid").endRecord();
    assertEquals(expected, read(ReflectData.get().createDatumReader(nullableUuidStringSchema), test),
        "Should read uuid as String without UUID conversion");
  }
  /** Data written with a nullable uuid field can be read with a required-string schema. */
  @Test
  void writeNullableUUIDReadRequiredString() throws IOException {
    Schema nullableUuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().optionalString("uuid")
        .endRecord();
    // the string branch of the ["null", "string"] union carries the logical type
    LogicalTypes.uuid().addToSchema(nullableUuidSchema.getField("uuid").schema().getTypes().get(1));
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    RecordWithUUID r1 = new RecordWithUUID();
    r1.uuid = u1;
    RecordWithUUID r2 = new RecordWithUUID();
    r2.uuid = u2;
    List<RecordWithStringUUID> expected = Arrays.asList(new RecordWithStringUUID(), new RecordWithStringUUID());
    expected.get(0).uuid = u1.toString();
    expected.get(1).uuid = u2.toString();
    File test = write(REFLECT, nullableUuidSchema, r1, r2);
    // verify that the field's type overrides the logical type
    Schema uuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields().requiredString("uuid")
        .endRecord();
    assertEquals(expected, read(REFLECT.createDatumReader(uuidStringSchema), test),
        "Should read uuid as String without UUID conversion");
  }
  /**
   * With the Unsafe field accessor, a uuid-typed value can be stuffed into a
   * String field without conversion, so reading yields a String.
   */
  @Test
  void readUUIDMissingLogicalTypeUnsafe() throws IOException {
    String unsafeValue = System.getProperty("avro.disable.unsafe");
    try {
      // only one FieldAccess can be set per JVM
      System.clearProperty("avro.disable.unsafe");
      // skip the test unless the Unsafe-based accessor is in effect
      Assumptions.assumeTrue(ReflectionUtil.getFieldAccess() instanceof FieldAccessUnsafe);
      Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().requiredString("uuid")
          .endRecord();
      LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
      UUID u1 = UUID.randomUUID();
      RecordWithStringUUID r1 = new RecordWithStringUUID();
      r1.uuid = u1.toString();
      File test = write(ReflectData.get().getSchema(RecordWithStringUUID.class), r1);
      // read with the default model, which has no UUID conversion registered
      RecordWithUUID datum = (RecordWithUUID) read(ReflectData.get().createDatumReader(uuidSchema), test).get(0);
      Object uuid = datum.uuid;
      assertTrue(uuid instanceof String, "UUID should be a String (unsafe)");
    } finally {
      // restore the property so later tests see the original setting
      if (unsafeValue != null) {
        System.setProperty("avro.disable.unsafe", unsafeValue);
      }
    }
  }
  /**
   * With the reflection-based field accessor, assigning a non-String to a
   * String field fails, so reading without the conversion must throw.
   */
  @Test
  void readUUIDMissingLogicalTypeReflect() throws IOException {
    final String unsafeValue = System.getProperty("avro.disable.unsafe");
    // only one FieldAccess can be set per JVM
    System.setProperty("avro.disable.unsafe", "true");
    try {
      // skip the test unless the reflection-based accessor is in effect
      Assumptions.assumeTrue(ReflectionUtil.getFieldAccess() instanceof FieldAccessReflect);
      Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().requiredString("uuid")
          .endRecord();
      LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
      UUID u1 = UUID.randomUUID();
      RecordWithStringUUID r1 = new RecordWithStringUUID();
      r1.uuid = u1.toString();
      File test = write(ReflectData.get().getSchema(RecordWithStringUUID.class), r1);
      assertThrows(IllegalArgumentException.class,
          () -> read(ReflectData.get().createDatumReader(uuidSchema), test).get(0));
    } finally {
      // restore the original property value (or clear it if it was unset)
      if (unsafeValue != null) {
        System.setProperty("avro.disable.unsafe", unsafeValue);
      } else {
        System.clearProperty("avro.disable.unsafe");
      }
    }
  }
  /** Writing UUID values without the UUID conversion registered must fail. */
  @Test
  void writeUUIDMissingLogicalType() throws IOException {
    assertThrows(DataFileWriter.AppendWriteException.class, () -> {
      Schema uuidSchema = SchemaBuilder.record(RecordWithUUID.class.getName()).fields().requiredString("uuid")
          .endRecord();
      LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
      UUID u1 = UUID.randomUUID();
      UUID u2 = UUID.randomUUID();
      RecordWithUUID r1 = new RecordWithUUID();
      r1.uuid = u1;
      RecordWithUUID r2 = new RecordWithUUID();
      r2.uuid = u2;
      // write without using REFLECT, which has the logical type
      File test = write(uuidSchema, r1, r2);
      // verify that the field's type overrides the logical type
      Schema uuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields()
          .requiredString("uuid").endRecord();
      // this fails with an AppendWriteException wrapping ClassCastException
      // because the UUID isn't converted to a CharSequence expected internally
      read(ReflectData.get().createDatumReader(uuidStringSchema), test);
    });
  }
@Test
void readUUIDGenericRecord() throws IOException {
Schema uuidSchema = SchemaBuilder.record("RecordWithUUID").fields().requiredString("uuid").endRecord();
LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
UUID u1 = UUID.randomUUID();
UUID u2 = UUID.randomUUID();
RecordWithStringUUID r1 = new RecordWithStringUUID();
r1.uuid = u1.toString();
RecordWithStringUUID r2 = new RecordWithStringUUID();
r2.uuid = u2.toString();
List<GenericData.Record> expected = Arrays.asList(new GenericData.Record(uuidSchema),
new GenericData.Record(uuidSchema));
expected.get(0).put("uuid", u1);
expected.get(1).put("uuid", u2);
File test = write(ReflectData.get().getSchema(RecordWithStringUUID.class), r1, r2);
assertEquals(expected, read(REFLECT.createDatumReader(uuidSchema), test), "Should convert Strings to UUIDs");
// verify that the field's type overrides the logical type
Schema uuidStringSchema = SchemaBuilder.record(RecordWithStringUUID.class.getName()).fields().requiredString("uuid")
.endRecord();
LogicalTypes.uuid().addToSchema(uuidSchema.getField("uuid").schema());
assertEquals(Arrays.asList(r1, r2), read(REFLECT.createDatumReader(uuidStringSchema), test),
"Should not convert to UUID if accessor is String");
}
  /** An array of uuid-annotated strings reads back as a UUID[] field. */
  @Test
  void readUUIDArray() throws IOException {
    Schema uuidArraySchema = SchemaBuilder.record(RecordWithUUIDArray.class.getName()).fields().name("uuids").type()
        .array().items().stringType().noDefault().endRecord();
    LogicalTypes.uuid().addToSchema(uuidArraySchema.getField("uuids").schema().getElementType());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    GenericRecord r = new GenericData.Record(uuidArraySchema);
    r.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
    RecordWithUUIDArray expected = new RecordWithUUIDArray();
    expected.uuids = new UUID[] { u1, u2 };
    File test = write(uuidArraySchema, r);
    assertEquals(expected, read(REFLECT.createDatumReader(uuidArraySchema), test).get(0),
        "Should convert Strings to UUIDs");
  }
  /** A UUID[] field written through the conversion reads back as Strings. */
  @Test
  void writeUUIDArray() throws IOException {
    Schema uuidArraySchema = SchemaBuilder.record(RecordWithUUIDArray.class.getName()).fields().name("uuids").type()
        .array().items().stringType().noDefault().endRecord();
    LogicalTypes.uuid().addToSchema(uuidArraySchema.getField("uuids").schema().getElementType());
    Schema stringArraySchema = SchemaBuilder.record("RecordWithUUIDArray").fields().name("uuids").type().array().items()
        .stringType().noDefault().endRecord();
    stringArraySchema.getField("uuids").schema().addProp(SpecificData.CLASS_PROP, List.class.getName());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    GenericRecord expected = new GenericData.Record(stringArraySchema);
    List<String> uuids = new ArrayList<>();
    uuids.add(u1.toString());
    uuids.add(u2.toString());
    expected.put("uuids", uuids);
    RecordWithUUIDArray r = new RecordWithUUIDArray();
    r.uuids = new UUID[] { u1, u2 };
    File test = write(REFLECT, uuidArraySchema, r);
    assertEquals(expected, read(ReflectData.get().createDatumReader(stringArraySchema), test).get(0),
        "Should read UUIDs as Strings");
  }
  /** An array of uuid-annotated strings reads back as a List&lt;UUID&gt; field. */
  @Test
  void readUUIDList() throws IOException {
    Schema uuidListSchema = SchemaBuilder.record(RecordWithUUIDList.class.getName()).fields().name("uuids").type()
        .array().items().stringType().noDefault().endRecord();
    // java-class prop makes the array reflect to a List rather than an array
    uuidListSchema.getField("uuids").schema().addProp(SpecificData.CLASS_PROP, List.class.getName());
    LogicalTypes.uuid().addToSchema(uuidListSchema.getField("uuids").schema().getElementType());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    GenericRecord r = new GenericData.Record(uuidListSchema);
    r.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
    RecordWithUUIDList expected = new RecordWithUUIDList();
    expected.uuids = Arrays.asList(u1, u2);
    File test = write(uuidListSchema, r);
    assertEquals(expected, read(REFLECT.createDatumReader(uuidListSchema), test).get(0),
        "Should convert Strings to UUIDs");
  }
  /** A List&lt;UUID&gt; field written through the conversion reads back as Strings. */
  @Test
  void writeUUIDList() throws IOException {
    Schema uuidListSchema = SchemaBuilder.record(RecordWithUUIDList.class.getName()).fields().name("uuids").type()
        .array().items().stringType().noDefault().endRecord();
    // java-class prop makes the array reflect to a List rather than an array
    uuidListSchema.getField("uuids").schema().addProp(SpecificData.CLASS_PROP, List.class.getName());
    LogicalTypes.uuid().addToSchema(uuidListSchema.getField("uuids").schema().getElementType());
    Schema stringArraySchema = SchemaBuilder.record("RecordWithUUIDArray").fields().name("uuids").type().array().items()
        .stringType().noDefault().endRecord();
    stringArraySchema.getField("uuids").schema().addProp(SpecificData.CLASS_PROP, List.class.getName());
    UUID u1 = UUID.randomUUID();
    UUID u2 = UUID.randomUUID();
    GenericRecord expected = new GenericData.Record(stringArraySchema);
    expected.put("uuids", Arrays.asList(u1.toString(), u2.toString()));
    RecordWithUUIDList r = new RecordWithUUIDList();
    r.uuids = Arrays.asList(u1, u2);
    File test = write(REFLECT, uuidListSchema, r);
    assertEquals(expected, read(REFLECT.createDatumReader(stringArraySchema), test).get(0),
        "Should read UUIDs as Strings");
  }
@Test
void reflectedSchemaLocalDateTime() {
Schema actual = REFLECT.getSchema(RecordWithTimestamps.class);
assertEquals("org.apache.avro.reflect", actual.getNamespace(), "Should have the correct record name");
assertEquals("RecordWithTimestamps", actual.getName(), "Should have the correct record name");
assertEquals(Schema.Type.LONG, actual.getField("localDateTime").schema().getType(),
"Should have the correct physical type");
assertEquals(LogicalTypes.localTimestampMillis(),
LogicalTypes.fromSchema(actual.getField("localDateTime").schema()), "Should have the correct logical type");
}
private static <D> List<D> read(DatumReader<D> reader, File file) throws IOException {
List<D> data = new ArrayList<>();
try (FileReader<D> fileReader = new DataFileReader<>(file, reader)) {
for (D datum : fileReader) {
data.add(datum);
}
}
return data;
}
  // Convenience overload: writes the data with the default ReflectData model.
  private <D> File write(Schema schema, D... data) throws IOException {
    return write(ReflectData.get(), schema, data);
  }
  // Writes the given data to a fresh temp file using the supplied data model
  // and schema, returning the file so the caller can read it back.
  @SuppressWarnings("unchecked")
  private <D> File write(GenericData model, Schema schema, D... data) throws IOException {
    File file = File.createTempFile("junit", null, temp);
    DatumWriter<D> writer = model.createDatumWriter(schema);
    try (DataFileWriter<D> fileWriter = new DataFileWriter<>(writer)) {
      fileWriter.create(schema, file);
      for (D datum : data) {
        fileWriter.append(datum);
      }
    }
    return file;
  }
}
class RecordWithUUID {
UUID uuid;
@Override
public int hashCode() {
return uuid.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof RecordWithUUID)) {
return false;
}
RecordWithUUID that = (RecordWithUUID) obj;
return this.uuid.equals(that.uuid);
}
}
class RecordWithStringUUID {
String uuid;
@Override
public int hashCode() {
return uuid.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof RecordWithStringUUID)) {
return false;
}
RecordWithStringUUID that = (RecordWithStringUUID) obj;
return this.uuid.equals(that.uuid);
}
}
class RecordWithUUIDArray {
UUID[] uuids;
@Override
public int hashCode() {
return Arrays.hashCode(uuids);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof RecordWithUUIDArray)) {
return false;
}
RecordWithUUIDArray that = (RecordWithUUIDArray) obj;
return Arrays.equals(this.uuids, that.uuids);
}
}
class RecordWithUUIDList {
List<UUID> uuids;
@Override
public int hashCode() {
return uuids.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof RecordWithUUIDList)) {
return false;
}
RecordWithUUIDList that = (RecordWithUUIDList) obj;
return this.uuids.equals(that.uuids);
}
}
class RecordWithTimestamps {
LocalDateTime localDateTime;
@Override
public int hashCode() {
return Objects.hash(localDateTime);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof RecordWithTimestamps)) {
return false;
}
RecordWithTimestamps that = (RecordWithTimestamps) obj;
return Objects.equals(localDateTime, that.localDateTime);
}
}
| 7,186 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectAllowNulls.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import org.apache.avro.Schema;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link ReflectData.AllowNull}: reflected schemas should turn every
 * reference-typed field into a union with "null", leave primitive fields
 * required, and merge correctly with explicit {@link Nullable} and
 * {@link AvroSchema} annotations.
 */
public class TestReflectAllowNulls {
  /** Record with only Java primitives; none of these fields can hold null. */
  private static class Primitives {
    boolean aBoolean;
    byte aByte;
    short aShort;
    int anInt;
    long aLong;
    float aFloat;
    double aDouble;
  }
  /** Record with boxed types and an object reference; all can hold null. */
  private static class Wrappers {
    Boolean aBoolean;
    Byte aByte;
    Short aShort;
    Integer anInt;
    Long aLong;
    Float aFloat;
    Double aDouble;
    Primitives anObject;
  }
  /** Record combining @Nullable with explicit @AvroSchema union declarations. */
  private static class AllowNullWithNullable {
    @Nullable
    Double aDouble;
    @AvroSchema("[\"double\", \"long\"]")
    Object doubleOrLong;
    @Nullable
    @AvroSchema("[\"double\", \"long\"]")
    Object doubleOrLongOrNull1;
    @AvroSchema("[\"double\", \"long\", \"null\"]")
    Object doubleOrLongOrNull2;
    @Nullable
    @AvroSchema("[\"double\", \"long\", \"null\"]")
    Object doubleOrLongOrNull3;
  }
  /** Primitive fields must stay required (no union with "null"). */
  @Test
  void primitives() {
    // AllowNull only makes fields nullable, so testing must use a base record
    Schema primitives = ReflectData.AllowNull.get().getSchema(Primitives.class);
    assertEquals(requiredSchema(boolean.class), primitives.getField("aBoolean").schema());
    assertEquals(requiredSchema(byte.class), primitives.getField("aByte").schema());
    assertEquals(requiredSchema(short.class), primitives.getField("aShort").schema());
    assertEquals(requiredSchema(int.class), primitives.getField("anInt").schema());
    assertEquals(requiredSchema(long.class), primitives.getField("aLong").schema());
    assertEquals(requiredSchema(float.class), primitives.getField("aFloat").schema());
    assertEquals(requiredSchema(double.class), primitives.getField("aDouble").schema());
  }
  /** Boxed and reference fields must become ["null", type] unions. */
  @Test
  void wrappers() {
    // AllowNull only makes fields nullable, so testing must use a base record
    Schema wrappers = ReflectData.AllowNull.get().getSchema(Wrappers.class);
    assertEquals(nullableSchema(boolean.class), wrappers.getField("aBoolean").schema());
    assertEquals(nullableSchema(byte.class), wrappers.getField("aByte").schema());
    assertEquals(nullableSchema(short.class), wrappers.getField("aShort").schema());
    assertEquals(nullableSchema(int.class), wrappers.getField("anInt").schema());
    assertEquals(nullableSchema(long.class), wrappers.getField("aLong").schema());
    assertEquals(nullableSchema(float.class), wrappers.getField("aFloat").schema());
    assertEquals(nullableSchema(double.class), wrappers.getField("aDouble").schema());
    assertEquals(nullableSchema(Primitives.class), wrappers.getField("anObject").schema());
  }
  /**
   * Explicit @AvroSchema unions get "null" prepended when missing; when the
   * declared union already contains "null", the declared order is preserved.
   */
  @Test
  void allowNullWithNullableAnnotation() {
    Schema withNullable = ReflectData.AllowNull.get().getSchema(AllowNullWithNullable.class);
    assertEquals(nullableSchema(double.class), withNullable.getField("aDouble").schema(),
        "Should produce a nullable double");
    Schema nullableDoubleOrLong = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL),
        Schema.create(Schema.Type.DOUBLE), Schema.create(Schema.Type.LONG)));
    assertEquals(nullableDoubleOrLong, withNullable.getField("doubleOrLong").schema(),
        "Should add null to a non-null union");
    assertEquals(nullableDoubleOrLong, withNullable.getField("doubleOrLongOrNull1").schema(),
        "Should add null to a non-null union");
    Schema doubleOrLongOrNull = Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.DOUBLE),
        Schema.create(Schema.Type.LONG), Schema.create(Schema.Type.NULL)));
    assertEquals(doubleOrLongOrNull, withNullable.getField("doubleOrLongOrNull2").schema(),
        "Should add null to a non-null union");
    assertEquals(doubleOrLongOrNull, withNullable.getField("doubleOrLongOrNull3").schema(),
        "Should add null to a non-null union");
  }
  // Schema for a type as plain ReflectData produces it (no null union).
  private Schema requiredSchema(Class<?> type) {
    return ReflectData.get().getSchema(type);
  }
  // Union schema ["null", schema(type)], the shape AllowNull should emit.
  private Schema nullableSchema(Class<?> type) {
    return Schema.createUnion(Arrays.asList(Schema.create(Schema.Type.NULL), ReflectData.get().getSchema(type)));
  }
}
| 7,187 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import org.apache.avro.AvroTypeException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.util.internal.JacksonUtils;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Unit tests for {@link ReflectData}: schema-cache weakness, generic protocol
 * reflection, generated default values, and inner-class handling.
 */
public class TestReflectData {
  /**
   * Registers a large number of distinct schemas for the same class and checks
   * that the per-class accessor cache releases entries (i.e. does not retain
   * all of them after a GC).
   */
  @Test
  @SuppressWarnings("unchecked")
  void weakSchemaCaching() throws Exception {
    int numSchemas = 1000000;
    for (int i = 0; i < numSchemas; i++) {
      // Create schema
      Schema schema = Schema.createRecord("schema", null, null, false);
      schema.setFields(Collections.emptyList());
      ReflectData.get().getRecordState(new Object(), schema);
    }
    // Reflect the number of schemas currently in the cache
    ReflectData.ClassAccessorData classData = ReflectData.ACCESSOR_CACHE.get(Object.class);
    System.gc(); // Not guaranteed, but seems to be reliable enough
    assertThat("ReflectData cache should release references", classData.bySchema.size(), lessThan(numSchemas));
  }
  /**
   * Reflecting a protocol interface that inherits generic methods should
   * resolve the type variables to the concrete types bound by the subinterface.
   */
  @Test
  void genericProtocol() {
    Protocol protocol = ReflectData.get().getProtocol(FooBarProtocol.class);
    Schema recordSchema = ReflectData.get().getSchema(FooBarReflectiveRecord.class);
    assertThat(protocol.getTypes(), contains(recordSchema));
    assertThat(protocol.getMessages().keySet(), containsInAnyOrder("store", "findById", "exists"));
    Schema.Field storeArgument = protocol.getMessages().get("store").getRequest().getFields().get(0);
    assertThat(storeArgument.schema(), equalTo(recordSchema));
    Schema.Field findByIdArgument = protocol.getMessages().get("findById").getRequest().getFields().get(0);
    assertThat(findByIdArgument.schema(), equalTo(Schema.create(Schema.Type.STRING)));
    Schema findByIdResponse = protocol.getMessages().get("findById").getResponse();
    assertThat(findByIdResponse, equalTo(recordSchema));
    Schema.Field existsArgument = protocol.getMessages().get("exists").getRequest().getFields().get(0);
    assertThat(existsArgument.schema(), equalTo(Schema.create(Schema.Type.STRING)));
  }
  // Generic CRUD-style protocol; type variables are bound by FooBarProtocol.
  private interface CrudProtocol<R, I> extends OtherProtocol<I> {
    void store(R record);
    R findById(I id);
  }
  private interface OtherProtocol<G> {
    boolean exists(G id);
  }
  // Binds R=FooBarReflectiveRecord and I/G=String for the inherited methods.
  private interface FooBarProtocol extends OtherProtocol<String>, CrudProtocol<FooBarReflectiveRecord, String> {
  }
  private static class FooBarReflectiveRecord {
    private String bar;
    private int baz;
  }
  static class User {
    public String first = "Avro";
    public String last = "Apache";
  }
  // Record whose field initializers act as generated schema default values.
  static class Meta {
    public int f1 = 55;
    public int f4;
    public String f2 = "a-string";
    public List<String> f3 = Arrays.asList("one", "two", "three");
    // public User usr = new User();
  }
  /**
   * Generated defaults should round-trip: parsing the schema back must yield
   * field defaults matching the instance's current field values.
   */
  @Test
  void createSchemaDefaultValue() {
    Meta meta = new Meta();
    validateSchema(meta);
    meta.f4 = 0x1987;
    validateSchema(meta);
  }
  // Builds a schema with defaults taken from `meta`, re-parses its JSON form,
  // and compares every parsed default against the corresponding field value.
  private void validateSchema(Meta meta) {
    Schema schema = new ReflectData().setDefaultsGenerated(true).setDefaultGeneratedValue(Meta.class, meta)
        .getSchema(Meta.class);
    final String schemaString = schema.toString(true);
    Schema.Parser parser = new Schema.Parser();
    Schema cloneSchema = parser.parse(schemaString);
    Map testCases = JacksonUtils.objectToMap(meta);
    for (Schema.Field field : cloneSchema.getFields()) {
      assertEquals(field.defaultVal(), testCases.get(field.name()), "Invalid field " + field.name());
    }
  }
  // Intentionally non-static: inner classes cannot be reflected into schemas.
  public class Definition {
    public Map<String, String> tokens;
  }
  /** Reflecting a non-static inner class must fail with AvroTypeException. */
  @Test
  void nonStaticInnerClasses() {
    assertThrows(AvroTypeException.class, () -> {
      ReflectData.get().getSchema(Definition.class);
    });
  }
  /** Reflecting a static nested class must succeed (no exception expected). */
  @Test
  void staticInnerClasses() {
    ReflectData.get().getSchema(Meta.class);
  }
}
| 7,188 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestByteBuffer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.jupiter.api.Assertions.*;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Iterator;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableByteArrayInput;
import org.apache.avro.io.DatumWriter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
/**
 * Round-trips a reflected record containing a {@link ByteBuffer} field through
 * an Avro data file and verifies the bytes survive unchanged (via MD5 digest).
 */
public class TestByteBuffer {
  @TempDir
  public File DIR;
  // Reflected record under test: a name plus binary content.
  static class X {
    String name = "";
    ByteBuffer content;
  }
  // Source file providing the binary payload for each test run.
  File content;
  /** Writes ~1.2 MB of repeated text used as the binary payload. */
  @BeforeEach
  public void before() throws IOException {
    content = new File(DIR.getPath(), "test-content");
    try (FileOutputStream out = new FileOutputStream(content)) {
      for (int i = 0; i < 100000; i++) {
        out.write("hello world\n".getBytes(UTF_8));
      }
    }
  }
  /** Serialize one X, read it back, and compare payload digests. */
  @Test
  void test() throws Exception {
    Schema schema = ReflectData.get().getSchema(X.class);
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    writeOneXAsAvro(schema, bout);
    X record = readOneXFromAvro(schema, bout);
    String expected = getmd5(content);
    String actual = getmd5(record.content);
    assertEquals(expected, actual, "md5 for result differed from input");
  }
  // Deserializes exactly one X from the serialized Avro bytes, asserting that
  // the file contains one and only one record.
  private X readOneXFromAvro(Schema schema, ByteArrayOutputStream bout) throws IOException {
    SeekableByteArrayInput input = new SeekableByteArrayInput(bout.toByteArray());
    ReflectDatumReader<X> datumReader = new ReflectDatumReader<>(schema);
    FileReader<X> reader = DataFileReader.openReader(input, datumReader);
    Iterator<X> it = reader.iterator();
    assertTrue(it.hasNext(), "missing first record");
    X record = it.next();
    assertFalse(it.hasNext(), "should be no more records - only wrote one out");
    return record;
  }
  // Serializes one X whose content is a read-only memory-mapped view of the
  // payload file; the append happens while the channel is still open.
  private void writeOneXAsAvro(Schema schema, ByteArrayOutputStream bout) throws IOException, FileNotFoundException {
    DatumWriter<X> datumWriter = new ReflectDatumWriter<>(schema);
    try (DataFileWriter<X> writer = new DataFileWriter<>(datumWriter)) {
      writer.create(schema, bout);
      X x = new X();
      x.name = "xxx";
      try (FileInputStream fis = new FileInputStream(content)) {
        try (FileChannel channel = fis.getChannel()) {
          long contentLength = content.length();
          // set the content to be a file channel.
          ByteBuffer buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, contentLength);
          x.content = buffer;
          writer.append(x);
        }
      }
      writer.flush();
    }
  }
  // MD5 of a file's full contents, computed over a memory-mapped buffer.
  private String getmd5(File content) throws Exception {
    try (FileInputStream fis = new FileInputStream(content)) {
      try (FileChannel channel = fis.getChannel()) {
        long contentLength = content.length();
        ByteBuffer buffer = channel.map(FileChannel.MapMode.READ_ONLY, 0, contentLength);
        return getmd5(buffer);
      }
    }
  }
  // MD5 of the buffer's remaining bytes, rendered as an unsigned hex string.
  String getmd5(ByteBuffer buffer) throws NoSuchAlgorithmException {
    MessageDigest mdEnc = MessageDigest.getInstance("MD5");
    mdEnc.reset();
    mdEnc.update(buffer);
    return new BigInteger(1, mdEnc.digest()).toString(16);
  }
}
| 7,189 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/reflect/TestReflectDatumWithAnonymousInstances.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.reflect;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/**
* https://issues.apache.org/jira/browse/AVRO-1851
*/
public class TestReflectDatumWithAnonymousInstances {
  private static Pojo pojo;
  /**
   * Builds the fixture entirely from anonymous subclasses; reflection must
   * resolve each anonymous runtime class back to its named superclass (and the
   * constant-specific enum body back to its enum).
   */
  @BeforeAll
  public static void init() {
    // 1. Anonymous instance
    pojo = new Pojo() {
      {
        // 2. Anonymous instance
        Person person = new Person() {
          {
            setAddress("Address");
          }
        };
        setPerson(person);
        // 3. Anonymous instance
        setTestEnum(TestEnum.V);
      }
    };
  }
  // Properly serializes and deserializes a POJO with an enum instance
  // (TestEnum#V)
  @Test
  void handleProperlyEnumInstances() throws IOException {
    byte[] output = serialize(pojo);
    Pojo deserializedPojo = deserialize(output);
    assertEquals(pojo, deserializedPojo);
    assertTrue(deserializedPojo.getTestEnum().is_V());
  }
  // Binary-decodes a Pojo using the null-allowing reflect model.
  private Pojo deserialize(byte[] input) throws IOException {
    ByteArrayInputStream inputStream = new ByteArrayInputStream(input);
    Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null);
    ReflectData reflectData = ReflectData.AllowNull.get();
    ReflectDatumReader<Pojo> reflectDatumReader = new ReflectDatumReader<>(reflectData);
    Schema schema = reflectData.getSchema(Pojo.class);
    reflectDatumReader.setSchema(schema);
    return reflectDatumReader.read(null, decoder);
  }
  // Binary-encodes a Pojo using the null-allowing reflect model.
  private byte[] serialize(Pojo input) throws IOException {
    // Reflect data that supports nulls
    ReflectData reflectData = ReflectData.AllowNull.get();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null);
    ReflectDatumWriter<Pojo> datumWriter = new ReflectDatumWriter<>(Pojo.class, reflectData);
    datumWriter.write(input, encoder);
    encoder.flush();
    return outputStream.toByteArray();
  }
  private static class Pojo {
    private TestEnum testEnum;
    private Person person;
    public TestEnum getTestEnum() {
      return testEnum;
    }
    public void setTestEnum(TestEnum testEnum) {
      this.testEnum = testEnum;
    }
    public Person getPerson() {
      return person;
    }
    public void setPerson(Person person) {
      this.person = person;
    }
    // equals tolerates anonymous subclasses: both sides are unwrapped to their
    // nearest named class before the class-identity comparison.
    @Override
    public boolean equals(Object o) {
      if (this == o)
        return true;
      if (o == null)
        return false;
      Class<?> thisClass = getClass();
      while (thisClass.isAnonymousClass()) {
        thisClass = thisClass.getSuperclass();
      }
      Class<?> oClass = o.getClass();
      while (oClass.isAnonymousClass()) {
        oClass = oClass.getSuperclass();
      }
      if (thisClass != oClass)
        return false;
      Pojo pojo = (Pojo) o;
      if (testEnum != pojo.testEnum)
        return false;
      return person != null ? person.equals(pojo.person) : pojo.person == null;
    }
    @Override
    public int hashCode() {
      int result = testEnum != null ? testEnum.hashCode() : 0;
      result = 31 * result + (person != null ? person.hashCode() : 0);
      return result;
    }
    @Override
    public String toString() {
      return "Pojo{" + "testEnum=" + testEnum + ", person=" + person + '}';
    }
  }
  private static class Person {
    private String name;
    private String address;
    public String getName() {
      return name;
    }
    public void setName(String name) {
      this.name = name;
    }
    public String getAddress() {
      return address;
    }
    public void setAddress(String address) {
      this.address = address;
    }
    // Same anonymous-class-tolerant comparison as Pojo.equals.
    @Override
    public boolean equals(Object o) {
      if (this == o)
        return true;
      if (o == null)
        return false;
      Class<?> thisClass = getClass();
      while (thisClass.isAnonymousClass()) {
        thisClass = thisClass.getSuperclass();
      }
      Class<?> oClass = o.getClass();
      while (oClass.isAnonymousClass()) {
        oClass = oClass.getSuperclass();
      }
      if (thisClass != oClass)
        return false;
      Person person = (Person) o;
      if (name != null ? !name.equals(person.name) : person.name != null)
        return false;
      return address != null ? address.equals(person.address) : person.address == null;
    }
    @Override
    public int hashCode() {
      int result = name != null ? name.hashCode() : 0;
      result = 31 * result + (address != null ? address.hashCode() : 0);
      return result;
    }
    @Override
    public String toString() {
      return "Person{" + "name='" + name + '\'' + ", address='" + address + '\'' + '}';
    }
  }
  // Enum with a constant-specific body: V's runtime class is anonymous, which
  // is exactly the case AVRO-1851 exercises.
  enum TestEnum {
    V {
      @Override
      public boolean is_V() {
        return true;
      }
    };
    public boolean is_V() {
      return false;
    }
  }
}
| 7,190 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestResolvingIO.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import org.apache.avro.Schema;
import org.apache.avro.io.TestValidatingIO.Encoding;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.stream.Stream;
/**
 * Exercises {@link ResolvingDecoder} across encodings, skip levels, and pairs
 * of writer/reader schemas that differ only by legal promotions (int-&gt;long,
 * float-&gt;double, bytes&lt;-&gt;string, union adjustments, etc.). The
 * call-string mnemonics are those of {@link TestValidatingIO}.
 */
public class TestResolvingIO {
  // Reads with the writer schema itself; the reader-schema parameters are
  // intentionally unused here (this variant checks the identity resolution).
  @ParameterizedTest
  @MethodSource("data2")
  public void testIdentical(Encoding encoding, int skip, String jsonWriterSchema, String writerCalls,
      String jsonReaderSchema, String readerCalls) throws IOException {
    performTest(encoding, skip, jsonWriterSchema, writerCalls, jsonWriterSchema, writerCalls);
  }
  // Number of randomized repetitions per schema pair.
  private static final int COUNT = 10;
  // Reads with the (promoted) reader schema, exercising actual resolution.
  @ParameterizedTest
  @MethodSource("data2")
  public void testCompatible(Encoding encoding, int skip, String jsonWriterSchema, String writerCalls,
      String jsonReaderSchema, String readerCalls) throws IOException {
    performTest(encoding, skip, jsonWriterSchema, writerCalls, jsonReaderSchema, readerCalls);
  }
  private void performTest(Encoding encoding, int skipLevel, String jsonWriterSchema, String writerCalls,
      String jsonReaderSchema, String readerCalls) throws IOException {
    for (int i = 0; i < COUNT; i++) {
      testOnce(jsonWriterSchema, writerCalls, jsonReaderSchema, readerCalls, encoding, skipLevel);
    }
  }
  // Generates random values for the writer calls, encodes them with the writer
  // schema, then verifies they decode correctly through a resolving decoder.
  private void testOnce(String jsonWriterSchema, String writerCalls, String jsonReaderSchema, String readerCalls,
      Encoding encoding, int skipLevel) throws IOException {
    Object[] values = TestValidatingIO.randomValues(writerCalls);
    Object[] expected = TestValidatingIO.randomValues(readerCalls);
    Schema writerSchema = new Schema.Parser().parse(jsonWriterSchema);
    byte[] bytes = TestValidatingIO.make(writerSchema, writerCalls, values, encoding);
    Schema readerSchema = new Schema.Parser().parse(jsonReaderSchema);
    TestValidatingIO.print(encoding, skipLevel, writerSchema, readerSchema, values, expected);
    check(writerSchema, readerSchema, bytes, readerCalls, expected, encoding, skipLevel);
  }
  // Wraps the encoded bytes in the matching base decoder, layers a
  // ResolvingDecoder(writer, reader) on top, and replays the reader calls.
  static void check(Schema wsc, Schema rsc, byte[] bytes, String calls, Object[] values, Encoding encoding,
      int skipLevel) throws IOException {
    // TestValidatingIO.dump(bytes);
    // System.out.println(new String(bytes, "UTF-8"));
    Decoder bvi = null;
    switch (encoding) {
    case BINARY:
    case BLOCKING_BINARY:
      bvi = DecoderFactory.get().binaryDecoder(bytes, null);
      break;
    case JSON:
      InputStream in = new ByteArrayInputStream(bytes);
      bvi = new JsonDecoder(wsc, in);
      break;
    }
    Decoder vi = new ResolvingDecoder(wsc, rsc, bvi);
    String msg = "Error in resolving case: w=" + wsc + ", r=" + rsc;
    TestValidatingIO.check(msg, vi, calls, values, skipLevel);
  }
  // Cross-product of encodings x skip levels x schema pairs for both tests.
  public static Stream<Arguments> data2() {
    return TestValidatingIO.convertTo2dStream(encodings, skipLevels, testSchemas());
  }
  static Object[][] encodings = new Object[][] { { Encoding.BINARY }, { Encoding.BLOCKING_BINARY }, { Encoding.JSON } };
  static Object[][] skipLevels = new Object[][] { { -1 }, { 0 }, { 1 }, { 2 } };
  // Each row: writer schema, writer calls, reader schema, reader calls.
  private static Object[][] testSchemas() {
    // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
    return new Object[][] { { "\"int\"", "I", "\"float\"", "F" }, { "\"int\"", "I", "\"double\"", "D" },
        { "\"int\"", "I", "\"long\"", "L" }, { "\"long\"", "L", "\"float\"", "F" },
        { "\"long\"", "L", "\"double\"", "D" }, { "\"float\"", "F", "\"double\"", "D" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[]", "{\"type\":\"array\", \"items\": \"long\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[]", "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
        { "{\"type\":\"array\", \"items\": \"long\"}", "[]", "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
        { "{\"type\":\"array\", \"items\": \"float\"}", "[]", "{\"type\":\"array\", \"items\": \"double\"}", "[]" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]", "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]", "{\"type\":\"array\", \"items\": \"double\"}",
            "[c1sD]" },
        { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]", "{\"type\":\"array\", \"items\": \"double\"}",
            "[c1sD]" },
        { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]", "{\"type\":\"array\", \"items\": \"double\"}",
            "[c1sD]" },
        { "{\"type\":\"map\", \"values\": \"int\"}", "{}", "{\"type\":\"map\", \"values\": \"long\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"int\"}", "{}", "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"long\"}", "{}", "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"float\"}", "{}", "{\"type\":\"map\", \"values\": \"double\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}", "{\"type\":\"map\", \"values\": \"long\"}",
            "{c1sK5L}" },
        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}", "{\"type\":\"map\", \"values\": \"double\"}",
            "{c1sK5D}" },
        { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}", "{\"type\":\"map\", \"values\": \"double\"}",
            "{c1sK5D}" },
        { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}", "{\"type\":\"map\", \"values\": \"double\"}",
            "{c1sK5D}" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"long\"}]}", "L" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"int\"}]}", "I",
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"double\"}]}", "D" },
        // multi-field record with promotions
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f0\", \"type\":\"boolean\"},"
            + "{\"name\":\"f1\", \"type\":\"int\"}," + "{\"name\":\"f2\", \"type\":\"float\"},"
            + "{\"name\":\"f3\", \"type\":\"bytes\"}," + "{\"name\":\"f4\", \"type\":\"string\"}]}", "BIFbS",
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f0\", \"type\":\"boolean\"},"
                + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f2\", \"type\":\"double\"},"
                + "{\"name\":\"f3\", \"type\":\"string\"}," + "{\"name\":\"f4\", \"type\":\"bytes\"}]}",
            "BLDSb" },
        { "[\"int\"]", "U0I", "[\"long\"]", "U0L" }, { "[\"int\"]", "U0I", "[\"double\"]", "U0D" },
        { "[\"long\"]", "U0L", "[\"double\"]", "U0D" }, { "[\"float\"]", "U0F", "[\"double\"]", "U0D" },
        { "\"int\"", "I", "[\"int\"]", "U0I" },
        { "[\"int\"]", "U0I", "\"int\"", "I" }, { "[\"int\"]", "U0I", "\"long\"", "L" },
        { "[\"boolean\", \"int\"]", "U1I", "[\"boolean\", \"long\"]", "U1L" },
        { "[\"boolean\", \"int\"]", "U1I", "[\"long\", \"boolean\"]", "U0L" }, };
  }
}
| 7,191 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestJsonDecoder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
 * Tests for {@link JsonDecoder}: numeric coercion, float precision, field
 * reordering, and type-mismatch errors when decoding JSON-encoded Avro.
 */
public class TestJsonDecoder {
  @Test
  void testInt() throws Exception {
    checkNumeric("int", 1);
  }
  @Test
  void testLong() throws Exception {
    checkNumeric("long", 1L);
  }
  @Test
  void testFloat() throws Exception {
    checkNumeric("float", 1.0F);
  }
  @Test
  void testDouble() throws Exception {
    checkNumeric("double", 1.0);
  }
  // Decodes {"n":1} and {"n":1.0} against a single-field schema of the given
  // numeric type; both JSON spellings must decode to the same typed value.
  private void checkNumeric(String type, Object value) throws Exception {
    String def = "{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"type\":\"" + type + "\",\"name\":\"n\"}]}";
    Schema schema = new Schema.Parser().parse(def);
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    String[] records = { "{\"n\":1}", "{\"n\":1.0}" };
    for (String record : records) {
      Decoder decoder = DecoderFactory.get().jsonDecoder(schema, record);
      GenericRecord r = reader.read(null, decoder);
      assertEquals(value, r.get("n"));
    }
  }
  /** A float must round-trip through JSON encoding without losing precision. */
  @Test
  void testFloatPrecision() throws Exception {
    String def = "{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"type\":\"float\",\"name\":\"n\"}]}";
    Schema schema = new Schema.Parser().parse(def);
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
    float value = 33.33000183105469f;
    GenericData.Record record = new GenericData.Record(schema);
    record.put(0, value);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
    DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
    writer.write(record, encoder);
    encoder.flush();
    // check the whole float precision is kept.
    assertEquals("{\"n\":33.33000183105469}", out.toString());
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, out.toString());
    GenericRecord r = reader.read(null, decoder);
    assertEquals(value + 0d, ((float) r.get("n")) + 0d);
  }
  // Ensure that even if the order of fields in JSON is different from the order
  // in schema,
  // it works.
  @Test
  void reorderFields() throws Exception {
    String w = "{\"type\":\"record\",\"name\":\"R\",\"fields\":" + "[{\"type\":\"long\",\"name\":\"l\"},"
        + "{\"type\":{\"type\":\"array\",\"items\":\"int\"},\"name\":\"a\"}" + "]}";
    Schema ws = new Schema.Parser().parse(w);
    DecoderFactory df = DecoderFactory.get();
    // Two records: one with the array first, one with the long first.
    String data = "{\"a\":[1,2],\"l\":100}{\"l\": 200, \"a\":[1,2]}";
    JsonDecoder in = df.jsonDecoder(ws, data);
    assertEquals(100, in.readLong());
    in.skipArray();
    assertEquals(200, in.readLong());
    in.skipArray();
  }
  /** A non-integral JSON value for an int field must raise AvroTypeException. */
  @Test
  void testIntWithError() throws IOException {
    Schema schema = SchemaBuilder.builder("test").record("example").fields().requiredInt("id").endRecord();
    String record = "{ \"id\": -1.2 }";
    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(schema, schema);
    JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, record);
    Assertions.assertThrows(AvroTypeException.class, () -> reader.read(null, decoder));
  }
}
| 7,192 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO2.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.stream.Stream;
/**
* This class has more exhaustive tests for Blocking IO. The reason we have both
* TestBlockingIO and TestBlockingIO2 is that with the mnemonics used in
 * TestBlockingIO2, it is hard to test skip() operations, and with the test
* infrastructure of TestBlockingIO, it is hard to test enums, unions etc.
*/
public class TestBlockingIO2 {
  /**
   * Encodes the value sequence described by {@code calls} with a blocking binary
   * encoder configured with the given block size, then decodes it again with a
   * plain binary decoder and verifies every value round-trips.
   *
   * @param bufferSize block size configured on the {@link EncoderFactory}
   * @param skipLevel  only used in the failure message here; the -1 passed to
   *                   check() below means nothing is skipped while decoding
   * @param calls      mnemonic call sequence understood by TestValidatingIO
   */
  @ParameterizedTest
  @MethodSource("data")
  public void testScan(int bufferSize, int skipLevel, String calls) throws IOException {
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    EncoderFactory factory = new EncoderFactory().configureBlockSize(bufferSize);
    Encoder encoder = factory.blockingBinaryEncoder(os, null);
    Object[] values = TestValidatingIO.randomValues(calls);
    TestValidatingIO.generate(encoder, calls, values);
    encoder.flush();
    byte[] bb = os.toByteArray();
    Decoder decoder = DecoderFactory.get().binaryDecoder(bb, null);
    String msg = "Case: { " + bufferSize + ", " + skipLevel + ", \"" + calls + "\" }";
    TestValidatingIO.check(msg, decoder, calls, values, -1);
  }

  /**
   * Test cases as { block size, skip level, mnemonic sequence }. Per the
   * mnemonic key in TestValidatingIO: S&lt;n&gt; string of n chars, K&lt;n&gt; map key,
   * b/f bytes/fixed, e/U enum/union, c&lt;n&gt; item count, s start item, [ ] array,
   * { } map. String sizes deliberately straddle the 64/100-byte block sizes.
   */
  public static Stream<Arguments> data() {
    return Stream.of(new Object[][] { { 64, 0, "" }, { 64, 0, "S0" }, { 64, 0, "S3" }, { 64, 0, "S64" },
        { 64, 0, "S65" }, { 64, 0, "S100" }, { 64, 1, "[]" }, { 64, 1, "[c1sS0]" }, { 64, 1, "[c1sS3]" },
        { 64, 1, "[c1sS61]" }, { 64, 1, "[c1sS62]" }, { 64, 1, "[c1sS64]" }, { 64, 1, "[c1sS65]" },
        { 64, 1, "[c2sS0sS0]" }, { 64, 1, "[c2sS0sS10]" }, { 64, 1, "[c2sS0sS63]" }, { 64, 1, "[c2sS0sS64]" },
        { 64, 1, "[c2sS0sS65]" }, { 64, 1, "[c2sS10sS0]" }, { 64, 1, "[c2sS10sS10]" }, { 64, 1, "[c2sS10sS51]" },
        { 64, 1, "[c2sS10sS52]" }, { 64, 1, "[c2sS10sS54]" }, { 64, 1, "[c2sS10sS55]" }, { 64, 1, "[c3sS0sS0sS0]" },
        { 64, 1, "[c3sS0sS0sS63]" }, { 64, 1, "[c3sS0sS0sS64]" }, { 64, 1, "[c3sS0sS0sS65]" },
        { 64, 1, "[c3sS10sS20sS10]" }, { 64, 1, "[c3sS10sS20sS23]" }, { 64, 1, "[c3sS10sS20sS24]" },
        { 64, 1, "[c3sS10sS20sS25]" }, { 64, 1, "[c1s[]]" }, { 64, 1, "[c1s[c1sS0]]" }, { 64, 1, "[c1s[c1sS10]]" },
        { 64, 1, "[c2s[c1sS10]s[]]" }, { 64, 1, "[c2s[c1sS59]s[]]" }, { 64, 1, "[c2s[c1sS60]s[]]" },
        { 64, 1, "[c2s[c1sS100]s[]]" }, { 64, 1, "[c2s[c2sS10sS53]s[]]" }, { 64, 1, "[c2s[c2sS10sS54]s[]]" },
        { 64, 1, "[c2s[c2sS10sS55]s[]]" },
        { 64, 1, "[c2s[]s[c1sS0]]" }, { 64, 1, "[c2s[]s[c1sS10]]" }, { 64, 1, "[c2s[]s[c1sS63]]" },
        { 64, 1, "[c2s[]s[c1sS64]]" }, { 64, 1, "[c2s[]s[c1sS65]]" }, { 64, 1, "[c2s[]s[c2sS10sS53]]" },
        { 64, 1, "[c2s[]s[c2sS10sS54]]" }, { 64, 1, "[c2s[]s[c2sS10sS55]]" },
        { 64, 1, "[c1s[c1sS10]]" }, { 64, 1, "[c1s[c1sS62]]" }, { 64, 1, "[c1s[c1sS63]]" }, { 64, 1, "[c1s[c1sS64]]" },
        { 64, 1, "[c1s[c2sS10sS10]]" }, { 64, 1, "[c1s[c2sS10sS52]]" }, { 64, 1, "[c1s[c2sS10sS53]]" },
        { 64, 1, "[c1s[c2sS10sS54]]" },
        { 64, 1, "[c1s[c1s[c1sS10]]]" }, { 64, 1, "[c1s[c1s[c1sS62]]]" }, { 64, 1, "[c1s[c1s[c1sS63]]]" },
        { 64, 1, "[c1s[c1s[c1sS64]]]" },
        { 64, 1, "[c1s[c1s[c2sS10sS10]]]" }, { 64, 1, "[c1s[c1s[c2sS10sS52]]]" }, { 64, 1, "[c1s[c1s[c2sS10sS53]]]" },
        { 64, 1, "[c1s[c1s[c2sS10sS54]]]" },
        { 64, 1, "[c1s[c2sS10s[c1sS10]]]" }, { 64, 1, "[c1s[c2sS10s[c1sS52]]]" }, { 64, 1, "[c1s[c2sS10s[c1sS53]]]" },
        { 64, 1, "[c1s[c2sS10s[c1sS54]]]" },
        { 64, 1, "{}" }, { 64, 1, "{c1sK5S1}" }, { 64, 1, "{c1sK5[]}" }, { 100, 1, "{c1sK5[]}" },
        { 100, 1, "{c1sK5[c1sS10]}" },
        { 100, 1, "{c1sK5e10}" }, { 100, 1, "{c1sK5U1S10}" }, { 100, 1, "{c1sK5f10S10}" }, { 100, 1, "{c1sK5NS10}" },
        { 100, 1, "{c1sK5BS10}" }, { 100, 1, "{c1sK5IS10}" }, { 100, 1, "{c1sK5LS10}" }, { 100, 1, "{c1sK5FS10}" },
        { 100, 1, "{c1sK5DS10}" }, }).map(Arguments::of);
  }
}
| 7,193 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingIO.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayDeque;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Round-trips JSON-described nested structures (arrays, maps, strings) through
 * the blocking binary encoder and verifies them with a binary decoder, both by
 * full scans and by skipping whole sub-containers at a chosen nesting level.
 */
public class TestBlockingIO {
  /**
   * One test case: the constructor serializes the JSON input through a blocking
   * binary encoder; scan()/skip() then re-walk the same JSON with a fresh parser
   * while decoding the encoded bytes in lock step.
   */
  private static class Tests {
    private final JsonParser parser;
    private final Decoder input;
    private final int depth;

    public Tests(int bufferSize, int depth, String input) throws IOException {
      this.depth = depth;
      byte[] in = input.getBytes(StandardCharsets.UTF_8);
      JsonFactory f = new JsonFactory();
      JsonParser p = f.createParser(new ByteArrayInputStream(input.getBytes(StandardCharsets.UTF_8)));
      ByteArrayOutputStream os = new ByteArrayOutputStream();
      EncoderFactory factory = new EncoderFactory().configureBlockSize(bufferSize);
      Encoder cos = factory.blockingBinaryEncoder(os, null);
      serialize(cos, p, os);
      cos.flush();
      byte[] bb = os.toByteArray();
      // dump(bb);
      this.input = DecoderFactory.get().binaryDecoder(bb, null);
      // A second parser over the same JSON drives the verification walk.
      this.parser = f.createParser(new ByteArrayInputStream(in));
    }

    /**
     * Walks every JSON token and checks the decoder yields the matching
     * structure: container counts are tracked on a stack, strings are compared
     * byte-for-byte.
     */
    public void scan() throws IOException {
      ArrayDeque<S> countStack = new ArrayDeque<>();
      long count = 0;
      while (parser.nextToken() != null) {
        switch (parser.getCurrentToken()) {
        case END_ARRAY:
          assertEquals(0, count);
          assertTrue(countStack.peek().isArray);
          count = countStack.pop().count;
          break;
        case END_OBJECT:
          assertEquals(0, count);
          assertFalse(countStack.peek().isArray);
          count = countStack.pop().count;
          break;
        case START_ARRAY:
          countStack.push(new S(count, true));
          count = input.readArrayStart();
          continue;
        case VALUE_STRING: {
          String s = parser.getText();
          int n = s.getBytes(StandardCharsets.UTF_8).length;
          checkString(s, input, n);
          break;
        }
        case FIELD_NAME: {
          String s = parser.getCurrentName();
          int n = s.getBytes(StandardCharsets.UTF_8).length;
          checkString(s, input, n);
          continue;
        }
        case START_OBJECT:
          countStack.push(new S(count, false));
          count = input.readMapStart();
          if (count < 0) {
            // Negative count means a block size prefix follows (blocking format).
            count = -count;
            input.readLong(); // byte count
          }
          continue;
        default:
          throw new RuntimeException("Unsupported: " + parser.getCurrentToken());
        }
        count--;
        if (count == 0) {
          count = countStack.peek().isArray ? input.arrayNext() : input.mapNext();
        }
      }
    }

    /**
     * Like scan(), but every container (or string) encountered at nesting depth
     * {@code skipLevel} is skipped in the decoder instead of being read, while
     * the JSON parser is advanced past the same span.
     */
    public void skip(int skipLevel) throws IOException {
      ArrayDeque<S> countStack = new ArrayDeque<>();
      long count = 0;
      while (parser.nextToken() != null) {
        switch (parser.getCurrentToken()) {
        case END_ARRAY:
          // assertEquals(0, count);
          assertTrue(countStack.peek().isArray);
          count = countStack.pop().count;
          break;
        case END_OBJECT:
          // assertEquals(0, count);
          assertFalse(countStack.peek().isArray);
          count = countStack.pop().count;
          break;
        case START_ARRAY:
          if (countStack.size() == skipLevel) {
            skipArray(parser, input, depth - skipLevel);
            break;
          } else {
            countStack.push(new S(count, true));
            count = input.readArrayStart();
            continue;
          }
        case VALUE_STRING: {
          if (countStack.size() == skipLevel) {
            input.skipBytes();
          } else {
            String s = parser.getText();
            int n = s.getBytes(StandardCharsets.UTF_8).length;
            checkString(s, input, n);
          }
          break;
        }
        case FIELD_NAME: {
          // Keys are always read (never skipped) so map iteration stays aligned.
          String s = parser.getCurrentName();
          int n = s.getBytes(StandardCharsets.UTF_8).length;
          checkString(s, input, n);
          continue;
        }
        case START_OBJECT:
          if (countStack.size() == skipLevel) {
            skipMap(parser, input, depth - skipLevel);
            break;
          } else {
            countStack.push(new S(count, false));
            count = input.readMapStart();
            if (count < 0) {
              count = -count;
              input.readLong(); // byte count
            }
            continue;
          }
        default:
          throw new RuntimeException("Unsupported: " + parser.getCurrentToken());
        }
        count--;
        if (count == 0) {
          count = countStack.peek().isArray ? input.arrayNext() : input.mapNext();
        }
      }
    }
  }

  /** Debug helper: prints the encoded bytes as hex, 16 per line. */
  protected static void dump(byte[] bb) {
    int col = 0;
    for (byte b : bb) {
      if (col % 16 == 0) {
        System.out.println();
      }
      col++;
      System.out.print(Integer.toHexString(b & 0xff) + " ");
    }
    System.out.println();
  }

  /** Stack entry: remaining item count of the enclosing container and its kind. */
  private static class S {
    public final long count;
    public final boolean isArray;

    public S(long count, boolean isArray) {
      this.count = count;
      this.isArray = isArray;
    }
  }

  /** Full scan: every value is read back and compared. */
  @ParameterizedTest
  @MethodSource("data")
  public void testScan(int size, int depth, String input) throws IOException {
    Tests t = new Tests(size, depth, input);
    t.scan();
  }

  /** Skip containers at depth 0 (the outermost level). */
  @ParameterizedTest
  @MethodSource("data")
  public void testSkip1(int size, int depth, String input) throws IOException {
    testSkip(size, depth, input, 0);
  }

  /** Skip containers at depth 1. */
  @ParameterizedTest
  @MethodSource("data")
  public void testSkip2(int size, int depth, String input) throws IOException {
    testSkip(size, depth, input, 1);
  }

  /** Skip containers at depth 2. */
  @ParameterizedTest
  @MethodSource("data")
  public void testSkip3(int size, int depth, String input) throws IOException {
    testSkip(size, depth, input, 2);
  }

  private void testSkip(int bufferSize, int depth, String input, int skipLevel) throws IOException {
    Tests t = new Tests(bufferSize, depth, input);
    t.skip(skipLevel);
  }

  /**
   * Skips a whole map in the decoder (skipMap returns the remaining block count
   * until it hits 0) and fast-forwards the JSON parser past the same object.
   */
  private static void skipMap(JsonParser parser, Decoder input, int depth) throws IOException {
    for (long l = input.skipMap(); l != 0; l = input.skipMap()) {
      for (long i = 0; i < l; i++) {
        if (depth == 0) {
          input.skipBytes();
        } else {
          skipArray(parser, input, depth - 1);
        }
      }
    }
    parser.skipChildren();
  }

  /** Array counterpart of {@link #skipMap}. */
  private static void skipArray(JsonParser parser, Decoder input, int depth) throws IOException {
    for (long l = input.skipArray(); l != 0; l = input.skipArray()) {
      for (long i = 0; i < l; i++) {
        if (depth == 1) {
          input.skipBytes();
        } else {
          skipArray(parser, input, depth - 1);
        }
      }
    }
    parser.skipChildren();
  }

  /** Reads the next bytes value from the decoder and asserts it equals s (UTF-8, n bytes). */
  private static void checkString(String s, Decoder input, int n) throws IOException {
    ByteBuffer buf = input.readBytes(null);
    assertEquals(n, buf.remaining());
    String s2 = new String(buf.array(), buf.position(), buf.remaining(), StandardCharsets.UTF_8);
    assertEquals(s, s2);
  }

  /**
   * Writes the JSON structure to the encoder: arrays/objects become Avro
   * arrays/maps, strings and field names become bytes values. Item counts are
   * announced one at a time via setItemCount(1)/startItem().
   */
  private static void serialize(Encoder cos, JsonParser p, ByteArrayOutputStream os) throws IOException {
    boolean[] isArray = new boolean[100];
    int[] counts = new int[100];
    int stackTop = -1;
    while (p.nextToken() != null) {
      switch (p.getCurrentToken()) {
      case END_ARRAY:
        assertTrue(isArray[stackTop]);
        cos.writeArrayEnd();
        stackTop--;
        break;
      case END_OBJECT:
        assertFalse(isArray[stackTop]);
        cos.writeMapEnd();
        stackTop--;
        break;
      case START_ARRAY:
        if (stackTop >= 0 && isArray[stackTop]) {
          cos.setItemCount(1);
          cos.startItem();
          counts[stackTop]++;
        }
        cos.writeArrayStart();
        isArray[++stackTop] = true;
        counts[stackTop] = 0;
        continue;
      case VALUE_STRING:
        if (stackTop >= 0 && isArray[stackTop]) {
          cos.setItemCount(1);
          cos.startItem();
          counts[stackTop]++;
        }
        byte[] bb = p.getText().getBytes(StandardCharsets.UTF_8);
        cos.writeBytes(bb);
        break;
      case START_OBJECT:
        if (stackTop >= 0 && isArray[stackTop]) {
          cos.setItemCount(1);
          cos.startItem();
          counts[stackTop]++;
        }
        cos.writeMapStart();
        isArray[++stackTop] = false;
        counts[stackTop] = 0;
        continue;
      case FIELD_NAME:
        // A field name starts a new map entry; the entry's value follows.
        cos.setItemCount(1);
        cos.startItem();
        counts[stackTop]++;
        cos.writeBytes(p.getCurrentName().getBytes(StandardCharsets.UTF_8));
        break;
      default:
        throw new RuntimeException("Unsupported: " + p.getCurrentToken());
      }
    }
  }

  /**
   * Test cases as { block size, max nesting depth, JSON input }. String lengths
   * (via jss) straddle the 64/100-byte block sizes to exercise block overflow.
   */
  public static Stream<Arguments> data() {
    return Stream.of(new Object[][] { { 64, 0, "" }, { 64, 0, jss(0, 'a') }, { 64, 0, jss(3, 'a') },
        { 64, 0, jss(64, 'a') }, { 64, 0, jss(65, 'a') }, { 64, 0, jss(100, 'a') }, { 64, 1, "[]" },
        { 64, 1, "[" + jss(0, 'a') + "]" }, { 64, 1, "[" + jss(3, 'a') + "]" }, { 64, 1, "[" + jss(61, 'a') + "]" },
        { 64, 1, "[" + jss(62, 'a') + "]" }, { 64, 1, "[" + jss(64, 'a') + "]" }, { 64, 1, "[" + jss(65, 'a') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, '0') + "]" }, { 64, 1, "[" + jss(0, 'a') + "," + jss(10, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(63, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(64, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(65, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(0, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(10, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(51, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(52, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(54, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(55, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(0, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(63, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(64, '0') + "]" },
        { 64, 1, "[" + jss(0, 'a') + "," + jss(0, 'a') + "," + jss(65, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(10, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(23, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(24, '0') + "]" },
        { 64, 1, "[" + jss(10, 'a') + "," + jss(20, 'A') + "," + jss(25, '0') + "]" }, { 64, 2, "[[]]" },
        { 64, 2, "[[" + jss(0, 'a') + "], []]" }, { 64, 2, "[[" + jss(10, 'a') + "], []]" },
        { 64, 2, "[[" + jss(59, 'a') + "], []]" }, { 64, 2, "[[" + jss(60, 'a') + "], []]" },
        { 64, 2, "[[" + jss(100, 'a') + "], []]" }, { 64, 2, "[[" + jss(10, '0') + ", " + jss(53, 'a') + "], []]" },
        { 64, 2, "[[" + jss(10, '0') + ", " + jss(54, 'a') + "], []]" },
        { 64, 2, "[[" + jss(10, '0') + ", " + jss(55, 'a') + "], []]" },
        { 64, 2, "[[], [" + jss(0, 'a') + "]]" }, { 64, 2, "[[], [" + jss(10, 'a') + "]]" },
        { 64, 2, "[[], [" + jss(63, 'a') + "]]" }, { 64, 2, "[[], [" + jss(64, 'a') + "]]" },
        { 64, 2, "[[], [" + jss(65, 'a') + "]]" }, { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(53, 'a') + "]]" },
        { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(54, 'a') + "]]" },
        { 64, 2, "[[], [" + jss(10, '0') + ", " + jss(55, 'a') + "]]" },
        { 64, 2, "[[" + jss(10, '0') + "]]" }, { 64, 2, "[[" + jss(62, '0') + "]]" },
        { 64, 2, "[[" + jss(63, '0') + "]]" }, { 64, 2, "[[" + jss(64, '0') + "]]" },
        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(10, '0') + "]]" },
        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(52, '0') + "]]" },
        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(53, '0') + "]]" },
        { 64, 2, "[[" + jss(10, 'a') + ", " + jss(54, '0') + "]]" }, { 64, 3, "[[[" + jss(10, '0') + "]]]" },
        { 64, 3, "[[[" + jss(62, '0') + "]]]" }, { 64, 3, "[[[" + jss(63, '0') + "]]]" },
        { 64, 3, "[[[" + jss(64, '0') + "]]]" }, { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(10, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(52, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + ", " + jss(53, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(54, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(10, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(52, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(53, '0') + "]]]" },
        { 64, 3, "[[[" + jss(10, 'a') + "], [" + jss(54, '0') + "]]]" },
        { 64, 2, "[[\"p\"], [\"mn\"]]" }, { 64, 2, "[[\"pqr\"], [\"mn\"]]" },
        { 64, 2, "[[\"pqrstuvwxyz\"], [\"mn\"]]" }, { 64, 2, "[[\"abc\", \"pqrstuvwxyz\"], [\"mn\"]]" },
        { 64, 2, "[[\"mn\"], [\"\"]]" }, { 64, 2, "[[\"mn\"], \"abc\"]" }, { 64, 2, "[[\"mn\"], \"abcdefghijk\"]" },
        { 64, 2, "[[\"mn\"], \"pqr\", \"abc\"]" }, { 64, 2, "[[\"mn\"]]" }, { 64, 2, "[[\"p\"], [\"mnopqrstuvwx\"]]" },
        { 64, 2, "[[\"pqr\"], [\"mnopqrstuvwx\"]]" }, { 64, 2, "[[\"pqrstuvwxyz\"], [\"mnopqrstuvwx\"]]" },
        { 64, 2, "[[\"abc\"], \"pqrstuvwxyz\", [\"mnopqrstuvwx\"]]" }, { 64, 2, "[[\"mnopqrstuvwx\"], [\"\"]]" },
        { 64, 2, "[[\"mnopqrstuvwx\"], [\"abc\"]]" }, { 64, 2, "[[\"mnopqrstuvwx\"], [\"abcdefghijk\"]]" },
        { 64, 2, "[[\"mnopqrstuvwx\"], [\"pqr\", \"abc\"]]" }, { 100, 2, "[[\"pqr\", \"mnopqrstuvwx\"]]" },
        { 100, 2, "[[\"pqr\", \"ab\", \"mnopqrstuvwx\"]]" }, { 64, 2, "[[[\"pqr\"]], [[\"ab\"], [\"mnopqrstuvwx\"]]]" },
        { 64, 1, "{}" }, { 64, 1, "{\"n\": \"v\"}" }, { 64, 1, "{\"n1\": \"v\", \"n2\": []}" },
        { 100, 1, "{\"n1\": \"v\", \"n2\": []}" }, { 100, 1, "{\"n1\": \"v\", \"n2\": [\"abc\"]}" }, })
        .map(Arguments::of);
  }

  /**
   * Returns a new JSON String {@code n} bytes long with consecutive characters
   * starting with {@code c}.
   */
  private static String jss(final int n, char c) {
    char[] cc = new char[n + 2];
    cc[0] = cc[n + 1] = '"';
    for (int i = 1; i < n + 1; i++) {
      // Cycle through a..z, 0..9, A..Z so every character stays JSON-safe.
      if (c == 'Z') {
        c = 'a';
      } else if (c == 'z') {
        c = '0';
      } else if (c == '9') {
        c = 'A';
      } else {
        c++;
      }
      cc[i] = c;
    }
    return new String(cc);
  }
}
| 7,194 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestValidatingIO.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
public class TestValidatingIO {
  /** Wire formats a test case can be serialized with. */
  enum Encoding {
    BINARY, BLOCKING_BINARY, JSON,
  }

  private static final Logger LOG = LoggerFactory.getLogger(TestValidatingIO.class);
  // How many times each case is repeated per test invocation.
  private static final int COUNT = 1;
@ParameterizedTest
@MethodSource("data")
public void testMain(Encoding enc, int skip, String js, String cls) throws IOException {
for (int i = 0; i < COUNT; i++) {
testOnce(new Schema.Parser().parse(js), cls, skip, enc);
}
}
private void testOnce(Schema schema, String calls, int skipLevel, Encoding encoding) throws IOException {
Object[] values = randomValues(calls);
print(encoding, skipLevel, schema, schema, values, values);
byte[] bytes = make(schema, calls, values, encoding);
check(schema, bytes, calls, values, skipLevel, encoding);
}
public static byte[] make(Schema sc, String calls, Object[] values, Encoding encoding) throws IOException {
EncoderFactory factory = EncoderFactory.get();
ByteArrayOutputStream ba = new ByteArrayOutputStream();
Encoder bvo = null;
switch (encoding) {
case BINARY:
bvo = factory.binaryEncoder(ba, null);
break;
case BLOCKING_BINARY:
bvo = factory.blockingBinaryEncoder(ba, null);
break;
case JSON:
bvo = factory.jsonEncoder(sc, ba);
break;
}
Encoder vo = factory.validatingEncoder(sc, bvo);
generate(vo, calls, values);
vo.flush();
return ba.toByteArray();
}
public static class InputScanner {
private final char[] chars;
private int cpos = 0;
public InputScanner(char[] chars) {
this.chars = chars;
}
public boolean next() {
if (cpos < chars.length) {
cpos++;
}
return cpos != chars.length;
}
public char cur() {
return chars[cpos];
}
public boolean isDone() {
return cpos == chars.length;
}
}
  /**
   * Replays the mnemonic call sequence against the encoder, drawing payloads
   * from {@code values} in order. Mnemonics (see testSchemas): N null, B
   * boolean, I int, L long, F float, D double, S&lt;n&gt;/K&lt;n&gt; string, b&lt;n&gt; bytes,
   * f&lt;n&gt; fixed, e&lt;n&gt; enum, U&lt;n&gt; union branch, [ ] array, { } map, c&lt;n&gt; item
   * count, s start item.
   */
  public static void generate(Encoder vw, String calls, Object[] values) throws IOException {
    InputScanner cs = new InputScanner(calls.toCharArray());
    int p = 0; // next index into values
    while (!cs.isDone()) {
      char c = cs.cur();
      cs.next();
      switch (c) {
      case 'N':
        vw.writeNull();
        break;
      case 'B':
        boolean b = (Boolean) values[p++];
        vw.writeBoolean(b);
        break;
      case 'I':
        int ii = (Integer) values[p++];
        vw.writeInt(ii);
        break;
      case 'L':
        long l = (Long) values[p++];
        vw.writeLong(l);
        break;
      case 'F':
        float f = (Float) values[p++];
        vw.writeFloat(f);
        break;
      case 'D':
        double d = (Double) values[p++];
        vw.writeDouble(d);
        break;
      case 'S': {
        // The trailing length digits are only consumed, not used: the payload
        // was already sized by randomValues().
        extractInt(cs);
        String s = (String) values[p++];
        vw.writeString(new Utf8(s));
        break;
      }
      case 'K': {
        extractInt(cs);
        String s = (String) values[p++];
        vw.writeString(s);
        break;
      }
      case 'b': {
        extractInt(cs);
        byte[] bb = (byte[]) values[p++];
        vw.writeBytes(bb);
        break;
      }
      case 'f': {
        extractInt(cs);
        byte[] bb = (byte[]) values[p++];
        vw.writeFixed(bb);
        break;
      }
      case 'e': {
        int e = extractInt(cs);
        vw.writeEnum(e);
        break;
      }
      case '[':
        vw.writeArrayStart();
        break;
      case ']':
        vw.writeArrayEnd();
        break;
      case '{':
        vw.writeMapStart();
        break;
      case '}':
        vw.writeMapEnd();
        break;
      case 'c':
        vw.setItemCount(extractInt(cs));
        break;
      case 's':
        vw.startItem();
        break;
      case 'U': {
        vw.writeIndex(extractInt(cs));
        break;
      }
      default:
        Assertions.fail();
        break;
      }
    }
  }
  /**
   * Produces a deterministic (fixed-seed) array of payloads, one entry per
   * value-carrying mnemonic in {@code calls}; structural mnemonics contribute
   * nothing to the array.
   */
  public static Object[] randomValues(String calls) {
    Random r = new Random(0L);
    InputScanner cs = new InputScanner(calls.toCharArray());
    List<Object> result = new ArrayList<>();
    while (!cs.isDone()) {
      char c = cs.cur();
      cs.next();
      switch (c) {
      case 'N':
        break;
      case 'B':
        result.add(r.nextBoolean());
        break;
      case 'I':
        result.add(r.nextInt());
        break;
      case 'L':
        result.add((long) r.nextInt());
        break;
      case 'F':
        result.add((float) r.nextInt());
        break;
      case 'D':
        result.add((double) r.nextInt());
        break;
      case 'S':
      case 'K':
        result.add(nextString(r, extractInt(cs)));
        break;
      case 'b':
      case 'f':
        result.add(nextBytes(r, extractInt(cs)));
        break;
      case 'e':
      case 'c':
      case 'U':
        extractInt(cs);
        // deliberate fall-through: these mnemonics carry an inline integer
        // argument but contribute no payload value.
      case '[':
      case ']':
      case '{':
      case '}':
      case 's':
        break;
      default:
        Assertions.fail();
        break;
      }
    }
    return result.toArray();
  }
private static int extractInt(InputScanner sc) {
int r = 0;
while (!sc.isDone()) {
if (Character.isDigit(sc.cur())) {
r = r * 10 + sc.cur() - '0';
sc.next();
} else {
break;
}
}
return r;
}
private static byte[] nextBytes(Random r, int length) {
byte[] bb = new byte[length];
r.nextBytes(bb);
return bb;
}
private static String nextString(Random r, int length) {
char[] cc = new char[length];
for (int i = 0; i < length; i++) {
cc[i] = (char) ('A' + r.nextInt(26));
}
return new String(cc);
}
  /**
   * Decodes {@code bytes} with the decoder matching {@code encoding}, wrapped
   * in a ValidatingDecoder for the schema, and verifies the values via the
   * public check() overload.
   */
  private static void check(Schema sc, byte[] bytes, String calls, Object[] values, final int skipLevel,
      Encoding encoding) throws IOException {
    // dump(bytes);
    // System.out.println(new String(bytes, "UTF-8"));
    Decoder bvi = null;
    switch (encoding) {
    case BINARY:
    case BLOCKING_BINARY:
      // Blocking and plain binary share the same wire format on read.
      bvi = DecoderFactory.get().binaryDecoder(bytes, null);
      break;
    case JSON:
      InputStream in = new ByteArrayInputStream(bytes);
      bvi = new JsonDecoder(sc, in);
    }
    Decoder vi = new ValidatingDecoder(sc, bvi);
    String msg = "Error in validating case: " + sc;
    check(msg, vi, calls, values, skipLevel);
  }
  /**
   * Walks the mnemonic call sequence against the decoder, asserting every
   * decoded value matches the expected payload. Containers encountered at
   * nesting depth {@code skipLevel} are skipped wholesale instead of read
   * (a negative skipLevel disables skipping, since level never goes below 0).
   */
  public static void check(String msg, Decoder vi, String calls, Object[] values, final int skipLevel)
      throws IOException {
    InputScanner cs = new InputScanner(calls.toCharArray());
    int p = 0; // next index into values
    int level = 0; // current container nesting depth
    long[] counts = new long[100];
    boolean[] isArray = new boolean[100];
    boolean[] isEmpty = new boolean[100];
    while (!cs.isDone()) {
      final char c = cs.cur();
      cs.next();
      try {
        switch (c) {
        case 'N':
          vi.readNull();
          break;
        case 'B':
          Assertions.assertEquals(values[p++], vi.readBoolean(), msg);
          break;
        case 'I':
          Assertions.assertEquals(values[p++], vi.readInt(), msg);
          break;
        case 'L':
          Assertions.assertEquals(values[p++], vi.readLong(), msg);
          break;
        case 'F':
          if (!(values[p] instanceof Float))
            Assertions.fail();
          float f = (Float) values[p++];
          // Floating point values are compared with a relative tolerance
          // because the JSON encoding may not round-trip them exactly.
          Assertions.assertEquals(f, vi.readFloat(), Math.abs(f / 1000));
          break;
        case 'D':
          if (!(values[p] instanceof Double))
            Assertions.fail();
          double d = (Double) values[p++];
          Assertions.assertEquals(d, vi.readDouble(), Math.abs(d / 1000), msg);
          break;
        case 'S':
          extractInt(cs);
          if (level == skipLevel) {
            vi.skipString();
            p++;
          } else {
            String s = (String) values[p++];
            Assertions.assertEquals(new Utf8(s), vi.readString(null), msg);
          }
          break;
        case 'K':
          extractInt(cs);
          if (level == skipLevel) {
            vi.skipString();
            p++;
          } else {
            String s = (String) values[p++];
            Assertions.assertEquals(new Utf8(s), vi.readString(null), msg);
          }
          break;
        case 'b':
          extractInt(cs);
          if (level == skipLevel) {
            vi.skipBytes();
            p++;
          } else {
            byte[] bb = (byte[]) values[p++];
            ByteBuffer bb2 = vi.readBytes(null);
            byte[] actBytes = new byte[bb2.remaining()];
            System.arraycopy(bb2.array(), bb2.position(), actBytes, 0, bb2.remaining());
            Assertions.assertArrayEquals(bb, actBytes, msg);
          }
          break;
        case 'f': {
          int len = extractInt(cs);
          if (level == skipLevel) {
            vi.skipFixed(len);
            p++;
          } else {
            byte[] bb = (byte[]) values[p++];
            byte[] actBytes = new byte[len];
            vi.readFixed(actBytes);
            Assertions.assertArrayEquals(bb, actBytes, msg);
          }
        }
          break;
        case 'e': {
          int e = extractInt(cs);
          if (level == skipLevel) {
            vi.readEnum();
          } else {
            Assertions.assertEquals(e, vi.readEnum(), msg);
          }
        }
          break;
        case '[':
          if (level == skipLevel) {
            // Skip the whole array; skip() returns how many payloads it consumed.
            p += skip(msg, cs, vi, true);
            break;
          } else {
            level++;
            counts[level] = vi.readArrayStart();
            isArray[level] = true;
            isEmpty[level] = counts[level] == 0;
            continue;
          }
        case '{':
          if (level == skipLevel) {
            p += skip(msg, cs, vi, false);
            break;
          } else {
            level++;
            counts[level] = vi.readMapStart();
            isArray[level] = false;
            isEmpty[level] = counts[level] == 0;
            continue;
          }
        case ']':
          Assertions.assertEquals(0, counts[level], msg);
          if (!isEmpty[level]) {
            Assertions.assertEquals(0, vi.arrayNext(), msg);
          }
          level--;
          break;
        case '}':
          Assertions.assertEquals(0, counts[level]);
          if (!isEmpty[level]) {
            Assertions.assertEquals(0, vi.mapNext(), msg);
          }
          level--;
          break;
        case 's':
          // Start of an item: refill the block count if the current block ran out.
          if (counts[level] == 0) {
            if (isArray[level]) {
              counts[level] = vi.arrayNext();
            } else {
              counts[level] = vi.mapNext();
            }
          }
          counts[level]--;
          continue;
        case 'c':
          extractInt(cs);
          continue;
        case 'U': {
          int idx = extractInt(cs);
          Assertions.assertEquals(idx, vi.readIndex(), msg);
          continue;
        }
        case 'R':
          ((ResolvingDecoder) vi).readFieldOrder();
          continue;
        default:
          Assertions.fail(msg);
        }
      } catch (RuntimeException e) {
        // Re-wrap so the failing case's schema/description is visible.
        throw new RuntimeException(msg, e);
      }
    }
    Assertions.assertEquals(values.length, p, msg);
  }
private static int skip(String msg, InputScanner cs, Decoder vi, boolean isArray) throws IOException {
final char end = isArray ? ']' : '}';
if (isArray) {
Assertions.assertEquals(0, vi.skipArray(), msg);
} else if (end == '}') {
Assertions.assertEquals(0, vi.skipMap(), msg);
}
int level = 0;
int p = 0;
while (!cs.isDone()) {
char c = cs.cur();
cs.next();
switch (c) {
case '[':
case '{':
++level;
break;
case ']':
case '}':
if (c == end && level == 0) {
return p;
}
level--;
break;
case 'B':
case 'I':
case 'L':
case 'F':
case 'D':
case 'S':
case 'K':
case 'b':
case 'f':
case 'e':
p++;
break;
}
}
throw new RuntimeException("Don't know how to skip");
}
  /** Cartesian product of encodings × skip levels × test schemas, as JUnit arguments. */
  public static Stream<Arguments> data() {
    return convertTo2dStream(encodings, skipLevels, testSchemas());
  }
  // Every wire format is crossed into every test case by data().
  private static Object[][] encodings = new Object[][] { { Encoding.BINARY }, { Encoding.BLOCKING_BINARY },
      { Encoding.JSON } };
  // Skip levels crossed into every case; -1 never matches a nesting level, so
  // it means "read everything, skip nothing".
  private static Object[][] skipLevels = new Object[][] { { -1 }, { 0 }, { 1 }, { 2 }, };
public static Stream<Arguments> convertTo2dStream(final Object[][]... values) {
Iterator<Object[]> iter = cartesian(values);
Stream<Object[]> stream = StreamSupport.stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED),
false);
return stream.map(Arguments::of);
}
/**
* Returns the Cartesian product of input sequences.
*/
public static Iterator<Object[]> cartesian(final Object[][]... values) {
return new Iterator<Object[]>() {
private int[] pos = new int[values.length];
@Override
public boolean hasNext() {
return pos[0] < values[0].length;
}
@Override
public Object[] next() {
Object[][] v = new Object[values.length][];
for (int i = 0; i < v.length; i++) {
v[i] = values[i][pos[i]];
}
for (int i = v.length - 1; i >= 0; i--) {
if (++pos[i] == values[i].length) {
if (i != 0) {
pos[i] = 0;
}
} else {
break;
}
}
return concat(v);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
/**
* Concatenates the input sequences in order and forms a longer sequence.
*/
public static Object[] concat(Object[]... oo) {
int l = 0;
for (Object[] o : oo) {
l += o.length;
}
Object[] result = new Object[l];
l = 0;
for (Object[] o : oo) {
System.arraycopy(o, 0, result, l, o.length);
l += o.length;
}
return result;
}
  /**
   * Pastes incoming tables to form a wider table. All incoming tables should be
   * of same height.
   */
  static Object[][] paste(Object[][]... in) {
    Object[][] result = new Object[in[0].length][];
    Object[][] cc = new Object[in.length][];
    for (int i = 0; i < result.length; i++) {
      // Row i of the output is the concatenation of row i of every input table.
      for (int j = 0; j < cc.length; j++) {
        cc[j] = in[j][i];
      }
      result[i] = concat(cc);
    }
    return result;
  }
  public static Object[][] testSchemas() {
    /*
     * Each row pairs a schema (first element) with a calibration string of
     * single-character mnemonics (second element) describing the values to
     * write/read:
     *
     *   N       null                  B       boolean
     *   I       int                   L       long
     *   F       float                 D       double
     *   K<n>    map key-name of length n
     *   S<n>    string of length n    b<n>    bytes of length n
     *   f<n>    fixed of length n     e<n>    enum selecting symbol n
     *   c<n>    number of items to follow in an array/map
     *   U<n>    union selecting branch n
     *   [ / ]   start / end array     { / }   start / end map
     *   s       start item
     */
    return new Object[][] { { "\"null\"", "N" }, { "\"boolean\"", "B" }, { "\"int\"", "I" }, { "\"long\"", "L" },
        { "\"float\"", "F" }, { "\"double\"", "D" }, { "\"string\"", "S0" }, { "\"string\"", "S10" },
        { "\"bytes\"", "b0" }, { "\"bytes\"", "b10" }, { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 1}", "f1" },
        { "{\"type\":\"fixed\", \"name\":\"fi\", \"size\": 10}", "f10" },
        { "{\"type\":\"enum\", \"name\":\"en\", \"symbols\":[\"v1\", \"v2\"]}", "e1" },
        // empty arrays of every element type
        { "{\"type\":\"array\", \"items\": \"boolean\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[]", }, { "{\"type\":\"array\", \"items\": \"long\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"float\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"double\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"string\"}", "[]", },
        { "{\"type\":\"array\", \"items\": \"bytes\"}", "[]", },
        { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", " + "\"name\":\"fi\", \"size\": 10}}", "[]" },
        // non-empty arrays, including multi-block ([c1sIc1sI]) encodings
        { "{\"type\":\"array\", \"items\": \"boolean\"}", "[c1sB]" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sI]" },
        { "{\"type\":\"array\", \"items\": \"long\"}", "[c1sL]" },
        { "{\"type\":\"array\", \"items\": \"float\"}", "[c1sF]" },
        { "{\"type\":\"array\", \"items\": \"double\"}", "[c1sD]" },
        { "{\"type\":\"array\", \"items\": \"string\"}", "[c1sS10]" },
        { "{\"type\":\"array\", \"items\": \"bytes\"}", "[c1sb10]" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[c1sIc1sI]" },
        { "{\"type\":\"array\", \"items\": \"int\"}", "[c2sIsI]" },
        { "{\"type\":\"array\", \"items\":{\"type\":\"fixed\", " + "\"name\":\"fi\", \"size\": 10}}", "[c2sf10sf10]" },
        // empty maps of every value type
        { "{\"type\":\"map\", \"values\": \"boolean\"}", "{}" }, { "{\"type\":\"map\", \"values\": \"int\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"long\"}", "{}" }, { "{\"type\":\"map\", \"values\": \"float\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"double\"}", "{}" }, { "{\"type\":\"map\", \"values\": \"string\"}", "{}" },
        { "{\"type\":\"map\", \"values\": \"bytes\"}", "{}" },
        { "{\"type\":\"map\", \"values\": " + "{\"type\":\"array\", \"items\":\"int\"}}", "{}" },
        // non-empty maps
        { "{\"type\":\"map\", \"values\": \"boolean\"}", "{c1sK5B}" },
        { "{\"type\":\"map\", \"values\": \"int\"}", "{c1sK5I}" },
        { "{\"type\":\"map\", \"values\": \"long\"}", "{c1sK5L}" },
        { "{\"type\":\"map\", \"values\": \"float\"}", "{c1sK5F}" },
        { "{\"type\":\"map\", \"values\": \"double\"}", "{c1sK5D}" },
        { "{\"type\":\"map\", \"values\": \"string\"}", "{c1sK5S10}" },
        { "{\"type\":\"map\", \"values\": \"bytes\"}", "{c1sK5b10}" },
        { "{\"type\":\"map\", \"values\": " + "{\"type\":\"array\", \"items\":\"int\"}}", "{c1sK5[c3sIsIsI]}" },
        { "{\"type\":\"map\", \"values\": \"boolean\"}", "{c1sK5Bc2sK5BsK5B}" },
        // single-field records of every primitive type
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"boolean\"}]}", "B" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"int\"}]}", "I" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"long\"}]}", "L" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"float\"}]}", "F" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"double\"}]}", "D" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"string\"}]}", "S10" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f\", \"type\":\"bytes\"}]}", "b10" },
        // multi-field records
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f1\", \"type\":\"int\"},"
            + "{\"name\":\"f2\", \"type\":\"double\"}," + "{\"name\":\"f3\", \"type\":\"string\"}]}", "IDS10" },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f0\", \"type\":\"null\"},"
            + "{\"name\":\"f1\", \"type\":\"boolean\"}," + "{\"name\":\"f2\", \"type\":\"int\"},"
            + "{\"name\":\"f3\", \"type\":\"long\"}," + "{\"name\":\"f4\", \"type\":\"float\"},"
            + "{\"name\":\"f5\", \"type\":\"double\"}," + "{\"name\":\"f6\", \"type\":\"string\"},"
            + "{\"name\":\"f7\", \"type\":\"bytes\"}]}", "NBILFDS10b25" },
        // record of records
        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":[" + "{\"name\":\"f1\", \"type\":{\"type\":\"record\", "
            + "\"name\":\"inner\", \"fields\":[" + "{\"name\":\"g1\", \"type\":\"int\"}, {\"name\":\"g2\", "
            + "\"type\":\"double\"}]}}," + "{\"name\":\"f2\", \"type\":\"string\"},"
            + "{\"name\":\"f3\", \"type\":\"inner\"}]}", "IDS10ID" },
        // record with array
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f1\", \"type\":\"long\"},"
            + "{\"name\":\"f2\", " + "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}", "L[c1sI]" },
        // record with map
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f1\", \"type\":\"long\"},"
            + "{\"name\":\"f2\", " + "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}", "L{c1sK5I}" },
        // array of records
        { "{\"type\":\"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f2\", \"type\":\"null\"}]}}", "[c2sLNsLN]" },
        { "{\"type\":\"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f2\", "
            + "\"type\":{\"type\":\"array\", \"items\":\"int\"}}]}}", "[c2sL[c1sI]sL[c2sIsI]]" },
        { "{\"type\":\"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f2\", "
            + "\"type\":{\"type\":\"map\", \"values\":\"int\"}}]}}", "[c2sL{c1sK5I}sL{c2sK5IsK5I}]" },
        { "{\"type\":\"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f2\", " + "\"type\":[\"null\", \"int\"]}]}}",
            "[c2sLU0NsLU1I]" },
        // unions
        { "[\"boolean\"]", "U0B" }, { "[\"int\"]", "U0I" }, { "[\"long\"]", "U0L" }, { "[\"float\"]", "U0F" },
        { "[\"double\"]", "U0D" }, { "[\"string\"]", "U0S10" }, { "[\"bytes\"]", "U0b10" },
        { "[\"null\", \"int\"]", "U0N" }, { "[\"boolean\", \"int\"]", "U0B" }, { "[\"boolean\", \"int\"]", "U1I" },
        { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]", "U0B" },
        { "[\"boolean\", {\"type\":\"array\", \"items\":\"int\"} ]", "U1[c1sI]" },
        // Recursion
        { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": [" + "{\"name\":\"label\", \"type\":\"string\"},"
            + "{\"name\":\"children\", \"type\":" + "{\"type\": \"array\", \"items\": \"Node\" }}]}", "S10[c1sS10[]]" },
        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
            + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
            + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": [" + "{\"name\":\"car\", \"type\":\"Lisp\"},"
            + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}", "U0N" },
        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
            + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
            + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": [" + "{\"name\":\"car\", \"type\":\"Lisp\"},"
            + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}", "U1S10" },
        { "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
            + "{\"name\":\"value\", \"type\":[\"null\", \"string\","
            + "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": [" + "{\"name\":\"car\", \"type\":\"Lisp\"},"
            + "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}", "U2U1S10U0N" },
        // Deep recursion
        { "{\"type\": \"record\", \"name\": \"Node\", \"fields\": [" + "{\"name\":\"children\", \"type\":"
            + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
            "[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[c1s[]]]]]]]]]]]]" }, };
  }
static void dump(byte[] bb) {
int col = 0;
for (byte b : bb) {
if (col % 16 == 0) {
System.out.println();
}
col++;
System.out.print(Integer.toHexString(b & 0xff) + " ");
}
System.out.println();
}
  /**
   * Logs the parameters of one test case: the encoding, the skip level, and
   * both schemas with their associated values (debug level only).
   */
  static void print(Encoding encoding, int skipLevel, Schema writerSchema, Schema readerSchema, Object[] writtenValues,
      Object[] expectedValues) {
    LOG.debug("{} Skip Level {}", encoding, skipLevel);
    printSchemaAndValues("Writer", writerSchema, writtenValues);
    printSchemaAndValues("Reader", readerSchema, expectedValues);
  }
private static void printSchemaAndValues(String schemaType, Schema schema, Object[] values) {
LOG.debug("{} Schema {}", schemaType, schema);
for (Object value : values) {
LOG.debug("{} -> {}", value, value.getClass().getSimpleName());
}
}
}
| 7,195 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
public class TestBinaryData {

  /**
   * Insert a Long value into an Array. The worst-case scenario is
   * {@link Long#MAX_VALUE} because it requires 9 bytes to encode (instead of the
   * normal 8). When skipping it, the next byte should be 10.
   */
  @Test
  void skipLong() {
    byte[] b = new byte[10];
    BinaryData.encodeLong(Long.MAX_VALUE, b, 0);
    final int nextIndex = BinaryData.skipLong(b, 0);
    // JUnit 5's assertEquals takes (expected, actual); the original call had the
    // arguments reversed, which produced misleading failure messages.
    assertEquals(10, nextIndex);
  }

  /**
   * The variable-length (zig-zag) encodings of int and long must be identical
   * for every value that fits in an int.
   */
  @Test
  void testIntLongVleEquality() {
    byte[] intResult = new byte[9];
    byte[] longResult = new byte[9];
    // Representative values: zero, small positive/negative, and both extremes.
    for (int value : new int[] { 0, 42, -24, Integer.MAX_VALUE, Integer.MIN_VALUE }) {
      BinaryData.encodeInt(value, intResult, 0);
      BinaryData.encodeLong(value, longResult, 0);
      assertArrayEquals(intResult, longResult);
    }
  }
}
| 7,196 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/**
 * Golden-data fidelity tests: every BinaryEncoder variant must produce output
 * byte-for-byte identical to the historical {@code LegacyBinaryEncoder},
 * except where blocking encoders legitimately add block-count framing.
 *
 * NOTE: the expected bytes are defined by the exact sequence of write calls in
 * {@link #generateData} / {@link #generateComplexData} — do not reorder them.
 */
public class TestBinaryEncoderFidelity {

  // Reference bytes captured once from LegacyBinaryEncoder in generateLegacyData().
  static byte[] legacydata;
  static byte[] complexdata;

  EncoderFactory factory = EncoderFactory.get();

  /**
   * Writes a fixed sequence of values chosen to exercise encoder boundaries:
   * empty and sliced byte ranges, read-only buffers, special float/double
   * values, and ints/longs at every varint length boundary. The Random seed is
   * fixed so the stream is reproducible across runs.
   */
  public static void generateData(Encoder e, boolean useReadOnlyByteBuffer) throws IOException {
    // generate a bunch of data that should test the bounds of a BinaryEncoder
    Random r = new Random(665321);
    e.writeNull();
    e.writeBoolean(true);
    e.writeBoolean(false);
    byte[] bytes = new byte[10];
    ByteBuffer bb;
    if (useReadOnlyByteBuffer) {
      bb = ByteBuffer.wrap(bytes, 4, 4).asReadOnlyBuffer();
    } else {
      bb = ByteBuffer.wrap(bytes, 4, 4);
    }
    r.nextBytes(bytes);
    e.writeBytes(bytes);
    e.writeBytes(new byte[0]);
    e.writeBytes(bytes, 3, 3);
    e.writeBytes(new byte[0], 0, 0);
    e.writeBytes(ByteBuffer.wrap(bytes, 2, 2));
    // Same buffer written twice — presumably verifies that writeBytes does not
    // consume/disturb the buffer position (TODO confirm against Encoder spec).
    e.writeBytes(bb);
    e.writeBytes(bb);
    e.writeDouble(0.0);
    e.writeDouble(-0.0);
    e.writeDouble(Double.NaN);
    e.writeDouble(r.nextDouble());
    e.writeDouble(Double.NEGATIVE_INFINITY);
    e.writeEnum(65);
    e.writeFixed(bytes);
    e.writeFixed(bytes, 7, 2);
    e.writeFloat(1.0f);
    e.writeFloat(r.nextFloat());
    e.writeFloat(Float.POSITIVE_INFINITY);
    e.writeFloat(Float.MIN_NORMAL);
    e.writeIndex(-2);
    // ints around each zig-zag/varint length boundary, plus random and extremes
    e.writeInt(0);
    e.writeInt(-1);
    e.writeInt(1);
    e.writeInt(0x40);
    e.writeInt(-0x41);
    e.writeInt(0x2000);
    e.writeInt(-0x2001);
    e.writeInt(0x80000);
    e.writeInt(-0x80001);
    e.writeInt(0x4000000);
    e.writeInt(-0x4000001);
    e.writeInt(r.nextInt());
    e.writeInt(r.nextInt());
    e.writeInt(Integer.MAX_VALUE);
    e.writeInt(Integer.MIN_VALUE);
    // longs around each varint length boundary, plus random and extremes
    e.writeLong(0);
    e.writeLong(-1);
    e.writeLong(1);
    e.writeLong(0x40);
    e.writeLong(-0x41);
    e.writeLong(0x2000);
    e.writeLong(-0x2001);
    e.writeLong(0x80000);
    e.writeLong(-0x80001);
    e.writeLong(0x4000000);
    e.writeLong(-0x4000001);
    e.writeLong(0x200000000L);
    e.writeLong(-0x200000001L);
    e.writeLong(0x10000000000L);
    e.writeLong(-0x10000000001L);
    e.writeLong(0x800000000000L);
    e.writeLong(-0x800000000001L);
    e.writeLong(0x40000000000000L);
    e.writeLong(-0x40000000000001L);
    e.writeLong(0x2000000000000000L);
    e.writeLong(-0x2000000000000001L);
    e.writeLong(r.nextLong());
    e.writeLong(r.nextLong());
    e.writeLong(Long.MAX_VALUE);
    e.writeLong(Long.MIN_VALUE);
    // strings from all supported CharSequence types, incl. multi-byte UTF-8
    e.writeString(new StringBuilder("StringBuilder\u00A2"));
    e.writeString("String\u20AC");
    e.writeString("");
    e.writeString(new Utf8("Utf8\uD834\uDD1E"));
    if (e instanceof BinaryEncoder) {
      int count = ((BinaryEncoder) e).bytesBuffered();
      System.out.println(e.getClass().getSimpleName() + " buffered: " + count);
    }
    e.flush();
  }

  /**
   * Writes a one-element array and a two-entry map to exercise the
   * array/map item framing of the encoder.
   */
  static void generateComplexData(Encoder e) throws IOException {
    e.writeArrayStart();
    e.setItemCount(1);
    e.startItem();
    e.writeInt(1);
    e.writeArrayEnd();
    e.writeMapStart();
    e.setItemCount(2);
    e.startItem();
    e.writeString("foo");
    e.writeInt(-1);
    e.writeDouble(33.3);
    e.startItem();
    e.writeString("bar");
    e.writeInt(1);
    e.writeDouble(-33.3);
    e.writeMapEnd();
    e.flush();
  }

  /** Captures the reference output of LegacyBinaryEncoder once for all tests. */
  @BeforeAll
  public static void generateLegacyData() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    Encoder e = new LegacyBinaryEncoder(baos);
    generateData(e, false);
    legacydata = baos.toByteArray();
    baos.reset();
    generateComplexData(e);
    complexdata = baos.toByteArray();
  }

  /** The buffered binary encoder must match the legacy output exactly. */
  @Test
  void binaryEncoder() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder e = factory.binaryEncoder(baos, null);
    generateData(e, true);
    byte[] result = baos.toByteArray();
    assertEquals(legacydata.length, result.length);
    assertArrayEquals(legacydata, result);
    baos.reset();
    generateComplexData(e);
    byte[] result2 = baos.toByteArray();
    assertEquals(complexdata.length, result2.length);
    assertArrayEquals(complexdata, result2);
  }

  /** The unbuffered (direct) binary encoder must match the legacy output exactly. */
  @Test
  void directBinaryEncoder() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder e = factory.directBinaryEncoder(baos, null);
    generateData(e, true);
    byte[] result = baos.toByteArray();
    assertEquals(legacydata.length, result.length);
    assertArrayEquals(legacydata, result);
    baos.reset();
    generateComplexData(e);
    byte[] result2 = baos.toByteArray();
    assertEquals(complexdata.length, result2.length);
    assertArrayEquals(complexdata, result2);
  }

  /**
   * The blocking direct encoder matches the legacy output for scalar data; for
   * array/map data it adds block framing, changing length and the first byte.
   */
  @Test
  void blockingDirectBinaryEncoder() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder e = factory.blockingDirectBinaryEncoder(baos, null);
    generateData(e, true);
    byte[] result = baos.toByteArray();
    assertEquals(legacydata.length, result.length);
    assertArrayEquals(legacydata, result);
    baos.reset();
    generateComplexData(e);
    byte[] result2 = baos.toByteArray();
    // blocking will cause different length, should be two bytes larger
    assertEquals(complexdata.length + 2, result2.length);
    // the first byte is the array start, with the count of items negative
    assertEquals(complexdata[0] >>> 1, result2[0]);
    baos.reset();
    e.writeArrayStart();
    e.setItemCount(1);
    e.startItem();
    e.writeInt(1);
    e.writeArrayEnd();
    // 1: 1 element in the array
    // 2: 1 byte for the int
    // 3: zigzag encoded int
    // 4: 0 elements in the next block
    assertArrayEquals(baos.toByteArray(), new byte[] { 1, 2, 2, 0 });
    baos.reset();
    e.writeArrayStart();
    e.setItemCount(0);
    e.writeArrayEnd();
    // This is correct
    // 0: 0 elements in the block
    assertArrayEquals(baos.toByteArray(), new byte[] { 0 });
    baos.reset();
  }

  /** Same block-framing expectations for the buffered blocking encoder. */
  @Test
  void blockingBinaryEncoder() throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    BinaryEncoder e = factory.blockingBinaryEncoder(baos, null);
    generateData(e, true);
    byte[] result = baos.toByteArray();
    assertEquals(legacydata.length, result.length);
    assertArrayEquals(legacydata, result);
    baos.reset();
    generateComplexData(e);
    byte[] result2 = baos.toByteArray();
    // blocking will cause different length, should be two bytes larger
    assertEquals(complexdata.length + 2, result2.length);
    // the first byte is the array start, with the count of items negative
    assertEquals(complexdata[0] >>> 1, result2[0]);
  }
}
| 7,197 |
0 | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro | Create_ds/avro/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestEncoders {
private static final int ENCODER_BUFFER_SIZE = 32;
private static final int EXAMPLE_DATA_SIZE = 17;
private static final EncoderFactory FACTORY = EncoderFactory.get();
@TempDir
public Path dataDir;
@Test
void binaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
BinaryEncoder enc = FACTORY.binaryEncoder(out, null);
assertSame(enc, FACTORY.binaryEncoder(out, enc));
}
@Test
void badBinaryEncoderInit() {
assertThrows(NullPointerException.class, () -> {
FACTORY.binaryEncoder(null, null);
});
}
@Test
void blockingBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
BinaryEncoder reuse = null;
reuse = FACTORY.blockingBinaryEncoder(out, reuse);
assertSame(reuse, FACTORY.blockingBinaryEncoder(out, reuse));
// comparison
}
@Test
void badBlockintBinaryEncoderInit() {
assertThrows(NullPointerException.class, () -> {
FACTORY.binaryEncoder(null, null);
});
}
@Test
void directBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
BinaryEncoder enc = FACTORY.directBinaryEncoder(out, null);
assertSame(enc, FACTORY.directBinaryEncoder(out, enc));
}
@Test
void badDirectBinaryEncoderInit() {
assertThrows(NullPointerException.class, () -> {
FACTORY.directBinaryEncoder(null, null);
});
}
@Test
void blockingDirectBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
BinaryEncoder enc = FACTORY.blockingDirectBinaryEncoder(out, null);
assertSame(enc, FACTORY.blockingDirectBinaryEncoder(out, enc));
}
@Test
void badBlockingDirectBinaryEncoderInit() {
assertThrows(NullPointerException.class, () -> {
FACTORY.blockingDirectBinaryEncoder(null, null);
});
}
@Test
void jsonEncoderInit() throws IOException {
Schema s = new Schema.Parser().parse("\"int\"");
OutputStream out = new ByteArrayOutputStream();
FACTORY.jsonEncoder(s, out);
JsonEncoder enc = FACTORY.jsonEncoder(s, new JsonFactory().createGenerator(out, JsonEncoding.UTF8));
enc.configure(out);
}
@Test
void badJsonEncoderInitOS() throws IOException {
assertThrows(NullPointerException.class, () -> {
FACTORY.jsonEncoder(Schema.create(Type.INT), (OutputStream) null);
});
}
@Test
void badJsonEncoderInit() throws IOException {
assertThrows(NullPointerException.class, () -> {
FACTORY.jsonEncoder(Schema.create(Type.INT), (JsonGenerator) null);
});
}
@Test
void jsonEncoderNewlineDelimited() throws IOException {
OutputStream out = new ByteArrayOutputStream();
Schema ints = Schema.create(Type.INT);
Encoder e = FACTORY.jsonEncoder(ints, out);
String separator = System.getProperty("line.separator");
GenericDatumWriter<Integer> writer = new GenericDatumWriter<>(ints);
writer.write(1, e);
writer.write(2, e);
e.flush();
assertEquals("1" + separator + "2", out.toString());
}
@Test
void jsonEncoderWhenIncludeNamespaceOptionIsFalse() throws IOException {
String value = "{\"b\": {\"string\":\"myVal\"}, \"a\": 1}";
String schemaStr = "{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
+ "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": [\"null\", \"string\"]}" + "]}";
Schema schema = new Schema.Parser().parse(schemaStr);
byte[] avroBytes = fromJsonToAvro(value, schema);
ObjectMapper mapper = new ObjectMapper();
assertEquals(mapper.readTree("{\"b\":\"myVal\",\"a\":1}"),
mapper.readTree(fromAvroToJson(avroBytes, schema, false)));
}
@Test
void jsonEncoderWhenIncludeNamespaceOptionIsTrue() throws IOException {
String value = "{\"b\": {\"string\":\"myVal\"}, \"a\": 1}";
String schemaStr = "{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
+ "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": [\"null\", \"string\"]}" + "]}";
Schema schema = new Schema.Parser().parse(schemaStr);
byte[] avroBytes = fromJsonToAvro(value, schema);
ObjectMapper mapper = new ObjectMapper();
assertEquals(mapper.readTree("{\"b\":{\"string\":\"myVal\"},\"a\":1}"),
mapper.readTree(fromAvroToJson(avroBytes, schema, true)));
}
@Test
void validatingEncoderInit() throws IOException {
Schema s = new Schema.Parser().parse("\"int\"");
OutputStream out = new ByteArrayOutputStream();
Encoder e = FACTORY.directBinaryEncoder(out, null);
FACTORY.validatingEncoder(s, e).configure(e);
}
@Test
void jsonRecordOrdering() throws IOException {
String value = "{\"b\": 2, \"a\": 1}";
Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": ["
+ "{\"name\": \"a\", \"type\": \"int\"}, {\"name\": \"b\", \"type\": \"int\"}" + "]}");
GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
Object o = reader.read(null, decoder);
assertEquals("{\"a\": 1, \"b\": 2}", o.toString());
}
@Test
void jsonExcessFields() throws IOException {
assertThrows(AvroTypeException.class, () -> {
String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a0\": 45, \"a2\":true, \"a1\": null}}";
Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+ "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+ "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+ "]}");
GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
reader.read(null, decoder);
});
}
@Test
void jsonRecordOrdering2() throws IOException {
String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
Schema schema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+ "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+ "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+ "]}");
GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
Decoder decoder = DecoderFactory.get().jsonDecoder(schema, value);
Object o = reader.read(null, decoder);
assertEquals("{\"a\": {\"a1\": null, \"a2\": true}, \"b\": {\"b1\": \"h\", \"b2\": 3.14, \"b3\": 1.4}}",
o.toString());
}
@Test
void jsonRecordOrderingWithProjection() throws IOException {
String value = "{\"b\": { \"b3\": 1.4, \"b2\": 3.14, \"b1\": \"h\"}, \"a\": {\"a2\":true, \"a1\": null}}";
Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+ "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+ "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":\"float\"}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+ "]}");
Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
GenericDatumReader<Object> reader = new GenericDatumReader<>(writerSchema, readerSchema);
Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
Object o = reader.read(null, decoder);
assertEquals("{\"a\": {\"a1\": null, \"a2\": true}}", o.toString());
}
@Test
void jsonRecordOrderingWithProjection2() throws IOException {
String value = "{\"b\": { \"b1\": \"h\", \"b2\": [3.14, 3.56], \"b3\": 1.4}, \"a\": {\"a2\":true, \"a1\": null}}";
Schema writerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}},\n"
+ "{\"name\": \"b\", \"type\": {\"type\":\"record\",\"name\":\"B\",\"fields\":\n"
+ "[{\"name\":\"b1\", \"type\":\"string\"}, {\"name\":\"b2\", \"type\":{\"type\":\"array\", \"items\":\"float\"}}, {\"name\":\"b3\", \"type\":\"double\"}]}}\n"
+ "]}");
Schema readerSchema = new Schema.Parser().parse("{\"type\": \"record\", \"name\": \"ab\", \"fields\": [\n"
+ "{\"name\": \"a\", \"type\": {\"type\":\"record\",\"name\":\"A\",\"fields\":\n"
+ "[{\"name\":\"a1\", \"type\":\"null\"}, {\"name\":\"a2\", \"type\":\"boolean\"}]}}\n" + "]}");
GenericDatumReader<Object> reader = new GenericDatumReader<>(writerSchema, readerSchema);
Decoder decoder = DecoderFactory.get().jsonDecoder(writerSchema, value);
Object o = reader.read(null, decoder);
assertEquals("{\"a\": {\"a1\": null, \"a2\": true}}", o.toString());
}
@Test
void arrayBackedByteBuffer() throws IOException {
ByteBuffer buffer = ByteBuffer.wrap(someBytes(EXAMPLE_DATA_SIZE));
testWithBuffer(buffer);
}
@Test
void mappedByteBuffer() throws IOException {
Path file = dataDir.resolve("testMappedByteBuffer.avro");
Files.write(file, someBytes(EXAMPLE_DATA_SIZE));
MappedByteBuffer buffer = FileChannel.open(file, StandardOpenOption.READ).map(FileChannel.MapMode.READ_ONLY, 0,
EXAMPLE_DATA_SIZE);
testWithBuffer(buffer);
}
private void testWithBuffer(ByteBuffer buffer) throws IOException {
assertThat(asList(buffer.position(), buffer.remaining()), is(asList(0, EXAMPLE_DATA_SIZE)));
ByteArrayOutputStream output = new ByteArrayOutputStream(EXAMPLE_DATA_SIZE * 2);
EncoderFactory encoderFactory = new EncoderFactory();
encoderFactory.configureBufferSize(ENCODER_BUFFER_SIZE);
Encoder encoder = encoderFactory.binaryEncoder(output, null);
new GenericDatumWriter<ByteBuffer>(Schema.create(Schema.Type.BYTES)).write(buffer, encoder);
encoder.flush();
assertThat(output.toByteArray(), equalTo(avroEncoded(someBytes(EXAMPLE_DATA_SIZE))));
assertThat(asList(buffer.position(), buffer.remaining()), is(asList(0, EXAMPLE_DATA_SIZE))); // fails if buffer is
// not array-backed and
// buffer overflow
// occurs
}
private byte[] someBytes(int size) {
byte[] result = new byte[size];
for (int i = 0; i < size; i++) {
result[i] = (byte) i;
}
return result;
}
private byte[] avroEncoded(byte[] bytes) {
assert bytes.length < 64;
byte[] result = new byte[1 + bytes.length];
result[0] = (byte) (bytes.length * 2); // zig-zag encoding
System.arraycopy(bytes, 0, result, 1, bytes.length);
return result;
}
private byte[] fromJsonToAvro(String json, Schema schema) throws IOException {
DatumReader<Object> reader = new GenericDatumReader<>(schema);
GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
ByteArrayOutputStream output = new ByteArrayOutputStream();
Decoder decoder = DecoderFactory.get().jsonDecoder(schema, json);
Encoder encoder = EncoderFactory.get().binaryEncoder(output, null);
Object datum = reader.read(null, decoder);
writer.write(datum, encoder);
encoder.flush();
return output.toByteArray();
}
/**
 * Converts Avro binary bytes back to their JSON encoding under the given
 * schema, optionally including union-branch namespaces in the output.
 */
private String fromAvroToJson(byte[] avroBytes, Schema schema, boolean includeNamespace) throws IOException {
  GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
  DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
  ByteArrayOutputStream output = new ByteArrayOutputStream();
  JsonEncoder encoder = FACTORY.jsonEncoder(schema, output);
  encoder.setIncludeNamespace(includeNamespace);
  Decoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, null);
  Object datum = reader.read(null, decoder);
  writer.write(datum, encoder);
  encoder.flush();
  // Pass the Charset object instead of its name: no charset-name lookup and no
  // checked UnsupportedEncodingException path. ByteArrayOutputStream.flush()
  // is a documented no-op, so the explicit flush of 'output' was dropped.
  return new String(output.toByteArray(), StandardCharsets.UTF_8);
}
@Test
public void testJsonEncoderInitAutoFlush() throws IOException {
  // With autoflush off, encoder.flush() must only drain the encoder into the
  // BufferedOutputStream; the data becomes visible downstream only when the
  // stream itself is flushed. Also covers re-initialization via configure().
  Schema intSchema = new Schema.Parser().parse("\"int\"");
  OutputStream sink = new ByteArrayOutputStream();
  OutputStream buffered = new BufferedOutputStream(sink);
  JsonEncoder encoder = FACTORY.jsonEncoder(intSchema, buffered, false);
  encoder.configure(buffered, false);
  encoder.writeInt(24);
  encoder.flush();
  // Nothing reached the sink yet.
  assertEquals("", sink.toString());
  buffered.flush();
  assertEquals("24", sink.toString());
}
@Test
public void testJsonEncoderInitAutoFlushDisabled() throws IOException {
  // With pretty-printing and autoflush both disabled, encoded data stays in
  // the BufferedOutputStream until it is flushed explicitly.
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  OutputStream out = new BufferedOutputStream(baos);
  Schema ints = Schema.create(Type.INT);
  Encoder e = FACTORY.jsonEncoder(ints, out, false, false);
  // System.lineSeparator() (Java 7+) replaces the property lookup; the file
  // already relies on Java 7 APIs (NIO Files/Path, StandardCharsets).
  String separator = System.lineSeparator();
  GenericDatumWriter<Integer> writer = new GenericDatumWriter<>(ints);
  writer.write(1, e);
  writer.write(2, e);
  e.flush();
  // Encoder flushed, but the buffered stream was not: sink still empty.
  assertEquals("", baos.toString());
  out.flush();
  assertEquals("1" + separator + "2", baos.toString());
  out.close();
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import org.apache.avro.Schema;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import java.io.IOException;
import java.util.stream.Stream;
/**
 * Parameterized schema-resolution tests: each case encodes values with a
 * writer schema and verifies they read back correctly through a (possibly
 * different) reader schema, across every encoding and skip level supplied by
 * {@link TestResolvingIO}.
 */
public class TestResolvingIOResolving {
  /**
   * Encodes {@code writerValues} per {@code writerCalls} under the writer
   * schema, then checks that the bytes resolve against the reader schema to
   * {@code readerValues} per {@code readerCalls}.
   */
  @ParameterizedTest
  @MethodSource("data3")
  public void testResolving(TestValidatingIO.Encoding encoding, int skipLevel, String jsonWriterSchema,
      String writerCalls, Object[] writerValues, String jsonReaderSchema, String readerCalls, Object[] readerValues)
      throws IOException {
    Schema writerSchema = new Schema.Parser().parse(jsonWriterSchema);
    byte[] bytes = TestValidatingIO.make(writerSchema, writerCalls, writerValues, encoding);
    Schema readerSchema = new Schema.Parser().parse(jsonReaderSchema);
    TestValidatingIO.print(encoding, skipLevel, writerSchema, readerSchema, writerValues, readerValues);
    TestResolvingIO.check(writerSchema, readerSchema, bytes, readerCalls, readerValues, encoding, skipLevel);
  }

  /** Cross product of encodings x skip levels x the resolving test cases. */
  public static Stream<Arguments> data3() {
    return TestValidatingIO.convertTo2dStream(TestResolvingIO.encodings, TestResolvingIO.skipLevels,
        dataForResolvingTests());
  }

  /**
   * Each row is: { writer schema JSON, writer calls, writer values,
   * reader schema JSON, reader calls, reader values }.
   */
  private static Object[][] dataForResolvingTests() {
    // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
    return new Object[][] {
        // Projection
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f1\", \"type\":\"string\"},"
            + "{\"name\":\"f2\", \"type\":\"string\"}," + "{\"name\":\"f3\", \"type\":\"int\"}]}", "S10S10IS10S10I",
            new Object[] { "s1", "s2", 100, "t1", "t2", 200 },
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f1\", \"type\":\"string\" },"
                + "{\"name\":\"f2\", \"type\":\"string\"}]}",
            "RS10S10RS10S10", new Object[] { "s1", "s2", "t1", "t2" } },
        // Reordered fields
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f1\", \"type\":\"int\"}," + "{\"name\":\"f2\", \"type\":\"string\"}]}", "IS10",
            new Object[] { 10, "hello" },
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f2\", \"type\":\"string\" },"
                + "{\"name\":\"f1\", \"type\":\"long\"}]}",
            "RLS10", new Object[] { 10L, "hello" } },
        // Default values
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "", new Object[] {},
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
                + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]}",
            "RI", new Object[] { 100 } },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f2\", \"type\":\"int\"}]}", "I",
            new Object[] { 10 },
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
                + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101}," + "{\"name\":\"f2\", \"type\":\"int\"}]}",
            "RII", new Object[] { 10, 101 } },
        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":[" + "{\"name\": \"g1\", "
            + "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
            + "{\"name\":\"f2\", \"type\":\"int\"}]}}, " + "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
            new Object[] { 10, 11L },
            "{\"type\":\"record\",\"name\":\"outer\",\"fields\":[" + "{\"name\": \"g1\", "
                + "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
                + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101}," + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
                + "{\"name\": \"g2\", \"type\": \"long\"}]}",
            "RRIIL", new Object[] { 10, 101, 11L } },
        // Default value for a record.
        { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":[" + "{\"name\": \"g2\", \"type\": \"long\"}]}", "L",
            new Object[] { 11L },
            "{\"type\":\"record\",\"name\":\"outer\",\"fields\":[" + "{\"name\": \"g1\", "
                + "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
                + "{\"name\":\"f1\", \"type\":\"int\" }," + "{\"name\":\"f2\", \"type\":\"int\"}] }, "
                + "\"default\": { \"f1\": 10, \"f2\": 101 } }, " + "{\"name\": \"g2\", \"type\": \"long\"}]}",
            "RLRII", new Object[] { 11L, 10, 101 } },
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "", new Object[] {},
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
                + "{\"name\":\"f\", \"type\":{ \"type\": \"array\", \"items\": \"int\" }, " + "\"default\": [100]}]}",
            "[c1sI]", new Object[] { 100 } },
        { "{ \"type\": \"array\", \"items\": {\"type\":\"record\"," + "\"name\":\"r\",\"fields\":[]} }", "[c1s]",
            new Object[] {},
            "{ \"type\": \"array\", \"items\": {\"type\":\"record\"," + "\"name\":\"r\",\"fields\":["
                + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]} }",
            "[c1sI]", new Object[] { 100 } },
        // Enum resolution
        { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[\"x\",\"y\",\"z\"]}", "e2", new Object[] {},
            "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}", "e1", new Object[] {} },
        { "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"x\", \"y\" ]}", "e1", new Object[] {},
            "{\"type\":\"enum\",\"name\":\"e\",\"symbols\":[ \"y\", \"z\" ]}", "e0", new Object[] {} },
        // Union
        { "\"int\"", "I", new Object[] { 100 }, "[ \"long\", \"int\"]", "U1I", new Object[] { 100 } },
        { "[ \"long\", \"int\"]", "U1I", new Object[] { 100 }, "\"int\"", "I", new Object[] { 100 } },
        // Union + promotion
        { "\"int\"", "I", new Object[] { 100 }, "[ \"long\", \"string\"]", "U0L", new Object[] { 100L } },
        { "[ \"int\", \"string\"]", "U0I", new Object[] { 100 }, "\"long\"", "L", new Object[] { 100L } },
        // Record where union field is skipped.
        { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f0\", \"type\":\"boolean\"}," + "{\"name\":\"f1\", \"type\":\"int\"},"
            + "{\"name\":\"f2\", \"type\":[\"int\", \"long\"]}," + "{\"name\":\"f3\", \"type\":\"float\"}" + "]}",
            "BIU0IF", new Object[] { true, 100, 121, 10.75f },
            "{\"type\":\"record\",\"name\":\"r\",\"fields\":[" + "{\"name\":\"f0\", \"type\":\"boolean\"},"
                + "{\"name\":\"f1\", \"type\":\"long\"}," + "{\"name\":\"f3\", \"type\":\"double\"}]}",
            "BLD", new Object[] { true, 100L, 10.75d } },
        // Array of record with arrays.
        { "{ \"type\": \"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
            + "{\"name\":\"f0\", \"type\":\"boolean\"},"
            + "{\"name\":\"f1\", \"type\": {\"type\":\"array\", \"items\": \"boolean\" }}" + "]}}",
            "[c2sB[c2sBsB]sB[c3sBsBsB]]", new Object[] { true, false, false, false, true, true, true },
            "{ \"type\": \"array\", \"items\":" + "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
                + "{\"name\":\"f0\", \"type\":\"boolean\"}" + "]}}",
            "[c2sBsB]", new Object[] { true, false } }, };
  }
}