index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
/** File-level metadata. */
public class ColumnFileMetaData extends MetaData<ColumnFileMetaData> {
  /** Deserialize file-level metadata from the given input buffer. */
  static ColumnFileMetaData read(InputBuffer in) throws IOException {
    final ColumnFileMetaData meta = new ColumnFileMetaData();
    MetaData.read(in, meta);
    return meta;
  }
}
| 7,400 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnMetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
/** Metadata for a column. */
public class ColumnMetaData extends MetaData<ColumnMetaData> {
// Reserved metadata keys under which the cached fields below are persisted.
static final String NAME_KEY = RESERVED_KEY_PREFIX + "name";
static final String TYPE_KEY = RESERVED_KEY_PREFIX + "type";
static final String VALUES_KEY = RESERVED_KEY_PREFIX + "values";
static final String PARENT_KEY = RESERVED_KEY_PREFIX + "parent";
static final String ARRAY_KEY = RESERVED_KEY_PREFIX + "array";
// cache these values for better performance
// (each is also stored as a reserved key/value pair via set* below)
private String name;
private ValueType type;
private boolean values;
private ColumnMetaData parent;
private boolean isArray;
// children/number are derived per-file state, not persisted metadata
private transient List<ColumnMetaData> children = new ArrayList<>(0);
private transient int number = -1;
private ColumnMetaData() {
} // non-public ctor, used by read() which fills the fields afterwards
/** Construct given a name and type. */
public ColumnMetaData(String name, ValueType type) {
this.name = name;
setReserved(NAME_KEY, name);
this.type = type;
setReserved(TYPE_KEY, type.getName());
}
/** Return this column's name. */
public String getName() {
return name;
}
/** Return this column's type. */
public ValueType getType() {
return type;
}
/** Return this column's parent or null. */
public ColumnMetaData getParent() {
return parent;
}
/** Return this column's children or null. */
public List<ColumnMetaData> getChildren() {
return children;
}
/** Return true if this column is an array. */
public boolean isArray() {
return isArray;
}
/** Return this column's number in a file. */
public int getNumber() {
return number;
}
// Assigned by the file writer/reader when columns are ordered.
void setNumber(int number) {
this.number = number;
}
/**
 * Set whether this column has an index of blocks by value. This only makes
 * sense for sorted columns and permits one to seek into a column by value.
 */
public ColumnMetaData hasIndexValues(boolean values) {
if (isArray)
throw new TrevniRuntimeException("Array column cannot have index: " + this);
this.values = values;
return setReservedBoolean(VALUES_KEY, values);
}
/** Set this column's parent. A parent must be a preceding array column. */
public ColumnMetaData setParent(ColumnMetaData parent) {
if (!parent.isArray())
throw new TrevniRuntimeException("Parent is not an array: " + parent);
if (values)
throw new TrevniRuntimeException("Array column cannot have index: " + this);
this.parent = parent;
// registers this column as a child so nested structures can be traversed
parent.children.add(this);
return setReserved(PARENT_KEY, parent.getName());
}
/** Set whether this column is an array. */
public ColumnMetaData isArray(boolean isArray) {
if (values)
throw new TrevniRuntimeException("Array column cannot have index: " + this);
this.isArray = isArray;
return setReservedBoolean(ARRAY_KEY, isArray);
}
/** Get whether this column has an index of blocks by value. */
public boolean hasIndexValues() {
return getBoolean(VALUES_KEY);
}
// Deserialize column metadata and re-populate the cached fields from the
// reserved keys. Parent lookup requires parents to precede children in the
// file, since file.getColumnMetaData is consulted by name.
static ColumnMetaData read(InputBuffer in, ColumnFileReader file) throws IOException {
ColumnMetaData result = new ColumnMetaData();
MetaData.read(in, result);
result.name = result.getString(NAME_KEY);
result.type = ValueType.forName(result.getString(TYPE_KEY));
result.values = result.getBoolean(VALUES_KEY);
result.isArray = result.getBoolean(ARRAY_KEY);
String parentName = result.getString(PARENT_KEY);
if (parentName != null)
result.setParent(file.getColumnMetaData(parentName));
return result;
}
}
| 7,401 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/BZip2Codec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
/** Implements bzip2 compression via Apache Commons Compress. */
public class BZip2Codec extends Codec {
  public static final int DEFAULT_BUFFER_SIZE = 64 * 1024;

  // Lazily created and reused across calls; instances are not thread-safe.
  private ByteArrayOutputStream outputBuffer;

  /**
   * Compress the remaining bytes of {@code uncompressedData}.
   * NOTE(review): assumes the buffer is array-backed, as elsewhere in Trevni.
   */
  @Override
  ByteBuffer compress(ByteBuffer uncompressedData) throws IOException {
    ByteArrayOutputStream baos = getOutputBuffer(uncompressedData.remaining());
    try (BZip2CompressorOutputStream outputStream = new BZip2CompressorOutputStream(baos)) {
      outputStream.write(uncompressedData.array(), computeOffset(uncompressedData), uncompressedData.remaining());
    }
    return ByteBuffer.wrap(baos.toByteArray());
  }

  /** Decompress the remaining bytes of {@code compressedData}. */
  @Override
  ByteBuffer decompress(ByteBuffer compressedData) throws IOException {
    ByteArrayInputStream bais = new ByteArrayInputStream(compressedData.array(), computeOffset(compressedData),
        compressedData.remaining());
    try (BZip2CompressorInputStream inputStream = new BZip2CompressorInputStream(bais)) {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
      int readCount = -1;
      // Fix: always read into the scratch buffer at offset 0. The previous
      // code passed compressedData.position() as the buffer offset while
      // keeping buffer.length as the count, which could overflow the buffer
      // (IndexOutOfBoundsException) or leave bytes at the start unwritten
      // whenever the input buffer's position was non-zero.
      while ((readCount = inputStream.read(buffer, 0, buffer.length)) > 0) {
        baos.write(buffer, 0, readCount);
      }
      return ByteBuffer.wrap(baos.toByteArray());
    }
  }

  // Returns the shared scratch output stream, creating it on first use and
  // resetting it so previous contents are discarded.
  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
    if (null == outputBuffer)
      outputBuffer = new ByteArrayOutputStream(suggestedLength);
    outputBuffer.reset();
    return outputBuffer;
  }
}
| 7,402 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/SnappyCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import org.xerial.snappy.Snappy;
/** Implements <a href="https://code.google.com/p/snappy/">Snappy</a> codec. */
/** Implements <a href="https://code.google.com/p/snappy/">Snappy</a> codec. */
final class SnappyCodec extends Codec {
  /** Compress the remaining bytes of {@code in} into a freshly sized buffer. */
  @Override
  ByteBuffer compress(ByteBuffer in) throws IOException {
    int srcOffset = computeOffset(in);
    int srcLength = in.remaining();
    ByteBuffer compressed = ByteBuffer.allocate(Snappy.maxCompressedLength(srcLength));
    int written = Snappy.compress(in.array(), srcOffset, srcLength, compressed.array(), 0);
    // Cast to Buffer keeps this compatible with pre-Java-9 covariant returns.
    ((Buffer) compressed).limit(written);
    return compressed;
  }

  /** Decompress the remaining bytes of {@code in}. */
  @Override
  ByteBuffer decompress(ByteBuffer in) throws IOException {
    int srcOffset = computeOffset(in);
    int srcLength = in.remaining();
    int expanded = Snappy.uncompressedLength(in.array(), srcOffset, srcLength);
    ByteBuffer uncompressed = ByteBuffer.allocate(expanded);
    int written = Snappy.uncompress(in.array(), srcOffset, srcLength, uncompressed.array(), 0);
    ((Buffer) uncompressed).limit(written);
    return uncompressed;
  }
}
| 7,403 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/OutputBuffer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
/** Used to write values. */
/**
 * Used to write values. Accumulates encoded bytes in a growable array:
 * ints/longs use zig-zag variable-length encoding, fixed-width types use
 * little-endian byte order, booleans are bit-packed eight per byte.
 */
class OutputBuffer extends ByteArrayOutputStream {
  static final int BLOCK_SIZE = 64 * 1024;

  private int bitCount; // bit position within the current boolean byte

  public OutputBuffer() {
    // 25% headroom so a block may overflow slightly before being flushed
    this(BLOCK_SIZE + (BLOCK_SIZE >> 2));
  }

  public OutputBuffer(int size) {
    super(size);
  }

  /** True once at least a full block of data has been buffered. */
  public boolean isFull() {
    return size() >= BLOCK_SIZE;
  }

  /** Expose the buffered bytes without copying. */
  public ByteBuffer asByteBuffer() {
    return ByteBuffer.wrap(buf, 0, count);
  }

  /** Write a value encoded according to its {@link ValueType}. */
  public void writeValue(Object value, ValueType type) throws IOException {
    switch (type) {
    case NULL:
      break; // null occupies no bytes
    case BOOLEAN:
      writeBoolean((Boolean) value);
      break;
    case INT:
      writeInt((Integer) value);
      break;
    case LONG:
      writeLong((Long) value);
      break;
    case FIXED32:
      writeFixed32((Integer) value);
      break;
    case FIXED64:
      writeFixed64((Long) value);
      break;
    case FLOAT:
      writeFloat((Float) value);
      break;
    case DOUBLE:
      writeDouble((Double) value);
      break;
    case STRING:
      writeString((String) value);
      break;
    case BYTES:
      if (value instanceof ByteBuffer)
        writeBytes((ByteBuffer) value);
      else
        writeBytes((byte[]) value);
      break;
    default:
      throw new TrevniRuntimeException("Unknown value type: " + type);
    }
  }

  /** Append one boolean, packing up to eight into each byte. */
  public void writeBoolean(boolean value) {
    if (bitCount == 0) { // first bool in byte
      ensure(1);
      count++;
    }
    if (value)
      buf[count - 1] |= (byte) (1 << bitCount);
    bitCount++;
    if (bitCount == 8)
      bitCount = 0;
  }

  /** Write an array length, terminating any pending boolean byte. */
  public void writeLength(int length) throws IOException {
    bitCount = 0;
    writeInt(length);
  }

  /** Write a length-prefixed UTF-8 string. */
  public void writeString(String string) throws IOException {
    byte[] bytes = string.getBytes(StandardCharsets.UTF_8);
    writeInt(bytes.length);
    write(bytes, 0, bytes.length);
  }

  /** Write the remaining bytes of a (array-backed) buffer, length-prefixed. */
  public void writeBytes(ByteBuffer bytes) {
    int pos = bytes.position();
    int start = bytes.arrayOffset() + pos;
    int len = bytes.limit() - pos;
    writeBytes(bytes.array(), start, len);
  }

  public void writeBytes(byte[] bytes) {
    writeBytes(bytes, 0, bytes.length);
  }

  /** Write a length-prefixed byte range. */
  public void writeBytes(byte[] bytes, int start, int len) {
    writeInt(len);
    write(bytes, start, len);
  }

  public void writeFloat(float f) throws IOException {
    writeFixed32(Float.floatToRawIntBits(f));
  }

  public void writeDouble(double d) throws IOException {
    writeFixed64(Double.doubleToRawLongBits(d));
  }

  /** Write four bytes, little-endian. */
  public void writeFixed32(int i) throws IOException {
    ensure(4);
    buf[count] = (byte) ((i) & 0xFF);
    buf[count + 1] = (byte) ((i >>> 8) & 0xFF);
    buf[count + 2] = (byte) ((i >>> 16) & 0xFF);
    buf[count + 3] = (byte) ((i >>> 24) & 0xFF);
    count += 4;
  }

  /** Write eight bytes, little-endian. */
  public void writeFixed64(long l) throws IOException {
    ensure(8);
    int first = (int) (l & 0xFFFFFFFF);
    int second = (int) ((l >>> 32) & 0xFFFFFFFF);
    buf[count] = (byte) ((first) & 0xFF);
    buf[count + 1] = (byte) ((first >>> 8) & 0xFF);
    buf[count + 2] = (byte) ((first >>> 16) & 0xFF);
    buf[count + 3] = (byte) ((first >>> 24) & 0xFF);
    buf[count + 4] = (byte) ((second) & 0xFF);
    buf[count + 5] = (byte) ((second >>> 8) & 0xFF);
    buf[count + 6] = (byte) ((second >>> 16) & 0xFF);
    buf[count + 7] = (byte) ((second >>> 24) & 0xFF);
    count += 8;
  }

  /** Write a zig-zag varint (1-5 bytes). */
  public void writeInt(int n) {
    ensure(5);
    n = (n << 1) ^ (n >> 31); // move sign to low-order bit
    if ((n & ~0x7F) != 0) {
      buf[count++] = (byte) ((n | 0x80) & 0xFF);
      n >>>= 7;
      if (n > 0x7F) {
        buf[count++] = (byte) ((n | 0x80) & 0xFF);
        n >>>= 7;
        if (n > 0x7F) {
          buf[count++] = (byte) ((n | 0x80) & 0xFF);
          n >>>= 7;
          if (n > 0x7F) {
            buf[count++] = (byte) ((n | 0x80) & 0xFF);
            n >>>= 7;
          }
        }
      }
    }
    buf[count++] = (byte) n;
  }

  /** Write a zig-zag varint (1-10 bytes). */
  public void writeLong(long n) throws IOException {
    ensure(10);
    n = (n << 1) ^ (n >> 63); // move sign to low-order bit
    if ((n & ~0x7FL) != 0) {
      buf[count++] = (byte) ((n | 0x80) & 0xFF);
      n >>>= 7;
      if (n > 0x7F) {
        buf[count++] = (byte) ((n | 0x80) & 0xFF);
        n >>>= 7;
        if (n > 0x7F) {
          buf[count++] = (byte) ((n | 0x80) & 0xFF);
          n >>>= 7;
          if (n > 0x7F) {
            buf[count++] = (byte) ((n | 0x80) & 0xFF);
            n >>>= 7;
            if (n > 0x7F) {
              buf[count++] = (byte) ((n | 0x80) & 0xFF);
              n >>>= 7;
              if (n > 0x7F) {
                buf[count++] = (byte) ((n | 0x80) & 0xFF);
                n >>>= 7;
                if (n > 0x7F) {
                  buf[count++] = (byte) ((n | 0x80) & 0xFF);
                  n >>>= 7;
                  if (n > 0x7F) {
                    buf[count++] = (byte) ((n | 0x80) & 0xFF);
                    n >>>= 7;
                    if (n > 0x7F) {
                      buf[count++] = (byte) ((n | 0x80) & 0xFF);
                      n >>>= 7;
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
    buf[count++] = (byte) n;
  }

  // Grow the backing array so that n more bytes fit.
  private void ensure(int n) {
    if (count + n > buf.length)
      buf = Arrays.copyOf(buf, Math.max(buf.length << 1, count + n));
  }

  /** Return the encoded size in bytes of a value of the given type. */
  public static int size(Object value, ValueType type) {
    switch (type) {
    case NULL:
      return 0;
    case INT:
      return size((Integer) value);
    case LONG:
      return size((Long) value);
    case FIXED32:
    case FLOAT:
      return 4;
    case FIXED64:
    case DOUBLE:
      return 8;
    case STRING:
      return size((String) value);
    case BYTES:
      if (value instanceof ByteBuffer)
        return size((ByteBuffer) value);
      return size((byte[]) value);
    default:
      throw new TrevniRuntimeException("Unknown value type: " + type);
    }
  }

  /** Varint size of an int, consistent with {@link #writeInt(int)}. */
  public static int size(int n) {
    n = (n << 1) ^ (n >> 31); // move sign to low-order bit
    // Fix: count 7-bit groups with an unsigned test. The previous signed
    // comparisons (n <= (1 << 7k) - 1) returned 1 for any zig-zag value with
    // the top bit set (e.g. Integer.MIN_VALUE), while writeInt emits 5 bytes.
    int bytes = 1;
    while ((n & ~0x7F) != 0) {
      bytes++;
      n >>>= 7;
    }
    return bytes;
  }

  /** Varint size of a long, consistent with {@link #writeLong(long)}. */
  public static int size(long n) {
    n = (n << 1) ^ (n >> 63); // move sign to low-order bit
    // Fix: the previous thresholds used int-typed shifts such as (1 << 35),
    // which wrap modulo 32 in Java, so sizes were wrong for any value needing
    // 5 or more bytes (>= 2^28 after zig-zag) and for negative comparisons.
    int bytes = 1;
    while ((n & ~0x7FL) != 0) {
      bytes++;
      n >>>= 7;
    }
    return bytes;
  }

  /** Encoded size of a length-prefixed byte buffer. */
  public static int size(ByteBuffer bytes) {
    int length = bytes.remaining();
    return size(length) + length;
  }

  /** Encoded size of a length-prefixed byte array. */
  public static int size(byte[] bytes) {
    int length = bytes.length;
    return size(length) + length;
  }

  /** Encoded size of a length-prefixed UTF-8 string. */
  public static int size(String string) {
    int length = utf8Length(string);
    return size(length) + length;
  }

  // Compute the UTF-8 encoded length without materializing the bytes.
  private static int utf8Length(String string) {
    int stringLength = string.length();
    int utf8Length = 0;
    for (int i = 0; i < stringLength; i++) {
      char c = string.charAt(i);
      int p = c; // code point
      if (Character.isHighSurrogate(c) // surrogate pair
          && i != stringLength - 1 && Character.isLowSurrogate(string.charAt(i + 1))) {
        p = string.codePointAt(i);
        i++;
      }
      if (p <= 0x007F) {
        utf8Length += 1;
      } else if (p <= 0x07FF) {
        utf8Length += 2;
      } else if (p <= 0x0FFFF) {
        utf8Length += 3;
      } else if (p <= 0x01FFFFF) {
        utf8Length += 4;
      } else if (p <= 0x03FFFFFF) {
        utf8Length += 5;
      } else {
        utf8Length += 6;
      }
    }
    return utf8Length;
  }
}
| 7,404 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBuffer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.EOFException;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.StandardCharsets;
/** Used to read values. */
/**
 * Used to read values. Buffers reads from an underlying {@link Input},
 * decoding zig-zag varints, little-endian fixed-width values, bit-packed
 * booleans, and length-prefixed strings/bytes written by OutputBuffer.
 */
class InputBuffer {
  private Input in;

  private long inLength; // total length of the underlying input
  private long offset; // pos of next read from in

  private byte[] buf; // data from input
  private int pos; // position within buffer
  private int limit; // end of valid buffer data

  // Use the StandardCharsets constant for consistency with OutputBuffer's
  // writeString, avoiding a by-name charset lookup.
  private CharsetDecoder utf8 = StandardCharsets.UTF_8.newDecoder();

  private int bitCount; // position in booleans

  private int runLength; // length of run
  private int runValue; // value of run

  public InputBuffer(Input in) throws IOException {
    this(in, 0);
  }

  public InputBuffer(Input in, long position) throws IOException {
    this.in = in;
    this.inLength = in.length();
    this.offset = position;

    if (in instanceof InputBytes) { // use buffer directly
      this.buf = ((InputBytes) in).getBuffer();
      this.limit = (int) in.length();
      this.offset = limit;
      this.pos = (int) position;
    } else { // create new buffer
      this.buf = new byte[8192]; // big enough for primitives
    }
  }

  /** Position the next read at {@code position}, reusing buffered data when possible. */
  public void seek(long position) throws IOException {
    runLength = 0;
    if (position >= (offset - limit) && position <= offset) {
      pos = (int) (limit - (offset - position)); // seek in buffer;
      return;
    }
    pos = 0;
    limit = 0;
    offset = position;
  }

  /** Return the current logical read position in the input. */
  public long tell() {
    return (offset - limit) + pos;
  }

  public long length() {
    return inLength;
  }

  /** Read one value of the given type (unchecked cast to the caller's T). */
  public <T extends Comparable> T readValue(ValueType type) throws IOException {
    switch (type) {
    case NULL:
      return null;
    case BOOLEAN:
      return (T) Boolean.valueOf(readBoolean());
    case INT:
      return (T) Integer.valueOf(readInt());
    case LONG:
      return (T) Long.valueOf(readLong());
    case FIXED32:
      return (T) Integer.valueOf(readFixed32());
    case FIXED64:
      return (T) Long.valueOf(readFixed64());
    case FLOAT:
      return (T) Float.valueOf(readFloat());
    case DOUBLE:
      return (T) Double.valueOf(readDouble());
    case STRING:
      return (T) readString();
    case BYTES:
      return (T) readBytes(null);
    default:
      throw new TrevniRuntimeException("Unknown value type: " + type);
    }
  }

  /** Skip over one value of the given type without materializing it. */
  public void skipValue(ValueType type) throws IOException {
    switch (type) {
    case NULL:
      break;
    case BOOLEAN:
      readBoolean();
      break;
    case INT:
      readInt();
      break;
    case LONG:
      readLong();
      break;
    case FIXED32:
    case FLOAT:
      skip(4);
      break;
    case FIXED64:
    case DOUBLE:
      skip(8);
      break;
    case STRING:
    case BYTES:
      skipBytes();
      break;
    default:
      throw new TrevniRuntimeException("Unknown value type: " + type);
    }
  }

  /** Read one bit-packed boolean; a new byte is consumed every 8 calls. */
  public boolean readBoolean() throws IOException {
    if (bitCount == 0)
      read();
    int bits = buf[pos - 1] & 0xff;
    int bit = (bits >> bitCount) & 1;
    bitCount++;
    if (bitCount == 8)
      bitCount = 0;
    return bit != 0;
  }

  /**
   * Read an array length. Negative encoded lengths denote a run of repeated
   * lengths (run-length encoding); subsequent calls replay the run value.
   */
  public int readLength() throws IOException {
    bitCount = 0;
    if (runLength > 0) {
      runLength--; // in run
      return runValue;
    }
    int length = readInt();
    if (length >= 0) // not a run
      return length;
    runLength = (1 - length) >>> 1; // start of run
    runValue = (length + 1) & 1;
    return runValue;
  }

  /** Read a zig-zag varint (1-5 bytes). */
  public int readInt() throws IOException {
    if ((limit - pos) < 5) { // maybe not in buffer
      int b = read();
      int n = b & 0x7f;
      for (int shift = 7; b > 0x7f; shift += 7) {
        b = read();
        n ^= (b & 0x7f) << shift;
      }
      return (n >>> 1) ^ -(n & 1); // back to two's-complement
    }
    int len = 1;
    int b = buf[pos] & 0xff;
    int n = b & 0x7f;
    if (b > 0x7f) {
      b = buf[pos + len++] & 0xff;
      n ^= (b & 0x7f) << 7;
      if (b > 0x7f) {
        b = buf[pos + len++] & 0xff;
        n ^= (b & 0x7f) << 14;
        if (b > 0x7f) {
          b = buf[pos + len++] & 0xff;
          n ^= (b & 0x7f) << 21;
          if (b > 0x7f) {
            b = buf[pos + len++] & 0xff;
            n ^= (b & 0x7f) << 28;
            if (b > 0x7f) {
              throw new IOException("Invalid int encoding");
            }
          }
        }
      }
    }
    pos += len;
    if (pos > limit)
      throw new EOFException();
    return (n >>> 1) ^ -(n & 1); // back to two's-complement
  }

  /** Read a zig-zag varint (1-10 bytes). */
  public long readLong() throws IOException {
    if ((limit - pos) < 10) { // maybe not in buffer
      int b = read();
      long n = b & 0x7f;
      for (int shift = 7; b > 0x7f; shift += 7) {
        b = read();
        n ^= (b & 0x7fL) << shift;
      }
      return (n >>> 1) ^ -(n & 1); // back to two's-complement
    }
    int b = buf[pos++] & 0xff;
    int n = b & 0x7f;
    long l;
    if (b > 0x7f) {
      b = buf[pos++] & 0xff;
      n ^= (b & 0x7f) << 7;
      if (b > 0x7f) {
        b = buf[pos++] & 0xff;
        n ^= (b & 0x7f) << 14;
        if (b > 0x7f) {
          b = buf[pos++] & 0xff;
          n ^= (b & 0x7f) << 21;
          if (b > 0x7f) {
            // only the low 28 bits can be set, so this won't carry
            // the sign bit to the long
            l = innerLongDecode((long) n);
          } else {
            l = n;
          }
        } else {
          l = n;
        }
      } else {
        l = n;
      }
    } else {
      l = n;
    }
    if (pos > limit) {
      throw new EOFException();
    }
    return (l >>> 1) ^ -(l & 1); // back to two's-complement
  }

  // splitting readLong up makes it faster because of the JVM does more
  // optimizations on small methods
  private long innerLongDecode(long l) throws IOException {
    int len = 1;
    int b = buf[pos] & 0xff;
    l ^= (b & 0x7fL) << 28;
    if (b > 0x7f) {
      b = buf[pos + len++] & 0xff;
      l ^= (b & 0x7fL) << 35;
      if (b > 0x7f) {
        b = buf[pos + len++] & 0xff;
        l ^= (b & 0x7fL) << 42;
        if (b > 0x7f) {
          b = buf[pos + len++] & 0xff;
          l ^= (b & 0x7fL) << 49;
          if (b > 0x7f) {
            b = buf[pos + len++] & 0xff;
            l ^= (b & 0x7fL) << 56;
            if (b > 0x7f) {
              b = buf[pos + len++] & 0xff;
              l ^= (b & 0x7fL) << 63;
              if (b > 0x7f) {
                throw new IOException("Invalid long encoding");
              }
            }
          }
        }
      }
    }
    pos += len;
    return l;
  }

  public float readFloat() throws IOException {
    return Float.intBitsToFloat(readFixed32());
  }

  /** Read four bytes, little-endian. */
  public int readFixed32() throws IOException {
    if ((limit - pos) < 4) // maybe not in buffer
      return read() | (read() << 8) | (read() << 16) | (read() << 24);
    int len = 1;
    int n = (buf[pos] & 0xff) | ((buf[pos + len++] & 0xff) << 8) | ((buf[pos + len++] & 0xff) << 16)
        | ((buf[pos + len++] & 0xff) << 24);
    if ((pos + 4) > limit)
      throw new EOFException();
    pos += 4;
    return n;
  }

  public double readDouble() throws IOException {
    return Double.longBitsToDouble(readFixed64());
  }

  /** Read eight bytes, little-endian. */
  public long readFixed64() throws IOException {
    return (readFixed32() & 0xFFFFFFFFL) | (((long) readFixed32()) << 32);
  }

  /** Read a length-prefixed UTF-8 string. */
  public String readString() throws IOException {
    int length = readInt();
    if (length <= (limit - pos)) { // in buffer
      String result = utf8.decode(ByteBuffer.wrap(buf, pos, length)).toString();
      pos += length;
      return result;
    }
    byte[] bytes = new byte[length];
    readFully(bytes, 0, length);
    return utf8.decode(ByteBuffer.wrap(bytes, 0, length)).toString();
  }

  /** Read a length-prefixed byte array. */
  public byte[] readBytes() throws IOException {
    byte[] result = new byte[readInt()];
    readFully(result);
    return result;
  }

  /** Read length-prefixed bytes, reusing {@code old} when it has capacity. */
  public ByteBuffer readBytes(ByteBuffer old) throws IOException {
    int length = readInt();
    ByteBuffer result;
    if (old != null && length <= old.capacity()) {
      result = old;
      ((Buffer) result).clear();
    } else {
      result = ByteBuffer.allocate(length);
    }
    readFully(result.array(), result.position(), length);
    ((Buffer) result).limit(length);
    return result;
  }

  /** Skip a length-prefixed byte sequence. */
  public void skipBytes() throws IOException {
    skip(readInt());
  }

  private void skip(long length) throws IOException {
    seek(tell() + length);
  }

  /** Read a single byte (0-255), refilling the buffer when exhausted. */
  public int read() throws IOException {
    if (pos >= limit) {
      limit = readInput(buf, 0, buf.length);
      pos = 0;
    }
    return buf[pos++] & 0xFF;
  }

  public void readFully(byte[] bytes) throws IOException {
    readFully(bytes, 0, bytes.length);
  }

  /** Read exactly {@code len} bytes into {@code bytes} starting at {@code start}. */
  public void readFully(byte[] bytes, int start, int len) throws IOException {
    int buffered = limit - pos;
    if (len > buffered) { // buffer is insufficient

      System.arraycopy(buf, pos, bytes, start, buffered); // consume buffer
      start += buffered;
      len -= buffered;
      pos += buffered;
      if (len > buf.length) { // bigger than buffer
        do {
          int read = readInput(bytes, start, len); // read directly into result
          len -= read;
          start += read;
        } while (len > 0);
        return;
      }

      limit = readInput(buf, 0, buf.length); // refill buffer
      pos = 0;
    }

    System.arraycopy(buf, pos, bytes, start, len); // copy from buffer
    pos += len;
  }

  // Read up to len bytes from the underlying input at the current offset,
  // advancing the offset; throws EOFException at end of input.
  private int readInput(byte[] b, int start, int len) throws IOException {
    int read = in.read(offset, b, start, len);
    if (read < 0)
      throw new EOFException();
    offset += read;
    return read;
  }
}
| 7,405 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/BlockDescriptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
/** Fixed-size descriptor for one compressed block: row count and sizes. */
class BlockDescriptor {
  int rowCount;
  int uncompressedSize;
  int compressedSize;

  BlockDescriptor() {
  }

  BlockDescriptor(int rowCount, int uncompressedSize, int compressedSize) {
    this.rowCount = rowCount;
    this.uncompressedSize = uncompressedSize;
    this.compressedSize = compressedSize;
  }

  /** Serialize as three little-endian 32-bit fields, in declaration order. */
  public void writeTo(OutputBuffer out) throws IOException {
    out.writeFixed32(rowCount);
    out.writeFixed32(uncompressedSize);
    out.writeFixed32(compressedSize);
  }

  /** Deserialize a descriptor; fields are read in the order written by writeTo. */
  public static BlockDescriptor read(InputBuffer in) throws IOException {
    // Java evaluates arguments left-to-right, matching writeTo's field order.
    return new BlockDescriptor(in.readFixed32(), in.readFixed32(), in.readFixed32());
  }
}
| 7,406 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnValues.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Iterator;
/** An iterator over column values. */
public class ColumnValues<T extends Comparable> implements Iterator<T>, Iterable<T> {
private final ColumnDescriptor column;
private final ValueType type;
private final Codec codec;
private final Checksum checksum;
private final InputBuffer in;
private InputBuffer values;
private int block = -1;
private long row = 0;
private T previous;
private int arrayLength;
ColumnValues(ColumnDescriptor column) throws IOException {
this.column = column;
this.type = column.metaData.getType();
this.codec = Codec.get(column.metaData);
this.checksum = Checksum.get(column.metaData);
this.in = new InputBuffer(column.file);
column.ensureBlocksRead();
}
/** Return the current row number within this file. */
public long getRow() {
return row;
}
/** Seek to the named row. */
public void seek(long r) throws IOException {
if (r < row || r >= column.lastRow(block)) // not in current block
startBlock(column.findBlock(r)); // seek to block start
while (r > row && hasNext()) { // skip within block
values.skipValue(type);
row++;
}
previous = null;
}
/** Seek to the named value. */
public void seek(T v) throws IOException {
if (!column.metaData.hasIndexValues())
throw new TrevniRuntimeException("Column does not have value index: " + column.metaData.getName());
if (previous == null // not in current block?
|| previous.compareTo(v) > 0
|| (block != column.blockCount() - 1 && column.firstValues[block + 1].compareTo(v) <= 0))
startBlock(column.findBlock(v)); // seek to block start
while (hasNext()) { // scan block
long savedPosition = values.tell();
T savedPrevious = previous;
if (next().compareTo(v) >= 0) {
values.seek(savedPosition);
previous = savedPrevious;
row--;
return;
}
}
}
private void startBlock(int block) throws IOException {
this.block = block;
this.row = column.firstRows[block];
in.seek(column.blockStarts[block]);
int end = column.blocks[block].compressedSize;
byte[] raw = new byte[end + checksum.size()];
in.readFully(raw);
ByteBuffer data = codec.decompress(ByteBuffer.wrap(raw, 0, end));
if (!checksum.compute(data).equals(ByteBuffer.wrap(raw, end, checksum.size())))
throw new IOException("Checksums mismatch.");
values = new InputBuffer(new InputBytes(data));
}
@Override
public Iterator iterator() {
return this;
}
@Override
public boolean hasNext() {
return block < column.blockCount() - 1 || row < column.lastRow(block);
}
@Override
public T next() {
if (column.metaData.isArray() || column.metaData.getParent() != null)
throw new TrevniRuntimeException("Column is array: " + column.metaData.getName());
try {
startRow();
return nextValue();
} catch (IOException e) {
throw new TrevniRuntimeException(e);
}
}
/**
 * Expert: Must be called before any calls to {@link #nextLength()} or
 * {@link #nextValue()}. Advances to the next row, loading the next block
 * when the current one is exhausted.
 */
public void startRow() throws IOException {
  if (row >= column.lastRow(block)) {
    if (block >= column.blockCount())
      throw new TrevniRuntimeException("Read past end of column.");
    startBlock(block + 1);
  }
  row++;
}
/** Expert: Returns the next length in an array column. */
public int nextLength() throws IOException {
  if (!column.metaData.isArray())
    throw new TrevniRuntimeException("Column is not array: " + column.metaData.getName());
  // The previous array must have been fully consumed before reading a new length.
  assert arrayLength == 0;
  return arrayLength = values.readLength();
}
/**
 * Expert: Returns the next value in a column, consuming one element of the
 * pending array length (if any) and remembering it for {@link #seek(Comparable)}.
 */
public T nextValue() throws IOException {
  arrayLength--;
  return previous = values.readValue(type);
}
/** Not supported: column files are read-only. */
@Override
public void remove() {
  throw new UnsupportedOperationException();
}
}
| 7,407 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/DeflateCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.Inflater;
import java.util.zip.InflaterOutputStream;
/** Implements DEFLATE (RFC1951) compression and decompression. */
class DeflateCodec extends Codec {

  // Created lazily on first use, then reused across calls to avoid
  // per-block allocation.
  private ByteArrayOutputStream outputBuffer;
  private Deflater deflater;
  private Inflater inflater;

  @Override
  ByteBuffer compress(ByteBuffer data) throws IOException {
    ByteArrayOutputStream sink = getOutputBuffer(data.remaining());
    try (OutputStream compressing = new DeflaterOutputStream(sink, getDeflater())) {
      compressing.write(data.array(), computeOffset(data), data.remaining());
    }
    return ByteBuffer.wrap(sink.toByteArray());
  }

  @Override
  ByteBuffer decompress(ByteBuffer data) throws IOException {
    ByteArrayOutputStream sink = getOutputBuffer(data.remaining());
    try (OutputStream decompressing = new InflaterOutputStream(sink, getInflater())) {
      decompressing.write(data.array(), computeOffset(data), data.remaining());
    }
    return ByteBuffer.wrap(sink.toByteArray());
  }

  // "nowrap" (true): raw DEFLATE streams without zlib headers.
  private Inflater getInflater() {
    if (inflater == null)
      inflater = new Inflater(true);
    inflater.reset();
    return inflater;
  }

  private Deflater getDeflater() {
    if (deflater == null)
      deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
    deflater.reset();
    return deflater;
  }

  // Reset and return the shared output buffer, sized on first use.
  private ByteArrayOutputStream getOutputBuffer(int suggestedLength) {
    if (outputBuffer == null)
      outputBuffer = new ByteArrayOutputStream(suggestedLength);
    outputBuffer.reset();
    return outputBuffer;
  }
}
| 7,408 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ValueType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
/** The datatypes that may be stored in a column. */
public enum ValueType {
  NULL, BOOLEAN, INT, LONG, FIXED32, FIXED64, FLOAT, DOUBLE, STRING, BYTES;

  // Lower-case form of the constant's name, computed once per constant.
  private final String name;

  private ValueType() {
    // Use a fixed locale: the default locale's case mapping (e.g. the
    // Turkish dotless i) would otherwise corrupt names such as "INT".
    this.name = this.name().toLowerCase(java.util.Locale.ENGLISH);
  }

  /** Return the name of this type. */
  public String getName() {
    return name;
  }

  /** Return a type given its name (case-insensitive). */
  public static ValueType forName(String name) {
    return valueOf(name.toUpperCase(java.util.Locale.ENGLISH));
  }
}
| 7,409 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.io.Closeable;
import java.io.File;
import java.util.Arrays;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.HashMap;
/** Reads data from a column file. */
public class ColumnFileReader implements Closeable {
  private Input file;
  private long rowCount;
  private int columnCount;
  private ColumnFileMetaData metaData;
  private ColumnDescriptor[] columns;
  private Map<String, ColumnDescriptor> columnsByName;

  /** Construct reading from the named file. */
  public ColumnFileReader(File file) throws IOException {
    this(new InputFile(file));
  }

  /** Construct reading from the provided input. */
  public ColumnFileReader(Input file) throws IOException {
    this.file = file;
    readHeader();
  }

  /** Return the number of rows in this file. */
  public long getRowCount() {
    return rowCount;
  }

  /** Return the number of columns in this file. */
  public long getColumnCount() {
    return columnCount;
  }

  /** Return this file's metadata. */
  public ColumnFileMetaData getMetaData() {
    return metaData;
  }

  /** Return all columns' metadata. */
  public ColumnMetaData[] getColumnMetaData() {
    ColumnMetaData[] result = new ColumnMetaData[columnCount];
    for (int i = 0; i < columnCount; i++)
      result[i] = columns[i].metaData;
    return result;
  }

  /** Return root columns' metadata. Roots are columns that have no parent. */
  public List<ColumnMetaData> getRoots() {
    List<ColumnMetaData> result = new ArrayList<>();
    for (int i = 0; i < columnCount; i++)
      if (columns[i].metaData.getParent() == null)
        result.add(columns[i].metaData);
    return result;
  }

  /** Return a column's metadata, by column number. */
  public ColumnMetaData getColumnMetaData(int number) {
    return columns[number].metaData;
  }

  /** Return a column's metadata, by column name. */
  public ColumnMetaData getColumnMetaData(String name) {
    return getColumn(name).metaData;
  }

  private <T extends Comparable> ColumnDescriptor<T> getColumn(String name) {
    ColumnDescriptor column = columnsByName.get(name);
    if (column == null)
      throw new TrevniRuntimeException("No column named: " + name);
    return (ColumnDescriptor<T>) column;
  }

  // Parse the file header: magic, row count, column count, file metadata,
  // per-column metadata, then per-column start offsets.
  private void readHeader() throws IOException {
    InputBuffer in = new InputBuffer(file, 0);
    readMagic(in);
    this.rowCount = in.readFixed64();
    this.columnCount = in.readFixed32();
    this.metaData = ColumnFileMetaData.read(in);
    this.columnsByName = new HashMap<>(columnCount);

    columns = new ColumnDescriptor[columnCount];
    readColumnMetaData(in);
    readColumnStarts(in);
  }

  // Verify the magic number, accepting the current format and the two prior
  // versions.
  private void readMagic(InputBuffer in) throws IOException {
    byte[] magic = new byte[ColumnFileWriter.MAGIC.length];
    try {
      in.readFully(magic);
    } catch (IOException e) {
      throw new IOException("Not a data file.");
    }
    // Fix: the previous expression negated the MAGIC_1/MAGIC_0 comparisons
    // inside the disjunction, so it could never throw and accepted any
    // header bytes as a valid file.
    if (!(Arrays.equals(ColumnFileWriter.MAGIC, magic) || Arrays.equals(ColumnFileWriter.MAGIC_1, magic)
        || Arrays.equals(ColumnFileWriter.MAGIC_0, magic)))
      throw new IOException("Not a data file.");
  }

  private void readColumnMetaData(InputBuffer in) throws IOException {
    for (int i = 0; i < columnCount; i++) {
      ColumnMetaData meta = ColumnMetaData.read(in, this);
      meta.setDefaults(this.metaData); // fall back to file-level metadata
      ColumnDescriptor column = new ColumnDescriptor(file, meta);
      columns[i] = column;
      meta.setNumber(i);
      columnsByName.put(meta.getName(), column);
    }
  }

  private void readColumnStarts(InputBuffer in) throws IOException {
    for (int i = 0; i < columnCount; i++)
      columns[i].start = in.readFixed64();
  }

  /** Return an iterator over values in the named column. */
  public <T extends Comparable> ColumnValues<T> getValues(String columnName) throws IOException {
    return new ColumnValues<>(getColumn(columnName));
  }

  /** Return an iterator over values in a column, by column number. */
  public <T extends Comparable> ColumnValues<T> getValues(int column) throws IOException {
    return new ColumnValues<>(columns[column]);
  }

  @Override
  public void close() throws IOException {
    file.close();
  }
}
| 7,410 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/MetaData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.LinkedHashMap;
/** Base class for metadata. */
public class MetaData<T extends MetaData> extends LinkedHashMap<String, byte[]> {

  static final String RESERVED_KEY_PREFIX = "trevni.";
  static final String CODEC_KEY = RESERVED_KEY_PREFIX + "codec";
  static final String CHECKSUM_KEY = RESERVED_KEY_PREFIX + "checksum";

  // Fallback metadata consulted by getString() when a key is absent here.
  private MetaData<?> defaults;

  /** Install the fallback metadata consulted when a key is absent here. */
  void setDefaults(MetaData defaults) {
    this.defaults = defaults;
  }

  /** Return the compression codec name. */
  public String getCodec() {
    return getString(CODEC_KEY);
  }

  /** Set the compression codec name. */
  public T setCodec(String codec) {
    setReserved(CODEC_KEY, codec);
    return (T) this;
  }

  /** Return the checksum algorithm name. */
  public String getChecksum() {
    return getString(CHECKSUM_KEY);
  }

  /** Set the checksum algorithm name. */
  public T setChecksum(String checksum) {
    setReserved(CHECKSUM_KEY, checksum);
    return (T) this;
  }

  /** Return the value of a metadata property as a String, consulting the defaults. */
  public String getString(String key) {
    byte[] raw = get(key);
    if (raw == null && defaults != null)
      raw = defaults.get(key);
    return raw == null ? null : new String(raw, StandardCharsets.UTF_8);
  }

  /** Return the value of a metadata property as a long. */
  public long getLong(String key) {
    return Long.parseLong(getString(key));
  }

  /** Return true iff a key has any value, false if it is not present. */
  public boolean getBoolean(String key) {
    return get(key) != null;
  }

  /** Set a metadata property to a binary value. */
  public T set(String key, byte[] value) {
    if (isReserved(key)) {
      throw new TrevniRuntimeException("Cannot set reserved key: " + key);
    }
    put(key, value);
    return (T) this;
  }

  /** Test if a metadata key is reserved. */
  public static boolean isReserved(String key) {
    return key.startsWith(RESERVED_KEY_PREFIX);
  }

  /** Set a metadata property to a String value. */
  public T set(String key, String value) {
    return set(key, value.getBytes(StandardCharsets.UTF_8));
  }

  // Like set(String, String) but bypasses the reserved-key check.
  T setReserved(String key, String value) {
    put(key, value.getBytes(StandardCharsets.UTF_8));
    return (T) this;
  }

  // Presence-style boolean: an empty value when true, key removed when false.
  T setReservedBoolean(String key, boolean value) {
    if (value) {
      setReserved(key, "");
    } else {
      remove(key);
    }
    return (T) this;
  }

  /** Set a metadata property to a long value. */
  public T set(String key, long value) {
    return set(key, Long.toString(value));
  }

  // Serialize as a count followed by key/value pairs.
  void write(OutputBuffer out) throws IOException {
    out.writeInt(size());
    for (Map.Entry<String, byte[]> entry : entrySet()) {
      out.writeString(entry.getKey());
      out.writeBytes(entry.getValue());
    }
  }

  // Populate metaData from the serialized form produced by write().
  static void read(InputBuffer in, MetaData<?> metaData) throws IOException {
    int pairs = in.readInt();
    for (int i = 0; i < pairs; i++)
      metaData.put(in.readString(), in.readBytes());
  }

  @Override
  public String toString() {
    // ISO-8859-1 maps every byte to a character, so arbitrary binary values
    // render without throwing.
    StringBuilder text = new StringBuilder("{ ");
    for (Map.Entry<String, byte[]> entry : entrySet())
      text.append(entry.getKey()).append('=').append(new String(entry.getValue(), StandardCharsets.ISO_8859_1))
          .append(' ');
    return text.append('}').toString();
  }
}
| 7,411 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnFileWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.Set;
import java.util.HashSet;
/**
 * Writes data to a column file. All data is buffered until
 * {@link #writeTo(File)} is called.
 */
public class ColumnFileWriter {
  // File magic: "Trv" followed by a format-version byte.
  static final byte[] MAGIC_0 = new byte[] { 'T', 'r', 'v', 0 };
  static final byte[] MAGIC_1 = new byte[] { 'T', 'r', 'v', 1 };
  static final byte[] MAGIC = new byte[] { 'T', 'r', 'v', 2 };

  private ColumnFileMetaData metaData; // file-level metadata
  private ColumnOutputBuffer[] columns; // one buffer per column
  private long rowCount; // rows added so far
  private int columnCount;
  private long size; // running over-estimate of the final file size

  /** Construct given metadata for each column in the file. */
  public ColumnFileWriter(ColumnFileMetaData fileMeta, ColumnMetaData... columnMeta) throws IOException {
    checkColumns(columnMeta);
    this.metaData = fileMeta;
    this.columnCount = columnMeta.length;
    this.columns = new ColumnOutputBuffer[columnCount];
    for (int i = 0; i < columnCount; i++) {
      ColumnMetaData c = columnMeta[i];
      c.setDefaults(metaData); // column falls back to file-level metadata
      // Array columns need length-aware buffering.
      columns[i] = c.isArray() ? new ArrayColumnOutputBuffer(this, c) : new ColumnOutputBuffer(this, c);
      size += OutputBuffer.BLOCK_SIZE; // over-estimate
    }
  }

  // Validate that column names are unique and every parent precedes its children.
  private void checkColumns(ColumnMetaData[] columnMeta) {
    Set<String> seen = new HashSet<>();
    for (ColumnMetaData c : columnMeta) {
      String name = c.getName();
      if (seen.contains(name))
        throw new TrevniRuntimeException("Duplicate column name: " + name);
      ColumnMetaData parent = c.getParent();
      if (parent != null && !seen.contains(parent.getName()))
        throw new TrevniRuntimeException("Parent must precede child: " + name);
      seen.add(name);
    }
  }

  // Called by column buffers as they grow, to keep the size estimate current.
  void incrementSize(int n) {
    size += n;
  }

  /**
   * Return the approximate size of the file that will be written. Tries to
   * slightly over-estimate. Indicates both the size in memory of the buffered
   * data as well as the size of the file that will be written by
   * {@link #writeTo(OutputStream)}.
   */
  public long sizeEstimate() {
    return size;
  }

  /** Return this file's metadata. */
  public ColumnFileMetaData getMetaData() {
    return metaData;
  }

  /** Return the number of columns in the file. */
  public int getColumnCount() {
    return columnCount;
  }

  /** Add a row to the file: one value per column, in column order. */
  public void writeRow(Object... row) throws IOException {
    startRow();
    for (int column = 0; column < columnCount; column++)
      writeValue(row[column], column);
    endRow();
  }

  /** Expert: Called before any values are written to a row. */
  public void startRow() throws IOException {
    for (int column = 0; column < columnCount; column++)
      columns[column].startRow();
  }

  /**
   * Expert: Declare a count of items to be written to an array column or a column
   * whose parent is an array.
   */
  public void writeLength(int length, int column) throws IOException {
    columns[column].writeLength(length);
  }

  /**
   * Expert: Add a value to a row. For values in array columns or whose parents
   * are array columns, this must be preceded by a call to
   * {@link #writeLength(int, int)} and must be called that many times. For normal
   * columns this is called once for each row in the column.
   */
  public void writeValue(Object value, int column) throws IOException {
    columns[column].writeValue(value);
  }

  /** Expert: Called after all values are written to a row. */
  public void endRow() throws IOException {
    for (int column = 0; column < columnCount; column++)
      columns[column].endRow();
    rowCount++;
  }

  /** Write all rows added to the named file. */
  public void writeTo(File file) throws IOException {
    try (OutputStream out = new FileOutputStream(file)) {
      writeTo(out);
    }
  }

  /** Write all rows added to the named output stream. */
  public void writeTo(OutputStream out) throws IOException {
    writeHeader(out);

    for (int column = 0; column < columnCount; column++)
      columns[column].writeTo(out);
  }

  // Emit the header: magic, counts, file metadata, per-column metadata, and
  // the absolute start offset of each column's data.
  private void writeHeader(OutputStream out) throws IOException {
    OutputBuffer header = new OutputBuffer();

    header.write(MAGIC); // magic

    header.writeFixed64(rowCount); // row count

    header.writeFixed32(columnCount); // column count

    metaData.write(header); // file metadata

    for (ColumnOutputBuffer column : columns)
      column.getMeta().write(header); // column metadata

    for (long start : computeStarts(header.size()))
      header.writeFixed64(start); // column starts

    header.writeTo(out);
  }

  // Compute each column's absolute start offset; `start` is the header size
  // so far, before the table of starts itself is appended.
  private long[] computeStarts(long start) throws IOException {
    long[] result = new long[columnCount];
    start += columnCount * 8; // room for starts
    for (int column = 0; column < columnCount; column++) {
      result[column] = start;
      start += columns[column].size();
    }
    return result;
  }
}
| 7,412 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/Input.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.io.Closeable;
/** A byte source that supports positioned read and length. */
public interface Input extends Closeable {
  /** Return the total length of the input, in bytes. */
  long length() throws IOException;

  /**
   * Positioned read: read up to {@code len} bytes into {@code b} starting at
   * index {@code start}, from absolute input offset {@code position}.
   *
   * @return the number of bytes actually read
   */
  int read(long position, byte[] b, int start, int len) throws IOException;
}
| 7,413 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/NullCodec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.nio.ByteBuffer;
/** Implements "null" (pass through) codec. */
final class NullCodec extends Codec {

  /** Pass through: returns the input buffer unchanged. */
  @Override
  ByteBuffer compress(ByteBuffer buffer) throws IOException {
    return buffer;
  }

  /** Pass through: returns the input buffer unchanged. */
  @Override
  ByteBuffer decompress(ByteBuffer data) throws IOException {
    return data;
  }
}
| 7,414 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/InputBytes.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
/** An {@link Input} backed with data in a byte array. */
public class InputBytes extends ByteArrayInputStream implements Input {

  /** Construct for the given bytes. */
  public InputBytes(byte[] data) {
    super(data);
  }

  /** Construct for the bytes remaining in the given buffer. */
  public InputBytes(ByteBuffer data) {
    // ByteArrayInputStream takes (buf, offset, LENGTH). The previous code
    // passed data.limit() — an end index — which over-counted by position()
    // bytes whenever position() > 0, letting reads run past the limit.
    super(data.array(), data.position(), data.remaining());
  }

  @Override
  public long length() throws IOException {
    return this.count;
  }

  @Override
  public synchronized int read(long pos, byte[] b, int start, int len) throws IOException {
    // Positions are bounded by the backing array, so the narrowing cast is safe.
    this.pos = (int) pos;
    return read(b, start, len);
  }

  byte[] getBuffer() {
    return buf;
  }
}
| 7,415 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnDescriptor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.util.Arrays;
/**
 * Per-column bookkeeping for a reader: where the column's blocks live in the
 * file and the indexes needed to binary-search them.
 */
class ColumnDescriptor<T extends Comparable> {
  final Input file; // the backing byte source
  final ColumnMetaData metaData;

  long start; // file offset of this column's block descriptors
  long dataStart; // file offset of the first data block

  BlockDescriptor[] blocks;

  long[] blockStarts; // for random access
  long[] firstRows; // for binary searches
  T[] firstValues; // for binary searches

  public ColumnDescriptor(Input file, ColumnMetaData metaData) {
    this.file = file;
    this.metaData = metaData;
  }

  /** Return the index of the block containing the given row. */
  public int findBlock(long row) {
    int block = Arrays.binarySearch(firstRows, row);
    if (block < 0)
      // insertion point - 1: the block whose first row precedes `row`
      block = -block - 2;
    return block;
  }

  /** Return the index of the last block whose first value is <= the given value. */
  public int findBlock(T value) {
    int block = Arrays.binarySearch(firstValues, value);
    if (block < 0)
      block = -block - 2;
    return block;
  }

  public int blockCount() {
    return blocks.length;
  }

  /** Return the row number just past the end of the given block. */
  public long lastRow(int block) {
    if (blocks.length == 0 || block < 0)
      return 0;
    return firstRows[block] + blocks[block].rowCount;
  }

  /**
   * Lazily read this column's block descriptors and derive per-block file
   * offsets and first-row numbers. No-op once loaded.
   */
  public void ensureBlocksRead() throws IOException {
    if (blocks != null)
      return;

    // read block descriptors
    InputBuffer in = new InputBuffer(file, start);
    int blockCount = in.readFixed32();
    BlockDescriptor[] blocks = new BlockDescriptor[blockCount];
    if (metaData.hasIndexValues())
      firstValues = (T[]) new Comparable[blockCount];

    for (int i = 0; i < blockCount; i++) {
      blocks[i] = BlockDescriptor.read(in);
      if (metaData.hasIndexValues())
        firstValues[i] = in.readValue(metaData.getType());
    }
    dataStart = in.tell();

    // compute blockStarts and firstRows
    Checksum checksum = Checksum.get(metaData);
    blockStarts = new long[blocks.length];
    firstRows = new long[blocks.length];
    long startPosition = dataStart;
    long row = 0;
    for (int i = 0; i < blockCount; i++) {
      BlockDescriptor b = blocks[i];
      blockStarts[i] = startPosition;
      firstRows[i] = row;
      // each block's payload is followed by its checksum bytes
      startPosition += b.compressedSize + checksum.size();
      row += b.rowCount;
    }
    // Assigned last: if reading fails above, `blocks` stays null and a
    // later call re-reads from scratch.
    this.blocks = blocks;
  }
}
| 7,416 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/Codec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.nio.ByteBuffer;
/** Interface for compression codecs. */
abstract class Codec {

  /** Return the codec named by the metadata; absent or "null" selects pass-through. */
  public static Codec get(MetaData meta) {
    String name = meta.getCodec();
    if (name == null)
      return new NullCodec();
    switch (name) {
    case "null":
      return new NullCodec();
    case "deflate":
      return new DeflateCodec();
    case "snappy":
      return new SnappyCodec();
    case "bzip2":
      return new BZip2Codec();
    default:
      throw new TrevniRuntimeException("Unknown codec: " + name);
    }
  }

  /** Compress data */
  abstract ByteBuffer compress(ByteBuffer uncompressedData) throws IOException;

  /** Decompress data */
  abstract ByteBuffer decompress(ByteBuffer compressedData) throws IOException;

  /**
   * Codecs often reference the array inside a ByteBuffer. Compute the offset to
   * the start of data correctly in the case that our ByteBuffer is a slice() of
   * another.
   */
  protected static int computeOffset(ByteBuffer data) {
    return data.arrayOffset() + data.position();
  }
}
| 7,417 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/Checksum.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.nio.ByteBuffer;
/** Interface for checksum algorithms. */
abstract class Checksum {

  /** Return the algorithm named by the metadata; absent or "null" selects the no-op checksum. */
  public static Checksum get(MetaData meta) {
    String name = meta.getChecksum();
    if (name == null || "null".equals(name))
      return new NullChecksum();
    if ("crc32".equals(name))
      return new Crc32Checksum();
    throw new TrevniRuntimeException("Unknown checksum: " + name);
  }

  /** The number of bytes per checksum. */
  public abstract int size();

  /** Compute a checksum. */
  public abstract ByteBuffer compute(ByteBuffer data);
}
| 7,418 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/Crc32Checksum.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.zip.CRC32;
/** Implements CRC32 checksum. */
final class Crc32Checksum extends Checksum {
  // Reused across calls; reset before each computation.
  private CRC32 crc32 = new CRC32();

  /** CRC32 checksums occupy four bytes. */
  @Override
  public int size() {
    return 4;
  }

  /** Compute the CRC32 of the bytes remaining in {@code data} as a 4-byte buffer. */
  @Override
  public ByteBuffer compute(ByteBuffer data) {
    crc32.reset();
    // Include arrayOffset() so sliced buffers checksum the intended bytes
    // (mirrors Codec.computeOffset); arrayOffset() is 0 for the buffers the
    // current callers pass, so existing checksums are unchanged.
    crc32.update(data.array(), data.arrayOffset() + data.position(), data.remaining());
    ByteBuffer result = ByteBuffer.allocate(size());
    result.putInt((int) crc32.getValue());
    // Cast keeps the flip() call compatible when compiled against Java 9+
    // APIs but run on Java 8 (covariant Buffer returns).
    ((Buffer) result).flip();
    return result;
  }
}
| 7,419 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/ColumnOutputBuffer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
/**
 * Buffers the values written to one column of a Trevni file. Values
 * accumulate in an {@link OutputBuffer}; when it fills, the contents are
 * compressed and checksummed into a completed block. {@link #writeTo} emits
 * the block count, then the per-block descriptors (with optional index
 * values), then the block payloads.
 */
class ColumnOutputBuffer {
  private ColumnFileWriter writer; // owning file writer; notified of size growth
  private ColumnMetaData meta; // column metadata: type, codec, checksum, index flag
  private Codec codec; // compression codec selected from the metadata
  private Checksum checksum; // checksum algorithm selected from the metadata
  private OutputBuffer buffer; // values of the current, not-yet-flushed block
  private List<BlockDescriptor> blockDescriptors; // one descriptor per finished block
  private List<byte[]> blockData; // compressed payload + checksum per finished block
  private List<byte[]> firstValues; // serialized first value per block (indexed columns only)
  private int rowCount; // rows accumulated in the current block
  private long size = 4; // room for block count

  public ColumnOutputBuffer(ColumnFileWriter writer, ColumnMetaData meta) throws IOException {
    this.writer = writer;
    this.meta = meta;
    this.codec = Codec.get(meta);
    this.checksum = Checksum.get(meta);
    this.buffer = new OutputBuffer();
    this.blockDescriptors = new ArrayList<>();
    this.blockData = new ArrayList<>();
    // firstValues stays null for non-indexed columns; used as a flag below
    if (meta.hasIndexValues())
      this.firstValues = new ArrayList<>();
  }

  public ColumnMetaData getMeta() {
    return meta;
  }

  public OutputBuffer getBuffer() {
    return buffer;
  }

  /** Starts a row, first flushing the current block if the buffer is full. */
  public void startRow() throws IOException {
    if (buffer.isFull())
      flushBuffer();
  }

  /**
   * Array columns override this; writing a length to a plain column is an
   * error.
   *
   * @throws TrevniRuntimeException always, for this non-array implementation
   */
  public void writeLength(int length) throws IOException {
    throw new TrevniRuntimeException("Not an array column: " + meta);
  }

  /**
   * Appends a value to the current block. For indexed columns, the buffer
   * contents as of the block's first row are recorded as the block's index
   * value (rowCount is 0 until the first endRow of a new block).
   */
  public void writeValue(Object value) throws IOException {
    buffer.writeValue(value, meta.getType());
    if (meta.hasIndexValues() && rowCount == 0)
      firstValues.add(buffer.toByteArray());
  }

  public void endRow() throws IOException {
    rowCount++;
  }

  /**
   * Finishes the current block: compresses the buffered values, records a
   * descriptor, appends the checksum (computed from the uncompressed buffer),
   * and starts a fresh buffer. No-op when no rows are buffered.
   */
  void flushBuffer() throws IOException {
    if (rowCount == 0)
      return;
    ByteBuffer raw = buffer.asByteBuffer();
    ByteBuffer c = codec.compress(raw);
    blockDescriptors.add(new BlockDescriptor(rowCount, raw.remaining(), c.remaining()));
    // block payload: compressed data immediately followed by its checksum
    ByteBuffer data = ByteBuffer.allocate(c.remaining() + checksum.size());
    data.put(c);
    data.put(checksum.compute(raw));
    blockData.add(data.array());
    // account for the bytes this block will add to the file
    int sizeIncrement = (4 * 3) // descriptor
        + (firstValues != null // firstValue
            ? firstValues.get(firstValues.size() - 1).length
            : 0)
        + data.position(); // data
    writer.incrementSize(sizeIncrement);
    size += sizeIncrement;
    buffer = new OutputBuffer();
    rowCount = 0;
  }

  /** Returns this column's total size in bytes, flushing any pending block first. */
  public long size() throws IOException {
    flushBuffer();
    return size;
  }

  /**
   * Serializes the column: a fixed-32 block count, each block's descriptor
   * (plus its index value for indexed columns), then every block payload.
   */
  public void writeTo(OutputStream out) throws IOException {
    OutputBuffer header = new OutputBuffer();
    header.writeFixed32(blockDescriptors.size());
    for (int i = 0; i < blockDescriptors.size(); i++) {
      blockDescriptors.get(i).writeTo(header);
      if (meta.hasIndexValues())
        header.write(firstValues.get(i));
    }
    header.writeTo(out);
    for (byte[] data : blockData)
      out.write(data);
  }
}
| 7,420 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/InputFile.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.StandardOpenOption;
/** An {@link Input} for files. */
public class InputFile implements Input {
  private final FileChannel channel;

  /**
   * Construct for the given file.
   *
   * @param file the file to read
   * @throws IOException if the file cannot be opened for reading
   */
  public InputFile(File file) throws IOException {
    // FileChannel.open fails fast with a descriptive NoSuchFileException and,
    // unlike new FileInputStream(...).getChannel(), involves no finalizer.
    this.channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);
  }

  /** @return the file's length in bytes. */
  @Override
  public long length() throws IOException {
    return channel.size();
  }

  /**
   * Reads up to {@code len} bytes at the given absolute file position into
   * {@code b} starting at {@code start}. The positional read does not move
   * the channel's own position, so concurrent reads are safe.
   *
   * @return the number of bytes read, or -1 at end of file
   */
  @Override
  public int read(long position, byte[] b, int start, int len) throws IOException {
    return channel.read(ByteBuffer.wrap(b, start, len), position);
  }

  @Override
  public void close() throws IOException {
    channel.close();
  }
}
| 7,421 |
0 | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache | Create_ds/avro/lang/java/trevni/core/src/main/java/org/apache/trevni/NullChecksum.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni;
import java.nio.ByteBuffer;
/** Implements "null" (empty) checksum. */
final class NullChecksum extends Checksum {

  /** A null checksum contributes no bytes to a block. */
  @Override
  public int size() {
    return 0;
  }

  /** Always yields an empty buffer, whatever the input. */
  @Override
  public ByteBuffer compute(ByteBuffer data) {
    return ByteBuffer.allocate(size());
  }
}
| 7,422 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/WordCountUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.io.File;
import java.util.StringTokenizer;
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.fs.FileUtil;
import org.apache.avro.Schema;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.mapred.Pair;
/**
 * Shared fixture for the Trevni word-count map/reduce tests: computes the
 * expected word counts for a fixed set of input lines, writes those lines as
 * an Avro data file, and validates the counts read back from Trevni output.
 */
public class WordCountUtil {

  public File dir;
  public File linesFiles;
  public File countFiles;

  public WordCountUtil(String testName) {
    this(testName, "part-00000");
  }

  /**
   * @param testName    used as the working directory name under target/wc
   * @param partDirName name of the output partition directory
   */
  public WordCountUtil(String testName, String partDirName) {
    dir = new File("target/wc", testName);
    linesFiles = new File(new File(dir, "in"), "lines.avro");
    countFiles = new File(new File(dir, "out"), partDirName + "/part-0.trv");
  }

  /** Input lines for the word count. */
  public static final String[] LINES = new String[] { "the quick brown fox jumps over the lazy dog",
      "the cow jumps over the moon", "the rain in spain falls mainly on the plains" };

  /** Expected per-word counts, derived from {@link #LINES}. */
  public static final Map<String, Long> COUNTS = new TreeMap<>();

  /** Total number of words in {@link #LINES}. */
  public static final long TOTAL;

  static {
    long total = 0;
    for (String line : LINES) {
      StringTokenizer tokens = new StringTokenizer(line);
      while (tokens.hasMoreTokens()) {
        COUNTS.merge(tokens.nextToken(), 1L, Long::sum);
        total++;
      }
    }
    TOTAL = total;
  }

  public File getDir() {
    return dir;
  }

  /** Deletes any previous test output and writes LINES as an Avro file of strings. */
  public void writeLinesFile() throws IOException {
    FileUtil.fullyDelete(dir);
    linesFiles.getParentFile().mkdirs();
    DatumWriter<String> writer = new GenericDatumWriter<>();
    // try-with-resources closes the file even when an append fails
    try (DataFileWriter<String> out = new DataFileWriter<>(writer)) {
      out.create(Schema.create(Schema.Type.STRING), linesFiles);
      for (String line : LINES)
        out.append(line);
    }
  }

  /** Reads the Trevni counts file as Pairs and checks every count against COUNTS. */
  public void validateCountsFile() throws Exception {
    int numWords = 0;
    try (AvroColumnReader<Pair<String, Long>> reader = new AvroColumnReader<>(
        new AvroColumnReader.Params(countFiles).setModel(SpecificData.get()))) {
      for (Pair<String, Long> wc : reader) {
        assertEquals(COUNTS.get(wc.key()), wc.value(), wc.key());
        numWords++;
      }
    }
    assertEquals(COUNTS.size(), numWords);
  }

  /** Same as {@link #validateCountsFile()} but reads generic records. */
  public void validateCountsFileGenericRecord() throws Exception {
    int numWords = 0;
    try (AvroColumnReader<GenericRecord> reader = new AvroColumnReader<>(
        new AvroColumnReader.Params(countFiles).setModel(SpecificData.get()))) {
      for (GenericRecord wc : reader) {
        assertEquals(COUNTS.get(wc.get("key")), wc.get("value"), (String) wc.get("key"));
        numWords++;
      }
    }
    assertEquals(COUNTS.size(), numWords);
  }
}
| 7,423 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestCases.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.File;
import java.io.EOFException;
import java.io.InputStream;
import java.io.FileInputStream;
import java.util.List;
import java.util.ArrayList;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.DatumReader;
import org.apache.avro.generic.GenericDatumReader;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Runs the data-driven cases under src/test/cases: each case directory holds
 * an Avro schema (input.avsc) and JSON data (input.json). The data is written
 * to a Trevni file and read back, both with the full schema and with each
 * sub-schema found in the case's subdirectories.
 */
public class TestCases {

  private static final File DIR = new File("src/test/cases/");
  private static final File FILE = new File("target", "case.trv");

  @Test
  void cases() throws Exception {
    // hidden directories (e.g. .svn) are not cases
    for (File f : DIR.listFiles())
      if (f.isDirectory() && !f.getName().startsWith("."))
        runCase(f);
  }

  /** Writes the case's data to FILE, then checks full- and sub-schema reads. */
  private void runCase(File dir) throws Exception {
    Schema schema = new Schema.Parser().parse(new File(dir, "input.avsc"));
    List<Object> data = fromJson(schema, new File(dir, "input.json"));

    // write full data
    AvroColumnWriter<Object> writer = new AvroColumnWriter<>(schema, new ColumnFileMetaData());
    for (Object datum : data)
      writer.write(datum);
    writer.writeTo(FILE);

    // test that the full schema reads correctly
    checkRead(schema, data);

    // test that sub-schemas read correctly
    for (File f : dir.listFiles())
      if (f.isDirectory() && !f.getName().startsWith(".")) {
        Schema s = new Schema.Parser().parse(new File(f, "sub.avsc"));
        checkRead(s, fromJson(s, new File(f, "sub.json")));
      }
  }

  /** Reads FILE with schema s and asserts it yields exactly the given data. */
  private void checkRead(Schema s, List<Object> data) throws Exception {
    try (AvroColumnReader<Object> reader = new AvroColumnReader<>(new AvroColumnReader.Params(FILE).setSchema(s))) {
      for (Object datum : data)
        assertEquals(datum, reader.next());
    }
  }

  /** Parses a file of JSON-encoded datums into a list, stopping at end of file. */
  private List<Object> fromJson(Schema schema, File file) throws Exception {
    List<Object> data = new ArrayList<>();
    try (InputStream in = new FileInputStream(file)) {
      // was raw-typed; parameterize to avoid unchecked warnings
      DatumReader<Object> reader = new GenericDatumReader<>(schema);
      Decoder decoder = DecoderFactory.get().jsonDecoder(schema, in);
      while (true)
        data.add(reader.read(null, decoder));
    } catch (EOFException ignored) {
      // expected: the JSON decoder signals exhaustion of the input this way
    }
    return data;
  }
}
| 7,424 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestWordCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
import org.apache.hadoop.mapred.Reporter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroJob;
import org.apache.avro.mapred.Pair;
import org.apache.avro.mapred.AvroMapper;
import org.apache.avro.mapred.AvroReducer;
import org.apache.avro.mapred.AvroCollector;
import org.apache.avro.Schema;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * End-to-end word count over Trevni via the Hadoop mapred API: first writes
 * counts with AvroTrevniOutputFormat, then reads them back with
 * AvroTrevniInputFormat using a projected sub-schema.
 */
public class TestWordCount {

  /** Emits (word, 1) for every token of an input line. */
  public static class MapImpl extends AvroMapper<String, Pair<String, Long>> {
    @Override
    public void map(String text, AvroCollector<Pair<String, Long>> collector, Reporter reporter) throws IOException {
      StringTokenizer tokens = new StringTokenizer(text);
      while (tokens.hasMoreTokens())
        collector.collect(new Pair<>(tokens.nextToken(), 1L));
    }
  }

  /** Sums the counts collected for a word; also registered as the combiner. */
  public static class ReduceImpl extends AvroReducer<String, Long, Pair<String, Long>> {
    @Override
    public void reduce(String word, Iterable<Long> counts, AvroCollector<Pair<String, Long>> collector,
        Reporter reporter) throws IOException {
      long sum = 0;
      for (long count : counts)
        sum += count;
      collector.collect(new Pair<>(word, sum));
    }
  }

  // The input-format test reads the files the output-format test produced, so
  // both run in order inside a single test method.
  @Test
  void runTestsInOrder() throws Exception {
    testOutputFormat();
    testInputFormat();
  }

  static final Schema STRING = Schema.create(Schema.Type.STRING);
  static {
    // use java.lang.String (not Utf8) for string data in generic records
    GenericData.setStringType(STRING, GenericData.StringType.String);
  }
  static final Schema LONG = Schema.create(Schema.Type.LONG);

  /** Runs the word-count job writing Trevni output, then validates the counts. */
  public void testOutputFormat() throws Exception {
    JobConf job = new JobConf();
    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
    wordCountUtil.writeLinesFile();
    AvroJob.setInputSchema(job, STRING);
    AvroJob.setOutputSchema(job, Pair.getPairSchema(STRING, LONG));
    AvroJob.setMapperClass(job, MapImpl.class);
    AvroJob.setCombinerClass(job, ReduceImpl.class);
    AvroJob.setReducerClass(job, ReduceImpl.class);
    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
    FileOutputFormat.setCompressOutput(job, true);
    job.setOutputFormat(AvroTrevniOutputFormat.class);
    JobClient.runJob(job);
    wordCountUtil.validateCountsFile();
  }

  // Accumulated across Counter.map calls; reset by testInputFormat before the
  // job runs. Works only because the map-only job runs in-process here.
  private static long total;

  /** Map-only consumer that sums the "value" field of each record into total. */
  public static class Counter extends AvroMapper<GenericRecord, Void> {
    @Override
    public void map(GenericRecord r, AvroCollector<Void> collector, Reporter reporter) throws IOException {
      total += (Long) r.get("value");
    }
  }

  /** Reads the Trevni output with a projection containing only the "value" field. */
  public void testInputFormat() throws Exception {
    JobConf job = new JobConf();
    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapredTest");
    // sub-schema with just the long "value" field of the (key, value) pairs
    Schema subSchema = new Schema.Parser().parse("{\"type\":\"record\"," + "\"name\":\"PairValue\"," + "\"fields\": [ "
        + "{\"name\":\"value\", \"type\":\"long\"}" + "]}");
    AvroJob.setInputSchema(job, subSchema);
    AvroJob.setMapperClass(job, Counter.class);
    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
    job.setInputFormat(AvroTrevniInputFormat.class);
    job.setNumReduceTasks(0); // map-only
    job.setOutputFormat(NullOutputFormat.class); // ignore output
    total = 0;
    JobClient.runJob(job);
    assertEquals(WordCountUtil.TOTAL, total);
  }
}
| 7,425 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestShredder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import org.apache.trevni.ValueType;
import org.apache.trevni.ColumnMetaData;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.avro.Schema;
import org.apache.avro.util.RandomData;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Verifies how Avro schemas are shredded into Trevni columns: each test
 * asserts the expected column metadata for a schema, then round-trips random
 * data of that schema through a Trevni file.
 */
public class TestShredder {

  // NOTE(review): time-based seed makes failures hard to reproduce; the seed
  // is at least shared between write and read so round-trips stay consistent.
  private static final long SEED = System.currentTimeMillis();

  private static final int COUNT = 100;
  private static final File FILE = new File("target", "test.trv");

  @Test
  void primitives() throws Exception {
    check(Schema.create(Schema.Type.NULL), new ColumnMetaData("null", ValueType.NULL));
    check(Schema.create(Schema.Type.BOOLEAN), new ColumnMetaData("boolean", ValueType.BOOLEAN));
    check(Schema.create(Schema.Type.INT), new ColumnMetaData("int", ValueType.INT));
    check(Schema.create(Schema.Type.LONG), new ColumnMetaData("long", ValueType.LONG));
    check(Schema.create(Schema.Type.FLOAT), new ColumnMetaData("float", ValueType.FLOAT));
    check(Schema.create(Schema.Type.DOUBLE), new ColumnMetaData("double", ValueType.DOUBLE));
    check(Schema.create(Schema.Type.BYTES), new ColumnMetaData("bytes", ValueType.BYTES));
    check(Schema.create(Schema.Type.STRING), new ColumnMetaData("string", ValueType.STRING));
    // enums shred to their symbol index; fixed shreds to its bytes
    check(Schema.createEnum("E", null, null, Arrays.asList("X", "Y", "Z")), new ColumnMetaData("E", ValueType.INT));
    check(Schema.createFixed("F", null, null, 5), new ColumnMetaData("F", ValueType.BYTES));
  }

  private static final String SIMPLE_FIELDS = "{\"name\":\"x\",\"type\":\"int\"},"
      + "{\"name\":\"y\",\"type\":\"string\"}";
  private static final String SIMPLE_RECORD = "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" + SIMPLE_FIELDS + "]}";

  @Test
  void simpleRecord() throws Exception {
    check(new Schema.Parser().parse(SIMPLE_RECORD), new ColumnMetaData("x", ValueType.INT),
        new ColumnMetaData("y", ValueType.STRING));
  }

  @Test
  void defaultValue() throws Exception {
    // reading with an extra defaulted field must succeed against data written
    // without it
    String s = "{\"type\":\"record\",\"name\":\"R\",\"fields\":[" + SIMPLE_FIELDS + ","
        + "{\"name\":\"z\",\"type\":\"int\"," + "\"default\":1,\"" + RandomData.USE_DEFAULT + "\":true}" + "]}";
    checkWrite(new Schema.Parser().parse(SIMPLE_RECORD));
    checkRead(new Schema.Parser().parse(s));
  }

  @Test
  void nestedRecord() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":[" + "{\"name\":\"x\",\"type\":\"int\"},"
        + "{\"name\":\"R\",\"type\":" + SIMPLE_RECORD + "}," + "{\"name\":\"y\",\"type\":\"string\"}" + "]}";
    check(new Schema.Parser().parse(s), new ColumnMetaData("x", ValueType.INT),
        new ColumnMetaData("R#x", ValueType.INT), new ColumnMetaData("R#y", ValueType.STRING),
        new ColumnMetaData("y", ValueType.STRING));
  }

  @Test
  void namedRecord() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":[" + "{\"name\":\"R1\",\"type\":" + SIMPLE_RECORD + "},"
        + "{\"name\":\"R2\",\"type\":\"R\"}" + "]}";
    check(new Schema.Parser().parse(s), new ColumnMetaData("R1#x", ValueType.INT),
        new ColumnMetaData("R1#y", ValueType.STRING), new ColumnMetaData("R2#x", ValueType.INT),
        new ColumnMetaData("R2#y", ValueType.STRING));
  }

  @Test
  void simpleArray() throws Exception {
    String s = "{\"type\":\"array\",\"items\":\"long\"}";
    check(new Schema.Parser().parse(s), new ColumnMetaData("[]", ValueType.LONG).isArray(true));
  }

  private static final String RECORD_ARRAY = "{\"type\":\"array\",\"items\":" + SIMPLE_RECORD + "}";

  @Test
  void array() throws Exception {
    // an array of records gets a parent column plus one child column per field
    ColumnMetaData p = new ColumnMetaData("[]", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(RECORD_ARRAY), p, new ColumnMetaData("[]#x", ValueType.INT).setParent(p),
        new ColumnMetaData("[]#y", ValueType.STRING).setParent(p));
  }

  @Test
  void simpleUnion() throws Exception {
    String s = "[\"int\",\"string\"]";
    check(new Schema.Parser().parse(s), new ColumnMetaData("int", ValueType.INT).isArray(true),
        new ColumnMetaData("string", ValueType.STRING).isArray(true));
  }

  @Test
  void simpleOptional() throws Exception {
    // the null branch of an optional contributes no column of its own
    String s = "[\"null\",\"string\"]";
    check(new Schema.Parser().parse(s), new ColumnMetaData("string", ValueType.STRING).isArray(true));
  }

  private static final String UNION = "[\"null\",\"int\"," + SIMPLE_RECORD + "]";

  @Test
  void union() throws Exception {
    ColumnMetaData p = new ColumnMetaData("R", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(UNION), new ColumnMetaData("int", ValueType.INT).isArray(true), p,
        new ColumnMetaData("R#x", ValueType.INT).setParent(p),
        new ColumnMetaData("R#y", ValueType.STRING).setParent(p));
  }

  @Test
  void nestedArray() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":[" + "{\"name\":\"x\",\"type\":\"int\"},"
        + "{\"name\":\"A\",\"type\":" + RECORD_ARRAY + "}," + "{\"name\":\"y\",\"type\":\"string\"}" + "]}";
    ColumnMetaData p = new ColumnMetaData("A[]", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(s), new ColumnMetaData("x", ValueType.INT), p,
        new ColumnMetaData("A[]#x", ValueType.INT).setParent(p),
        new ColumnMetaData("A[]#y", ValueType.STRING).setParent(p), new ColumnMetaData("y", ValueType.STRING));
  }

  @Test
  void nestedUnion() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":[" + "{\"name\":\"x\",\"type\":\"int\"},"
        + "{\"name\":\"u\",\"type\":" + UNION + "}," + "{\"name\":\"y\",\"type\":\"string\"}" + "]}";
    ColumnMetaData p = new ColumnMetaData("u/R", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(s), new ColumnMetaData("x", ValueType.INT),
        new ColumnMetaData("u/int", ValueType.INT).isArray(true), p,
        new ColumnMetaData("u/R#x", ValueType.INT).setParent(p),
        new ColumnMetaData("u/R#y", ValueType.STRING).setParent(p), new ColumnMetaData("y", ValueType.STRING));
  }

  @Test
  void unionInArray() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":["
        + "{\"name\":\"a\",\"type\":{\"type\":\"array\",\"items\":" + UNION + "}}" + "]}";
    ColumnMetaData p = new ColumnMetaData("a[]", ValueType.NULL).isArray(true);
    ColumnMetaData r = new ColumnMetaData("a[]/R", ValueType.NULL).setParent(p).isArray(true);
    check(new Schema.Parser().parse(s), p, new ColumnMetaData("a[]/int", ValueType.INT).setParent(p).isArray(true), r,
        new ColumnMetaData("a[]/R#x", ValueType.INT).setParent(r),
        new ColumnMetaData("a[]/R#y", ValueType.STRING).setParent(r));
  }

  @Test
  void arrayInUnion() throws Exception {
    String s = "{\"type\":\"record\",\"name\":\"S\",\"fields\":[" + "{\"name\":\"a\",\"type\":[\"int\"," + RECORD_ARRAY
        + "]}]}";
    ColumnMetaData q = new ColumnMetaData("a/array", ValueType.NULL).isArray(true);
    ColumnMetaData r = new ColumnMetaData("a/array[]", ValueType.NULL).setParent(q).isArray(true);
    check(new Schema.Parser().parse(s), new ColumnMetaData("a/int", ValueType.INT).isArray(true), q, r,
        new ColumnMetaData("a/array[]#x", ValueType.INT).setParent(r),
        new ColumnMetaData("a/array[]#y", ValueType.STRING).setParent(r));
  }

  @Test
  void simpleMap() throws Exception {
    String s = "{\"type\":\"map\",\"values\":\"long\"}";
    ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(s), p, new ColumnMetaData(">key", ValueType.STRING).setParent(p),
        new ColumnMetaData(">value", ValueType.LONG).setParent(p));
  }

  @Test
  void map() throws Exception {
    String s = "{\"type\":\"map\",\"values\":" + SIMPLE_RECORD + "}";
    ColumnMetaData p = new ColumnMetaData(">", ValueType.NULL).isArray(true);
    check(new Schema.Parser().parse(s), p, new ColumnMetaData(">key", ValueType.STRING).setParent(p),
        new ColumnMetaData(">value#x", ValueType.INT).setParent(p),
        new ColumnMetaData(">value#y", ValueType.STRING).setParent(p));
  }

  /** Shreds s, compares its columns to those expected, then round-trips data. */
  private void check(Schema s, ColumnMetaData... expected) throws Exception {
    ColumnMetaData[] shredded = new AvroColumnator(s).getColumns();
    assertEquals(expected.length, shredded.length);
    for (int i = 0; i < expected.length; i++)
      assertEquals(expected[i].toString(), shredded[i].toString());
    checkWrite(s);
    checkRead(s);
  }

  /** Writes COUNT random datums of the given schema to FILE. */
  private void checkWrite(Schema schema) throws IOException {
    AvroColumnWriter<Object> writer = new AvroColumnWriter<>(schema, new ColumnFileMetaData());
    // (removed an unused 'count' local and commented-out debug output)
    for (Object datum : new RandomData(schema, COUNT, SEED))
      writer.write(datum);
    writer.writeTo(FILE);
  }

  /** Reads FILE back and asserts it matches the same random data sequence. */
  private void checkRead(Schema schema) throws IOException {
    // try-with-resources: the reader is closed even when an assertion fails
    try (AvroColumnReader<Object> reader = new AvroColumnReader<>(
        new AvroColumnReader.Params(FILE).setSchema(schema))) {
      for (Object expected : new RandomData(schema, COUNT, SEED))
        assertEquals(expected, reader.next());
    }
  }
}
| 7,426 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestEvolvedSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.trevni.avro.AvroColumnReader.Params;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
/**
 * Checks schema evolution: a record written with the writer schema is read
 * back with an evolved schema (which adds a defaulted record field), both
 * through a Trevni file and through a plain Avro data file.
 */
public class TestEvolvedSchema {
  private static String writerSchema = "{" + " \"namespace\": \"org.apache.avro\","
      + " \"name\": \"test_evolution\"," + " \"type\": \"record\"," + " \"fields\": ["
      + " { \"name\": \"a\", \"type\":\"string\" }," + " { \"name\": \"b\", \"type\":\"int\" }" + " ]"
      + "}";
  private static String innerSchema = "{\"name\":\"c1\"," + " \"type\":\"record\","
      + " \"fields\":[{\"name\":\"c11\", \"type\":\"int\", \"default\": 2},"
      + " {\"name\":\"c12\", \"type\":\"string\", \"default\":\"goodbye\"}]}";
  private static String evolvedSchema2 = "{" + " \"namespace\": \"org.apache.avro\","
      + " \"name\": \"test_evolution\"," + " \"type\": \"record\"," + " \"fields\": ["
      + " { \"name\": \"a\", \"type\":\"string\" }," + " { \"name\": \"b\", \"type\":\"int\" },"
      + " { \"name\": \"c\", \"type\":" + innerSchema + ","
      + " \"default\":{\"c11\": 1, \"c12\": \"hello\"}" + " }" + " ]" + "}";

  GenericData.Record writtenRecord;
  GenericData.Record evolvedRecord;
  GenericData.Record innerRecord;

  private static final Schema writer = new Schema.Parser().parse(writerSchema);
  private static final Schema evolved = new Schema.Parser().parse(evolvedSchema2);
  private static final Schema inner = new Schema.Parser().parse(innerSchema);

  /** Builds the record to write and the record expected after evolution. */
  @BeforeEach
  public void setUp() {
    writtenRecord = new GenericData.Record(writer);
    writtenRecord.put("a", "record");
    writtenRecord.put("b", 21);

    innerRecord = new GenericData.Record(inner);
    innerRecord.put("c11", 1);
    innerRecord.put("c12", "hello");

    evolvedRecord = new GenericData.Record(evolved);
    evolvedRecord.put("a", "record");
    evolvedRecord.put("b", 21);
    evolvedRecord.put("c", innerRecord);
  }

  /** Round-trips through Trevni, reading with the evolved schema. */
  @Test
  void trevniEvolvedRead() throws IOException {
    AvroColumnWriter<GenericRecord> acw = new AvroColumnWriter<>(writer, new ColumnFileMetaData());
    acw.write(writtenRecord);
    File serializedTrevni = File.createTempFile("trevni", null);
    serializedTrevni.deleteOnExit(); // don't leave temp files behind
    acw.writeTo(serializedTrevni);

    AvroColumnReader.Params params = new Params(serializedTrevni);
    params.setSchema(evolved);
    try (AvroColumnReader<GenericRecord> acr = new AvroColumnReader<>(params)) {
      GenericRecord readRecord = acr.next();
      assertEquals(evolvedRecord, readRecord);
      assertFalse(acr.hasNext());
    }
  }

  /** Same evolution check through a plain Avro data file, for comparison. */
  @Test
  void avroEvolvedRead() throws IOException {
    File serializedAvro = File.createTempFile("avro", null);
    serializedAvro.deleteOnExit(); // don't leave temp files behind
    DatumWriter<GenericRecord> dw = new GenericDatumWriter<>(writer);
    // try-with-resources flushes and closes the writer even on failure; the
    // original's explicit flush() before close() was redundant
    try (DataFileWriter<GenericRecord> dfw = new DataFileWriter<>(dw)) {
      dfw.create(writer, serializedAvro);
      dfw.append(writtenRecord);
    }

    GenericDatumReader<GenericRecord> reader = new GenericDatumReader<>(writer);
    reader.setExpected(evolved);
    try (DataFileReader<GenericRecord> dfr = new DataFileReader<>(serializedAvro, reader)) {
      GenericRecord readRecord = dfr.next();
      assertEquals(evolvedRecord, readRecord);
      assertFalse(dfr.hasNext());
    }
  }
}
| 7,427 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/TestMetadataFiltering.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import org.apache.avro.mapred.AvroJob;
import org.apache.hadoop.mapred.JobConf;
import org.apache.trevni.ColumnFileMetaData;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class TestMetadataFiltering {

  /**
   * Verifies that {@link AvroTrevniOutputFormat#filterMetadata} copies only job
   * properties carrying {@link AvroTrevniOutputFormat#META_PREFIX}, with the
   * prefix stripped from the resulting keys, and drops everything else.
   */
  @Test
  void metadataFiltering() throws Exception {
    JobConf job = new JobConf();
    job.set(AvroTrevniOutputFormat.META_PREFIX + "test1", "1");
    job.set(AvroTrevniOutputFormat.META_PREFIX + "test2", "2");
    job.set("test3", "3"); // no prefix: must be filtered out
    job.set(AvroJob.TEXT_PREFIX + "test4", "4"); // wrong prefix: must be filtered out
    job.set(AvroTrevniOutputFormat.META_PREFIX + "test5", "5");
    ColumnFileMetaData metadata = AvroTrevniOutputFormat.filterMetadata(job);
    // filterMetadata stores values as UTF-8 bytes, so decode with an explicit
    // charset rather than the platform default. Note assertEquals takes
    // (expected, actual) — the original had the arguments swapped.
    assertNotNull(metadata.get("test1"));
    assertEquals("1", new String(metadata.get("test1"), java.nio.charset.StandardCharsets.UTF_8));
    assertNotNull(metadata.get("test2"));
    assertEquals("2", new String(metadata.get("test2"), java.nio.charset.StandardCharsets.UTF_8));
    assertNotNull(metadata.get("test5"));
    assertEquals("5", new String(metadata.get("test5"), java.nio.charset.StandardCharsets.UTF_8));
    assertNull(metadata.get("test3"));
    assertNull(metadata.get("test4"));
  }
}
| 7,428 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyValueWordCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.trevni.avro.WordCountUtil;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * End-to-end word-count exercise of {@link AvroTrevniKeyValueOutputFormat} and
 * {@link AvroTrevniKeyValueInputFormat}: the output check writes a Trevni
 * key/value file, the input check reads it back and sums the counts.
 */
public class TestKeyValueWordCount {

  // Accumulated by Counter across map calls; reset before each input-format run.
  private static long total = 0;

  static final Schema STRING = Schema.create(Schema.Type.STRING);
  static {
    GenericData.setStringType(STRING, GenericData.StringType.String);
  }
  static final Schema LONG = Schema.create(Schema.Type.LONG);

  /** Splits each input line into tokens, emitting (word, 1) pairs. */
  private static class WordCountMapper extends Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
    private LongWritable mCount = new LongWritable();
    private Text mText = new Text();

    @Override
    protected void setup(Context context) {
      mCount.set(1);
    }

    @Override
    protected void map(AvroKey<String> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      try {
        StringTokenizer tokens = new StringTokenizer(key.datum());
        while (tokens.hasMoreTokens()) {
          mText.set(tokens.nextToken());
          context.write(mText, mCount);
        }
      } catch (Exception e) {
        // Include the offending datum so a bad record is identifiable in logs.
        throw new RuntimeException(key + " " + key.datum(), e);
      }
    }
  }

  /** Sums the per-word counts and emits them as an Avro key/value pair. */
  private static class WordCountReducer extends Reducer<Text, LongWritable, AvroKey<String>, AvroValue<Long>> {

    AvroKey<String> resultKey = new AvroKey<>();
    AvroValue<Long> resultValue = new AvroValue<>();

    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
      long sum = 0;
      for (LongWritable value : values) {
        sum += value.get();
      }
      resultKey.datum(key.toString());
      resultValue.datum(sum);
      context.write(resultKey, resultValue);
    }
  }

  /** Map-only consumer that accumulates all counts into {@link #total}. */
  public static class Counter extends Mapper<AvroKey<String>, AvroValue<Long>, NullWritable, NullWritable> {
    @Override
    protected void map(AvroKey<String> key, AvroValue<Long> value, Context context)
        throws IOException, InterruptedException {
      total += value.datum();
    }
  }

  /** Output must be validated before input, so both checks share one test. */
  @Test
  void iOFormat() throws Exception {
    checkOutputFormat();
    checkInputFormat();
  }

  public void checkOutputFormat() throws Exception {
    Job job = Job.getInstance();

    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest", "part-r-00000");

    wordCountUtil.writeLinesFile();

    AvroJob.setInputKeySchema(job, STRING);
    AvroJob.setOutputKeySchema(job, STRING);
    AvroJob.setOutputValueSchema(job, LONG);

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(WordCountReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);

    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
    FileOutputFormat.setCompressOutput(job, true);

    job.setInputFormatClass(AvroKeyInputFormat.class);
    job.setOutputFormatClass(AvroTrevniKeyValueOutputFormat.class);

    // waitForCompletion returns false on job failure; the original ignored it,
    // so a failed job silently passed the test.
    org.junit.jupiter.api.Assertions.assertTrue(job.waitForCompletion(true));

    wordCountUtil.validateCountsFileGenericRecord();
  }

  public void checkInputFormat() throws Exception {
    Job job = Job.getInstance();

    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyValueTest");

    job.setMapperClass(Counter.class);

    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
    job.setInputFormatClass(AvroTrevniKeyValueInputFormat.class);

    job.setNumReduceTasks(0);
    job.setOutputFormatClass(NullOutputFormat.class);

    total = 0; // reset the static accumulator before running the map-only job
    org.junit.jupiter.api.Assertions.assertTrue(job.waitForCompletion(true));
    assertEquals(WordCountUtil.TOTAL, total);
  }
}
| 7,429 |
0 | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/test/java/org/apache/trevni/avro/mapreduce/TestKeyWordCount.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.Pair;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.trevni.avro.WordCountUtil;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * End-to-end word-count exercise of {@link AvroTrevniKeyOutputFormat} and
 * {@link AvroTrevniKeyInputFormat}: the output check writes Pair(String, Long)
 * records to a Trevni file, the input check projects just the "value" column
 * back out and sums it.
 */
public class TestKeyWordCount {

  // Accumulated by Counter across map calls; reset before each input-format run.
  private static long total = 0;

  static final Schema STRING = Schema.create(Schema.Type.STRING);
  static {
    GenericData.setStringType(STRING, GenericData.StringType.String);
  }
  static final Schema LONG = Schema.create(Schema.Type.LONG);

  /** Splits each input line into tokens, emitting (word, 1) pairs. */
  private static class WordCountMapper extends Mapper<AvroKey<String>, NullWritable, Text, LongWritable> {
    private LongWritable mCount = new LongWritable();
    private Text mText = new Text();

    @Override
    protected void setup(Context context) {
      mCount.set(1);
    }

    @Override
    protected void map(AvroKey<String> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      try {
        StringTokenizer tokens = new StringTokenizer(key.datum());
        while (tokens.hasMoreTokens()) {
          mText.set(tokens.nextToken());
          context.write(mText, mCount);
        }
      } catch (Exception e) {
        // Include the offending datum so a bad record is identifiable in logs.
        throw new RuntimeException(key + " " + key.datum(), e);
      }
    }
  }

  /** Sums per-word counts into a Pair(String, Long) record keyed output. */
  private static class WordCountReducer extends Reducer<Text, LongWritable, AvroKey<GenericData.Record>, NullWritable> {

    private AvroKey<GenericData.Record> result;

    @Override
    protected void setup(Context context) {
      result = new AvroKey<>();
      result.datum(new Record(Pair.getPairSchema(STRING, LONG)));
    }

    @Override
    protected void reduce(Text key, Iterable<LongWritable> values, Context context)
        throws IOException, InterruptedException {
      long count = 0;
      for (LongWritable value : values) {
        count += value.get();
      }

      result.datum().put("key", key.toString());
      result.datum().put("value", count);

      context.write(result, NullWritable.get());
    }
  }

  /** Map-only consumer that accumulates the "value" field into {@link #total}. */
  public static class Counter extends Mapper<AvroKey<GenericData.Record>, NullWritable, NullWritable, NullWritable> {
    @Override
    protected void map(AvroKey<GenericData.Record> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      total += (Long) key.datum().get("value");
    }
  }

  /** Output must be validated before input, so both checks share one test. */
  @Test
  void iOFormat() throws Exception {
    checkOutputFormat();
    checkInputFormat();
  }

  public void checkOutputFormat() throws Exception {
    Job job = Job.getInstance();

    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest", "part-r-00000");

    wordCountUtil.writeLinesFile();

    AvroJob.setInputKeySchema(job, STRING);
    AvroJob.setOutputKeySchema(job, Pair.getPairSchema(STRING, LONG));

    job.setMapperClass(WordCountMapper.class);
    job.setReducerClass(WordCountReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);

    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/in"));
    FileOutputFormat.setOutputPath(job, new Path(wordCountUtil.getDir().toString() + "/out"));
    FileOutputFormat.setCompressOutput(job, true);

    job.setInputFormatClass(AvroKeyInputFormat.class);
    job.setOutputFormatClass(AvroTrevniKeyOutputFormat.class);

    // waitForCompletion returns false on job failure; the original ignored it,
    // so a failed job silently passed the test.
    org.junit.jupiter.api.Assertions.assertTrue(job.waitForCompletion(true));

    wordCountUtil.validateCountsFile();
  }

  public void checkInputFormat() throws Exception {
    Job job = Job.getInstance();

    WordCountUtil wordCountUtil = new WordCountUtil("trevniMapReduceKeyTest");

    job.setMapperClass(Counter.class);

    // Project the written Pair schema down to just its "value" column; Trevni
    // reads only the columns named by the reader schema.
    Schema subSchema = new Schema.Parser().parse("{\"type\":\"record\"," + "\"name\":\"PairValue\"," + "\"fields\": [ "
        + "{\"name\":\"value\", \"type\":\"long\"}" + "]}");
    AvroJob.setInputKeySchema(job, subSchema);

    FileInputFormat.setInputPaths(job, new Path(wordCountUtil.getDir().toString() + "/out/*"));
    job.setInputFormatClass(AvroTrevniKeyInputFormat.class);

    job.setNumReduceTasks(0);
    job.setOutputFormatClass(NullOutputFormat.class);

    total = 0; // reset the static accumulator before running the map-only job
    org.junit.jupiter.api.Assertions.assertTrue(job.waitForCompletion(true));
    assertEquals(WordCountUtil.TOTAL, total);
  }
}
| 7,430 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.IdentityHashMap;
import org.apache.trevni.ColumnMetaData;
import org.apache.trevni.ValueType;
import org.apache.trevni.TrevniRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
/** Utility that computes the column layout of a schema. */
/**
 * Utility that computes the column layout of a schema: flattens an Avro schema
 * into the flat list of Trevni columns used by {@link AvroColumnWriter} and
 * {@link AvroColumnReader}.
 */
class AvroColumnator {

  private List<ColumnMetaData> columns = new ArrayList<>();
  // Parallel to columns: entry i is 1 + the number of columns that are
  // descendants of column i (fixed up after children are added).
  private List<Integer> arrayWidths = new ArrayList<>();

  public AvroColumnator(Schema schema) {
    // (The original declared an unused local copy of the schema here.)
    columnize(null, schema, null, false);
  }

  /** Return columns for the schema. */
  public ColumnMetaData[] getColumns() {
    return columns.toArray(new ColumnMetaData[0]);
  }

  /**
   * Return array giving the number of columns immediately following each column
   * that are descendents of that column.
   */
  public int[] getArrayWidths() {
    int[] result = new int[arrayWidths.size()];
    int i = 0;
    for (Integer width : arrayWidths)
      result[i++] = width;
    return result;
  }

  // Identity map of schemas currently on the recursion stack, used to reject
  // recursive schemas (which cannot be shredded into a fixed column set).
  private Map<Schema, Schema> seen = new IdentityHashMap<>();

  /**
   * Recursively append columns for schema {@code s} to {@link #columns}.
   *
   * @param path    column-name path accumulated so far (null at the root)
   * @param s       schema being flattened
   * @param parent  enclosing array/map column, or null at top level
   * @param isArray whether the column being added is repeated
   */
  private void columnize(String path, Schema s, ColumnMetaData parent, boolean isArray) {
    if (isSimple(s)) {
      if (path == null)
        path = s.getFullName();
      addColumn(path, simpleValueType(s), parent, isArray);
      return;
    }

    if (seen.containsKey(s)) // catch recursion
      throw new TrevniRuntimeException("Cannot shred recursive schemas: " + s);
    seen.put(s, s);

    switch (s.getType()) {
    case MAP:
      path = path == null ? ">" : path + ">";
      int start = columns.size();
      // A map becomes three columns: a length-bearing parent, keys, and values.
      ColumnMetaData p = addColumn(path, ValueType.NULL, parent, true);
      addColumn(p(path, "key", ""), ValueType.STRING, p, false);
      columnize(p(path, "value", ""), s.getValueType(), p, false);
      arrayWidths.set(start, columns.size() - start); // fixup with actual width
      break;
    case RECORD:
      for (Field field : s.getFields()) // flatten fields to columns
        columnize(p(path, field.name(), "#"), field.schema(), parent, isArray);
      break;
    case ARRAY:
      path = path == null ? "[]" : path + "[]";
      addArrayColumn(path, s.getElementType(), parent);
      break;
    case UNION:
      for (Schema branch : s.getTypes()) // array per non-null branch
        if (branch.getType() != Schema.Type.NULL)
          addArrayColumn(p(path, branch, "/"), branch, parent);
      break;
    default:
      throw new TrevniRuntimeException("Unknown schema: " + s);
    }
    seen.remove(s);
  }

  /** Path join for a child schema; unions contribute no path segment. */
  private String p(String parent, Schema child, String sep) {
    if (child.getType() == Schema.Type.UNION)
      return parent;
    return p(parent, child.getFullName(), sep);
  }

  /** Path join for a named child, using {@code sep} between segments. */
  private String p(String parent, String child, String sep) {
    return parent == null ? child : parent + sep + child;
  }

  private ColumnMetaData addColumn(String path, ValueType type, ColumnMetaData parent, boolean isArray) {
    ColumnMetaData column = new ColumnMetaData(path, type);
    if (parent != null)
      column.setParent(parent);
    column.isArray(isArray);
    columns.add(column);
    arrayWidths.add(1); // placeholder
    return column;
  }

  private void addArrayColumn(String path, Schema element, ColumnMetaData parent) {
    if (path == null)
      path = element.getFullName();
    if (isSimple(element)) { // optimize simple arrays
      addColumn(path, simpleValueType(element), parent, true);
      return;
    }
    // complex array: insert a parent column with lengths
    int start = columns.size();
    ColumnMetaData array = addColumn(path, ValueType.NULL, parent, true);
    columnize(path, element, array, false);
    arrayWidths.set(start, columns.size() - start); // fixup with actual width
  }

  /** True for Avro types that map directly onto a single Trevni value column. */
  static boolean isSimple(Schema s) {
    switch (s.getType()) {
    case NULL:
    case BOOLEAN:
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
    case BYTES:
    case STRING:
    case ENUM:
    case FIXED:
      return true;
    default:
      return false;
    }
  }

  /** Map a simple Avro type to its Trevni value type (enum→INT, fixed→BYTES). */
  private ValueType simpleValueType(Schema s) {
    switch (s.getType()) {
    case NULL:
      return ValueType.NULL;
    case BOOLEAN:
      return ValueType.BOOLEAN;
    case INT:
      return ValueType.INT;
    case LONG:
      return ValueType.LONG;
    case FLOAT:
      return ValueType.FLOAT;
    case DOUBLE:
      return ValueType.DOUBLE;
    case BYTES:
      return ValueType.BYTES;
    case STRING:
      return ValueType.STRING;
    case ENUM:
      return ValueType.INT;
    case FIXED:
      return ValueType.BYTES;
    default:
      throw new TrevniRuntimeException("Unknown schema: " + s);
    }
  }
}
| 7,431 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniOutputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.util.Progressable;
import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.mapred.AvroJob;
import org.apache.avro.mapred.AvroWrapper;
import org.apache.trevni.ColumnFileMetaData;
/**
* An {@link org.apache.hadoop.mapred.OutputFormat} that writes Avro data to
* Trevni files.
*
* <p>
* Writes a directory of files per task, each comprising a single filesystem
* block. To reduce the number of files, increase the default filesystem block
* size for the job. Each task also requires enough memory to buffer a
* filesystem block.
*/
public class AvroTrevniOutputFormat<T> extends FileOutputFormat<AvroWrapper<T>, NullWritable> {
/** The file name extension for trevni files. */
public final static String EXT = ".trv";
public static final String META_PREFIX = "trevni.meta.";
/** Add metadata to job output files. */
public static void setMeta(JobConf job, String key, String value) {
job.set(META_PREFIX + key, value);
}
@Override
public RecordWriter<AvroWrapper<T>, NullWritable> getRecordWriter(FileSystem ignore, final JobConf job,
final String name, Progressable prog) throws IOException {
boolean isMapOnly = job.getNumReduceTasks() == 0;
final Schema schema = isMapOnly ? AvroJob.getMapOutputSchema(job) : AvroJob.getOutputSchema(job);
final ColumnFileMetaData meta = filterMetadata(job);
final Path dir = FileOutputFormat.getTaskOutputPath(job, name);
final FileSystem fs = dir.getFileSystem(job);
if (!fs.mkdirs(dir))
throw new IOException("Failed to create directory: " + dir);
final long blockSize = fs.getDefaultBlockSize(dir);
return new RecordWriter<AvroWrapper<T>, NullWritable>() {
private int part = 0;
private AvroColumnWriter<T> writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
private void flush() throws IOException {
try (OutputStream out = fs.create(new Path(dir, "part-" + (part++) + EXT))) {
writer.writeTo(out);
}
writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
}
@Override
public void write(AvroWrapper<T> wrapper, NullWritable ignore) throws IOException {
writer.write(wrapper.datum());
if (writer.sizeEstimate() >= blockSize) // block full
flush();
}
public void close(Reporter reporter) throws IOException {
flush();
}
};
}
static ColumnFileMetaData filterMetadata(final JobConf job) {
final ColumnFileMetaData meta = new ColumnFileMetaData();
for (Map.Entry<String, String> e : job)
if (e.getKey().startsWith(META_PREFIX))
meta.put(e.getKey().substring(META_PREFIX.length()), e.getValue().getBytes(StandardCharsets.UTF_8));
return meta;
}
}
| 7,432 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.io.Closeable;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import org.apache.trevni.ColumnMetaData;
import org.apache.trevni.ColumnFileReader;
import org.apache.trevni.ColumnValues;
import org.apache.trevni.Input;
import org.apache.trevni.InputFile;
import org.apache.trevni.TrevniRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import static org.apache.trevni.avro.AvroColumnator.isSimple;
/**
* Read files written with {@link AvroColumnWriter}. A subset of the schema used
* for writing may be specified when reading. In this case only columns of the
* subset schema are read.
*/
/**
 * Read files written with {@link AvroColumnWriter}. A subset of the schema used
 * for writing may be specified when reading. In this case only columns of the
 * subset schema are read.
 */
public class AvroColumnReader<D> implements Iterator<D>, Iterable<D>, Closeable {

  private ColumnFileReader reader;
  private GenericData model;
  private Schema fileSchema; // schema the file was written with
  private Schema readSchema; // projection schema actually read (may equal fileSchema)
  private ColumnValues[] values; // one value iterator per column of readSchema
  private int[] arrayWidths; // per-column: 1 + number of descendant columns
  private int column; // current index in values
  // record full-name -> (field name -> default) for fields present in
  // readSchema but absent from fileSchema
  private Map<String, Map<String, Object>> defaults = new HashMap<>();

  /** Parameters for reading an Avro column file. */
  public static class Params {
    Input input;
    Schema schema;
    GenericData model = GenericData.get();

    /** Construct reading from a file. */
    public Params(File file) throws IOException {
      this(new InputFile(file));
    }

    /** Construct reading from input. */
    public Params(Input input) {
      this.input = input;
    }

    /** Set subset schema to project data down to. */
    public Params setSchema(Schema schema) {
      this.schema = schema;
      return this;
    }

    /** Set data representation. */
    public Params setModel(GenericData model) {
      this.model = model;
      return this;
    }
  }

  /** Construct a reader for a file. */
  public AvroColumnReader(Params params) throws IOException {
    this.reader = new ColumnFileReader(params.input);
    this.model = params.model;
    // The writer schema was stored in file metadata under SCHEMA_KEY.
    this.fileSchema = new Schema.Parser().parse(reader.getMetaData().getString(AvroColumnWriter.SCHEMA_KEY));
    this.readSchema = params.schema == null ? fileSchema : params.schema;
    initialize();
  }

  /** Return the schema for data in this file. */
  public Schema getFileSchema() {
    return fileSchema;
  }

  /**
   * Match readSchema columns to file columns by name and open a value
   * iterator for each match; collect defaults for any unmatched fields.
   */
  void initialize() throws IOException {
    // compute a mapping from column name to number for file
    Map<String, Integer> fileColumnNumbers = new HashMap<>();
    int i = 0;
    for (ColumnMetaData c : new AvroColumnator(fileSchema).getColumns())
      fileColumnNumbers.put(c.getName(), i++);

    // create iterator for each column in readSchema
    AvroColumnator readColumnator = new AvroColumnator(readSchema);
    this.arrayWidths = readColumnator.getArrayWidths();
    ColumnMetaData[] readColumns = readColumnator.getColumns();
    this.values = new ColumnValues[readColumns.length];
    int j = 0;
    for (ColumnMetaData c : readColumns) {
      Integer n = fileColumnNumbers.get(c.getName());
      if (n != null)
        values[j++] = reader.getValues(n);
      // NOTE(review): when a read column is absent from the file, j is not
      // advanced, so subsequent matches shift down; presumably defaults cover
      // the missing columns — confirm against findDefaults behavior.
    }
    findDefaults(readSchema, fileSchema);
  }

  // get defaults for fields in read that are not in write
  // Recursively walks both schemas in parallel, rejecting type mismatches
  // and recording model defaults for record fields missing from the file.
  private void findDefaults(Schema read, Schema write) {
    switch (read.getType()) {
    case NULL:
    case BOOLEAN:
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
    case BYTES:
    case STRING:
    case ENUM:
    case FIXED:
      if (read.getType() != write.getType())
        throw new TrevniRuntimeException("Type mismatch: " + read + " & " + write);
      break;
    case MAP:
      findDefaults(read.getValueType(), write.getValueType());
      break;
    case ARRAY:
      findDefaults(read.getElementType(), write.getElementType());
      break;
    case UNION:
      for (Schema s : read.getTypes()) {
        Integer index = write.getIndexNamed(s.getFullName());
        if (index == null)
          throw new TrevniRuntimeException("No matching branch: " + s);
        findDefaults(s, write.getTypes().get(index));
      }
      break;
    case RECORD:
      for (Field f : read.getFields()) {
        Field g = write.getField(f.name());
        if (g == null)
          setDefault(read, f); // field added by schema evolution: use default
        else
          findDefaults(f.schema(), g.schema());
      }
      break;
    default:
      throw new TrevniRuntimeException("Unknown schema: " + read);
    }
  }

  /** Record the model's default value for field {@code f} of {@code record}. */
  private void setDefault(Schema record, Field f) {
    String recordName = record.getFullName();
    Map<String, Object> recordDefaults = defaults.computeIfAbsent(recordName, k -> new HashMap<>());
    recordDefaults.put(f.name(), model.getDefaultValue(f));
  }

  @Override
  public Iterator<D> iterator() {
    return this;
  }

  @Override
  public boolean hasNext() {
    // All columns have the same row count, so checking the first suffices.
    return values[0].hasNext();
  }

  /** Return the number of rows in this file. */
  public long getRowCount() {
    return reader.getRowCount();
  }

  @Override
  public D next() {
    try {
      // Advance every open column to the next row, then assemble the datum
      // by walking readSchema from column 0.
      for (ColumnValues value : values)
        if (value != null)
          value.startRow();
      this.column = 0;
      return (D) read(readSchema);
    } catch (IOException e) {
      throw new TrevniRuntimeException(e);
    }
  }

  /**
   * Read one value of schema {@code s} starting at the current column,
   * advancing {@link #column} past all of s's columns (via arrayWidths).
   */
  private Object read(Schema s) throws IOException {
    if (isSimple(s))
      return nextValue(s, column++);

    final int startColumn = column;

    switch (s.getType()) {
    case MAP:
      // Maps occupy a length column, a key column, and the value columns;
      // each entry re-reads from startColumn.
      int size = values[column].nextLength();
      Map map = new HashMap(size);
      for (int i = 0; i < size; i++) {
        this.column = startColumn;
        values[column++].nextValue(); // null in parent
        String key = (String) values[column++].nextValue(); // key
        map.put(key, read(s.getValueType())); // value
      }
      column = startColumn + arrayWidths[startColumn];
      return map;
    case RECORD:
      Object record = model.newRecord(null, s);
      Map<String, Object> rDefaults = defaults.get(s.getFullName());
      for (Field f : s.getFields()) {
        // Fields missing from the file get a deep copy of their default.
        Object value = ((rDefaults != null) && rDefaults.containsKey(f.name()))
            ? model.deepCopy(f.schema(), rDefaults.get(f.name()))
            : read(f.schema());
        model.setField(record, f.name(), f.pos(), value);
      }
      return record;
    case ARRAY:
      int length = values[column].nextLength();
      List elements = new GenericData.Array(length, s);
      for (int i = 0; i < length; i++) {
        this.column = startColumn;
        Object value = nextValue(s, column++);
        if (!isSimple(s.getElementType()))
          value = read(s.getElementType());
        elements.add(value);
      }
      column = startColumn + arrayWidths[startColumn];
      return elements;
    case UNION:
      // Each non-null branch is stored as a length-0/1 array; exactly the
      // selected branch has length 1.
      Object value = null;
      for (Schema branch : s.getTypes()) {
        if (branch.getType() == Schema.Type.NULL)
          continue;
        if (values[column].nextLength() == 1) {
          value = nextValue(branch, column);
          column++;
          if (!isSimple(branch))
            value = read(branch);
        } else {
          column += arrayWidths[column]; // skip unselected branch's columns
        }
      }
      return value;
    default:
      throw new TrevniRuntimeException("Unknown schema: " + s);
    }
  }

  /**
   * Read the next raw value from {@code column}, converting stored ints back
   * to enum symbols and stored bytes back to fixed instances.
   */
  private Object nextValue(Schema s, int column) throws IOException {
    Object v = values[column].nextValue();

    switch (s.getType()) {
    case ENUM:
      return model.createEnum(s.getEnumSymbols().get((Integer) v), s);
    case FIXED:
      return model.createFixed(null, ((ByteBuffer) v).array(), s);
    }

    return v;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException();
  }

  @Override
  public void close() throws IOException {
    reader.close();
  }
}
| 7,433 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroColumnWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.io.File;
import java.io.OutputStream;
import java.util.Collection;
import java.util.Map;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.trevni.ColumnFileWriter;
import org.apache.trevni.TrevniRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericFixed;
import org.apache.avro.util.Utf8;
import static org.apache.trevni.avro.AvroColumnator.isSimple;
/**
* Write Avro records to a Trevni column file.
*
* <p>
* Each primitive type is written to a separate column.
*
* <p>
* Output is buffered until {@link #writeTo(OutputStream)} is called. The
* {@link #sizeEstimate()} indicates both the amount of data buffered and the
* size of the file that will be written.
*/
/**
 * Write Avro records to a Trevni column file.
 *
 * <p>
 * Each primitive type is written to a separate column.
 *
 * <p>
 * Output is buffered until {@link #writeTo(OutputStream)} is called. The
 * {@link #sizeEstimate()} indicates both the amount of data buffered and the
 * size of the file that will be written.
 */
public class AvroColumnWriter<D> {
  private Schema schema;
  private GenericData model; // data representation used to access records
  private ColumnFileWriter writer;
  private int[] arrayWidths; // per-column: 1 + number of descendant columns

  /** Metadata key under which the writer schema is stored in the file. */
  public static final String SCHEMA_KEY = "avro.schema";

  public AvroColumnWriter(Schema s, ColumnFileMetaData meta) throws IOException {
    this(s, meta, GenericData.get());
  }

  public AvroColumnWriter(Schema s, ColumnFileMetaData meta, GenericData model) throws IOException {
    this.schema = s;
    AvroColumnator columnator = new AvroColumnator(s);
    meta.set(SCHEMA_KEY, s.toString()); // save schema in file
    this.writer = new ColumnFileWriter(meta, columnator.getColumns());
    this.arrayWidths = columnator.getArrayWidths();
    this.model = model;
  }

  /**
   * Return the approximate size of the file that will be written. Tries to
   * slightly over-estimate. Indicates both the size in memory of the buffered
   * data as well as the size of the file that will be written by
   * {@link #writeTo(OutputStream)}.
   */
  public long sizeEstimate() {
    return writer.sizeEstimate();
  }

  /** Write all rows added to the named output stream. */
  public void writeTo(OutputStream out) throws IOException {
    writer.writeTo(out);
  }

  /** Write all rows added to the named file. */
  public void writeTo(File file) throws IOException {
    writer.writeTo(file);
  }

  /** Add a row to the file. */
  public void write(D value) throws IOException {
    writer.startRow();
    int count = write(value, schema, 0);
    // A row must fill every column exactly once.
    assert (count == writer.getColumnCount());
    writer.endRow();
  }

  /**
   * Shred {@code o} (of schema {@code s}) into the columns starting at
   * {@code column}; returns the index of the first column after s's columns.
   * Mirrors the column layout produced by {@link AvroColumnator}.
   */
  private int write(Object o, Schema s, int column) throws IOException {
    if (isSimple(s)) {
      writeValue(o, s, column);
      return column + 1;
    }
    switch (s.getType()) {
    case MAP:
      // A map is a length column, then a key column, then value columns;
      // every entry writes a null into the parent length column's stream.
      Map<?, ?> map = (Map) o;
      writer.writeLength(map.size(), column);
      for (Map.Entry e : map.entrySet()) {
        writer.writeValue(null, column);
        writer.writeValue(e.getKey(), column + 1);
        int c = write(e.getValue(), s.getValueType(), column + 2);
        assert (c == column + arrayWidths[column]);
      }
      return column + arrayWidths[column];
    case RECORD:
      for (Field f : s.getFields())
        column = write(model.getField(o, f.name(), f.pos()), f.schema(), column);
      return column;
    case ARRAY:
      Collection elements = (Collection) o;
      writer.writeLength(elements.size(), column);
      if (isSimple(s.getElementType())) { // optimize simple arrays
        for (Object element : elements)
          writeValue(element, s.getElementType(), column);
        return column + 1;
      }
      // complex elements: parent column carries lengths/nulls, children follow
      for (Object element : elements) {
        writer.writeValue(null, column);
        int c = write(element, s.getElementType(), column + 1);
        assert (c == column + arrayWidths[column]);
      }
      return column + arrayWidths[column];
    case UNION:
      // Each non-null branch is stored as a length-0/1 array; the selected
      // branch gets length 1, all others length 0.
      int b = model.resolveUnion(s, o);
      int i = 0;
      for (Schema branch : s.getTypes()) {
        boolean selected = i++ == b;
        if (branch.getType() == Schema.Type.NULL)
          continue;
        if (!selected) {
          writer.writeLength(0, column);
          column += arrayWidths[column]; // skip the unselected branch's columns
        } else {
          writer.writeLength(1, column);
          if (isSimple(branch)) {
            writeValue(o, branch, column++);
          } else {
            writer.writeValue(null, column);
            column = write(o, branch, column + 1);
          }
        }
      }
      return column;
    default:
      throw new TrevniRuntimeException("Unknown schema: " + s);
    }
  }

  /**
   * Write one simple value, converting Avro-specific representations
   * (Utf8, enum symbols, fixed) to Trevni's primitive forms first.
   */
  private void writeValue(Object value, Schema s, int column) throws IOException {
    switch (s.getType()) {
    case STRING:
      if (value instanceof Utf8) // convert Utf8 to String
        value = value.toString();
      break;
    case ENUM:
      // enums are stored as their ordinal int
      if (value instanceof Enum)
        value = ((Enum) value).ordinal();
      else
        value = s.getEnumOrdinal(value.toString());
      break;
    case FIXED:
      value = ((GenericFixed) value).bytes(); // fixed stored as raw bytes
      break;
    }
    writer.writeValue(value, column);
  }
}
| 7,434 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/AvroTrevniInputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.mapred.AvroJob;
import org.apache.avro.mapred.AvroWrapper;
/**
* An {@link org.apache.hadoop.mapred.InputFormat} for Trevni files.
*
* <p>
* A subset schema to be read may be specified with
* {@link AvroJob#setInputSchema(JobConf,Schema)}.
*/
public class AvroTrevniInputFormat<T> extends FileInputFormat<AvroWrapper<T>, NullWritable> {

  /** Trevni files are read column-by-column, so they cannot be split. */
  @Override
  protected boolean isSplitable(FileSystem fs, Path filename) {
    return false;
  }

  /**
   * List input files, recursing into directories and keeping only files with
   * the Trevni extension.
   */
  @Override
  protected FileStatus[] listStatus(JobConf job) throws IOException {
    List<FileStatus> result = new ArrayList<>();
    job.setBoolean("mapred.input.dir.recursive", true);
    for (FileStatus file : super.listStatus(job))
      if (file.getPath().getName().endsWith(AvroTrevniOutputFormat.EXT))
        result.add(file);
    return result.toArray(new FileStatus[0]);
  }

  /**
   * Open a record reader over one Trevni file. A subset read schema, if set
   * via {@link AvroJob#setInputSchema}, is applied; otherwise the file's own
   * schema is used.
   */
  @Override
  public RecordReader<AvroWrapper<T>, NullWritable> getRecordReader(InputSplit split, final JobConf job,
      Reporter reporter) throws IOException {
    final FileSplit file = (FileSplit) split;
    reporter.setStatus(file.toString());

    final AvroColumnReader.Params params = new AvroColumnReader.Params(new HadoopInput(file.getPath(), job));
    params.setModel(ReflectData.get());
    if (job.get(AvroJob.INPUT_SCHEMA) != null)
      params.setSchema(AvroJob.getInputSchema(job));

    return new RecordReader<AvroWrapper<T>, NullWritable>() {
      private AvroColumnReader<T> reader = new AvroColumnReader<>(params);
      private float rows = reader.getRowCount();
      private long row; // number of rows consumed so far

      @Override
      public AvroWrapper<T> createKey() {
        return new AvroWrapper<>(null);
      }

      @Override
      public NullWritable createValue() {
        return NullWritable.get();
      }

      @Override
      public boolean next(AvroWrapper<T> wrapper, NullWritable ignore) throws IOException {
        if (!reader.hasNext())
          return false;
        wrapper.datum(reader.next());
        row++;
        return true;
      }

      @Override
      public float getProgress() throws IOException {
        // Guard the empty-file case: 0/0 in float arithmetic is NaN, which
        // confuses progress reporting. An empty input is fully processed.
        return rows == 0 ? 1.0f : row / rows;
      }

      @Override
      public long getPos() throws IOException {
        return row;
      }

      @Override
      public void close() throws IOException {
        reader.close();
      }
    };
  }

}
| 7,435 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/HadoopInput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.trevni.Input;
/** Adapt a Hadoop {@link FSDataInputStream} to Trevni's {@link Input}. */
public class HadoopInput implements Input {
  private final FSDataInputStream stream; // positioned-read stream over the file
  private final long len; // file length, captured once at construction

  /**
   * Construct given a path and a configuration.
   *
   * @param path file to read
   * @param conf configuration used to resolve the filesystem
   * @throws IOException if the file cannot be stat'ed or opened
   */
  public HadoopInput(Path path, Configuration conf) throws IOException {
    // Resolve the filesystem once rather than twice, and fetch the length
    // before opening the stream so a failing getFileStatus() cannot leak an
    // open stream.
    FileSystem fs = path.getFileSystem(conf);
    this.len = fs.getFileStatus(path).getLen();
    this.stream = fs.open(path);
  }

  /** Return the file's length in bytes. */
  @Override
  public long length() {
    return len;
  }

  /** Positioned read: fill {@code b[s..s+l)} from file offset {@code p}. */
  @Override
  public int read(long p, byte[] b, int s, int l) throws IOException {
    return stream.read(p, b, s, l);
  }

  @Override
  public void close() throws IOException {
    stream.close();
  }
}
| 7,436 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyInputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapred.AvroKey;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
/**
* An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni files.
*
* This implement was modeled off
* {@link org.apache.avro.mapreduce.AvroKeyInputFormat} to allow for easy
* transition
*
* A MapReduce InputFormat that can handle Trevni container files.
*
* <p>
* Keys are AvroKey wrapper objects that contain the Trevni data. Since Trevni
* container files store only records (not key/value pairs), the value from this
* InputFormat is a NullWritable.
* </p>
*
* <p>
* A subset schema to be read may be specified with
* {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema}.
*/
public class AvroTrevniKeyInputFormat<T> extends FileInputFormat<AvroKey<T>, NullWritable> {

  /** Create a reader for one split of a Trevni container file. */
  @Override
  public RecordReader<AvroKey<T>, NullWritable> createRecordReader(InputSplit split, TaskAttemptContext context)
      throws IOException, InterruptedException {
    AvroTrevniKeyRecordReader<T> reader = new AvroTrevniKeyRecordReader<>();
    return reader;
  }

}
| 7,437 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapred.AvroKey;
import org.apache.hadoop.io.NullWritable;
/**
* Reads records from an input split representing a chunk of an Trenvi container
* file.
*
* @param <T> The (java) type of data in Trevni container file.
*/
public class AvroTrevniKeyRecordReader<T> extends AvroTrevniRecordReaderBase<AvroKey<T>, NullWritable, T> {

  /** A reusable object to hold records of the Avro container file. */
  private final AvroKey<T> mCurrentKey = new AvroKey<>();

  /** {@inheritDoc} */
  @Override
  public AvroKey<T> getCurrentKey() throws IOException, InterruptedException {
    return mCurrentKey;
  }

  /** {@inheritDoc} */
  @Override
  public NullWritable getCurrentValue() throws IOException, InterruptedException {
    return NullWritable.get();
  }

  /** {@inheritDoc} */
  @Override
  public boolean nextKeyValue() throws IOException, InterruptedException {
    boolean hasNext = super.nextKeyValue();
    // Only refresh the key when a record was actually read; the original
    // unconditionally re-stamped the (stale) last record after end of input.
    if (hasNext) {
      mCurrentKey.datum(getCurrentRecord());
    }
    return hasNext;
  }

}
| 7,438 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyOutputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapred.AvroKey;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
* An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data to
* Trevni files.
*
* This implement was modeled off
* {@link org.apache.avro.mapreduce.AvroKeyOutputFormat} to allow for easy
* transition
*
* FileOutputFormat for writing Trevni container files.
*
* <p>
* Since Trevni container files only contain records (not key/value pairs), this
* output format ignores the value.
* </p>
*
* @param <T> The (java) type of the Trevni data to write.
*
* <p>
* Writes a directory of files per task, each comprising a single
* filesystem block. To reduce the number of files, increase the
* default filesystem block size for the job. Each task also requires
* enough memory to buffer a filesystem block.
*/
public class AvroTrevniKeyOutputFormat<T> extends FileOutputFormat<AvroKey<T>, NullWritable> {

  /** Create a writer that stores task output in a Trevni container file. */
  @Override
  public RecordWriter<AvroKey<T>, NullWritable> getRecordWriter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    AvroTrevniKeyRecordWriter<T> recordWriter = new AvroTrevniKeyRecordWriter<>(context);
    return recordWriter;
  }

}
| 7,439 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyRecordWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* Writes Trevni records to an Trevni container file output stream.
*
* @param <T> The Java type of the Trevni data to write.
*/
public class AvroTrevniKeyRecordWriter<T> extends AvroTrevniRecordWriterBase<AvroKey<T>, NullWritable, T> {

  /**
   * Create a writer bound to the given task context.
   *
   * @param context supplies job-configuration settings for the writer
   */
  public AvroTrevniKeyRecordWriter(TaskAttemptContext context) throws IOException {
    super(context);
  }

  /** Append the key's datum; start a new file once the current block fills. */
  @Override
  public void write(AvroKey<T> key, NullWritable value) throws IOException, InterruptedException {
    writer.write(key.datum());
    boolean blockFull = writer.sizeEstimate() >= blockSize;
    if (blockFull) {
      flush();
    }
  }

  /** Pick the output key schema: the map output schema for map-only jobs. */
  @Override
  protected Schema initSchema(TaskAttemptContext context) {
    if (context.getNumReduceTasks() == 0) {
      return AvroJob.getMapOutputKeySchema(context.getConfiguration());
    }
    return AvroJob.getOutputKeySchema(context.getConfiguration());
  }

}
| 7,440 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
/**
* Reads Trevni generic records from an Trevni container file, where the records
* contain two fields: 'key' and 'value'.
*
* <p>
* The contents of the 'key' field will be parsed into an AvroKey object. The
* contents of the 'value' field will be parsed into an AvroValue object.
* </p>
*
* @param <K> The type of the Avro key to read.
* @param <V> The type of the Avro value to read.
*/
public class AvroTrevniKeyValueRecordReader<K, V>
    extends AvroTrevniRecordReaderBase<AvroKey<K>, AvroValue<V>, GenericRecord> {

  /** The current key the reader is on. */
  private final AvroKey<K> mCurrentKey = new AvroKey<>();

  /** The current value the reader is on. */
  private final AvroValue<V> mCurrentValue = new AvroValue<>();

  /** {@inheritDoc} */
  @Override
  public AvroKey<K> getCurrentKey() throws IOException, InterruptedException {
    return mCurrentKey;
  }

  /** {@inheritDoc} */
  @Override
  public AvroValue<V> getCurrentValue() throws IOException, InterruptedException {
    return mCurrentValue;
  }

  /** {@inheritDoc} */
  @Override
  public boolean nextKeyValue() throws IOException, InterruptedException {
    boolean hasNext = super.nextKeyValue();
    // Only split the record into key/value when one was actually read; the
    // original re-stamped the stale last record after the input was exhausted.
    if (hasNext) {
      AvroKeyValue<K, V> avroKeyValue = new AvroKeyValue<>(getCurrentRecord());
      mCurrentKey.datum(avroKeyValue.getKey());
      mCurrentValue.datum(avroKeyValue.getValue());
    }
    return hasNext;
  }

}
| 7,441 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueOutputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
* An {@link org.apache.hadoop.mapreduce.OutputFormat} that writes Avro data to
* Trevni files.
*
* This implement was modeled off
* {@link org.apache.avro.mapreduce.AvroKeyValueOutputFormat} to allow for easy
* transition
*
* * FileOutputFormat for writing Trevni container files of key/value pairs.
*
* <p>
* Since Trevni container files can only contain records (not key/value pairs),
* this output format puts the key and value into an Avro generic record with
* two fields, named 'key' and 'value'.
* </p>
*
* <p>
* The keys and values given to this output format may be Avro objects wrapped
* in <code>AvroKey</code> or <code>AvroValue</code> objects. The basic Writable
* types are also supported (e.g., IntWritable, Text); they will be converted to
* their corresponding Avro types.
* </p>
*
* @param <K> The type of key. If an Avro type, it must be wrapped in an
* <code>AvroKey</code>.
* @param <V> The type of value. If an Avro type, it must be wrapped in an
* <code>AvroValue</code>.
*
* <p>
* Writes a directory of files per task, each comprising a single
* filesystem block. To reduce the number of files, increase the
* default filesystem block size for the job. Each task also requires
* enough memory to buffer a filesystem block.
*/
public class AvroTrevniKeyValueOutputFormat<K, V> extends FileOutputFormat<AvroKey<K>, AvroValue<V>> {

  /** Create a writer that stores key/value pairs in a Trevni container file. */
  @Override
  public RecordWriter<AvroKey<K>, AvroValue<V>> getRecordWriter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    AvroTrevniKeyValueRecordWriter<K, V> recordWriter = new AvroTrevniKeyValueRecordWriter<>(context);
    return recordWriter;
  }

}
| 7,442 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueRecordWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroDatumConverter;
import org.apache.avro.hadoop.io.AvroDatumConverterFactory;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* Writes key/value pairs to an Trevni container file.
*
* <p>
* Each entry in the Trevni container file will be a generic record with two
* fields, named 'key' and 'value'. The input types may be basic Writable
* objects like Text or IntWritable, or they may be AvroWrapper subclasses
* (AvroKey or AvroValue). Writable objects will be converted to their
* corresponding Avro types when written to the generic record key/value pair.
* </p>
*
* @param <K> The type of key to write.
* @param <V> The type of value to write.
*/
public class AvroTrevniKeyValueRecordWriter<K, V>
    extends AvroTrevniRecordWriterBase<AvroKey<K>, AvroValue<V>, GenericRecord> {

  /**
   * Writer schema for the two-field ('key'/'value') generic records stored in
   * the Trevni container file.
   */
  Schema mKeyValuePairSchema;

  /** Reusable generic record wrapper for each key/value pair written. */
  AvroKeyValue<Object, Object> keyValueRecord;

  /** Converts the raw input key into an Avro datum. */
  AvroDatumConverter<K, ?> keyConverter;

  /** Converts the raw input value into an Avro datum. */
  AvroDatumConverter<V, ?> valueConverter;

  /**
   * Create a writer bound to the given task context.
   *
   * @param context supplies job-configuration settings for the writer
   */
  public AvroTrevniKeyValueRecordWriter(TaskAttemptContext context) throws IOException {
    super(context);
    mKeyValuePairSchema = initSchema(context);
    keyValueRecord = new AvroKeyValue<>(new GenericData.Record(mKeyValuePairSchema));
  }

  /**
   * Pack the pair into the reusable generic record and append it; start a new
   * file once the current block fills.
   */
  @Override
  public void write(AvroKey<K> key, AvroValue<V> value) throws IOException, InterruptedException {
    keyValueRecord.setKey(key.datum());
    keyValueRecord.setValue(value.datum());
    writer.write(keyValueRecord.get());
    if (writer.sizeEstimate() >= blockSize) {
      flush();
    }
  }

  /** Derive the key/value pair schema from the job's output key/value classes. */
  @SuppressWarnings("unchecked")
  @Override
  protected Schema initSchema(TaskAttemptContext context) {
    AvroDatumConverterFactory factory = new AvroDatumConverterFactory(context.getConfiguration());
    keyConverter = factory.create((Class<K>) context.getOutputKeyClass());
    valueConverter = factory.create((Class<V>) context.getOutputValueClass());
    // Build the two-field generic record schema for the key/value pair.
    Schema keySchema = keyConverter.getWriterSchema();
    Schema valueSchema = valueConverter.getWriterSchema();
    return AvroKeyValue.getSchema(keySchema, valueSchema);
  }

}
| 7,443 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniKeyValueInputFormat.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapred.AvroValue;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
/**
* An {@link org.apache.hadoop.mapreduce.InputFormat} for Trevni files.
*
* This implement was modeled off
* {@link org.apache.avro.mapreduce.AvroKeyValueInputFormat} to allow for easy
* transition
*
* <p>
* A MapReduce InputFormat that reads from Trevni container files of key/value
* generic records.
*
* <p>
* Trevni container files that container generic records with the two fields
* 'key' and 'value' are expected. The contents of the 'key' field will be used
* as the job input key, and the contents of the 'value' field will be used as
* the job output value.
* </p>
*
* @param <K> The type of the Trevni key to read.
* @param <V> The type of the Trevni value to read.
*
* <p>
* A subset schema to be read may be specified with
* {@link org.apache.avro.mapreduce.AvroJob#setInputKeySchema} and
* {@link org.apache.avro.mapreduce.AvroJob#setInputValueSchema}.
*/
public class AvroTrevniKeyValueInputFormat<K, V> extends FileInputFormat<AvroKey<K>, AvroValue<V>> {

  /** Create a reader for one split of a key/value Trevni container file. */
  @Override
  public RecordReader<AvroKey<K>, AvroValue<V>> createRecordReader(InputSplit split, TaskAttemptContext context)
      throws IOException, InterruptedException {
    AvroTrevniKeyValueRecordReader<K, V> reader = new AvroTrevniKeyValueRecordReader<>();
    return reader;
  }

}
| 7,444 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordReaderBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.reflect.ReflectData;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.trevni.avro.AvroColumnReader;
import org.apache.trevni.avro.HadoopInput;
/**
* Abstract base class for <code>RecordReader</code>s that read Trevni container
* files.
*
* @param <K> The type of key the record reader should generate.
* @param <V> The type of value the record reader should generate.
* @param <T> The type of the entries within the Trevni container file being
* read.
*/
public abstract class AvroTrevniRecordReaderBase<K, V, T> extends RecordReader<K, V> {

  /** The Trevni file reader */
  private AvroColumnReader<T> reader;

  /** Number of rows in the Trevni file (float, for progress arithmetic) */
  private float rows;

  /** The current row number being read in */
  private long row;

  /** A reusable object to hold records of the Avro container file. */
  private T mCurrentRecord;

  /** {@inheritDoc} */
  @Override
  public void initialize(InputSplit inputSplit, TaskAttemptContext context) throws IOException, InterruptedException {
    final FileSplit file = (FileSplit) inputSplit;
    context.setStatus(file.toString());

    final AvroColumnReader.Params params = new AvroColumnReader.Params(
        new HadoopInput(file.getPath(), context.getConfiguration()));
    params.setModel(ReflectData.get());

    // Apply the subset read schema, if one was configured on the job.
    if (AvroJob.getInputKeySchema(context.getConfiguration()) != null) {
      params.setSchema(AvroJob.getInputKeySchema(context.getConfiguration()));
    }

    reader = new AvroColumnReader<>(params);
    rows = reader.getRowCount();
  }

  /** {@inheritDoc} */
  @Override
  public boolean nextKeyValue() throws IOException, InterruptedException {
    if (!reader.hasNext())
      return false;
    mCurrentRecord = reader.next();
    row++;
    return true;
  }

  /**
   * Gets the current record read from the Trevni container file.
   *
   * <p>
   * Calling <code>nextKeyValue()</code> moves this to the next record.
   * </p>
   *
   * @return The current Trevni record (may be null if no record has been read).
   */
  protected T getCurrentRecord() {
    return mCurrentRecord;
  }

  /** {@inheritDoc} */
  @Override
  public void close() throws IOException {
    reader.close();
  }

  /** {@inheritDoc} */
  @Override
  public float getProgress() throws IOException, InterruptedException {
    // Guard the empty-file case: 0/0 in float arithmetic is NaN, which
    // confuses progress reporting. An empty input is fully processed.
    return rows == 0 ? 1.0f : row / rows;
  }

}
| 7,445 |
0 | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro | Create_ds/avro/lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.trevni.avro.mapreduce;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map.Entry;
import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.trevni.avro.AvroColumnWriter;
/**
 * Abstract base class for <code>RecordWriter</code>s that write Trevni
 * container files.
 *
 * @param <K> The type of key the record writer should generate.
 * @param <V> The type of value the record writer should generate.
 * @param <T> The type of the entries within the Trevni container file being
 *          written.
 */
public abstract class AvroTrevniRecordWriterBase<K, V, T> extends RecordWriter<K, V> {

  /** trevni file extension */
  public static final String EXT = ".trv";

  /** prefix of job configs that we care about */
  public static final String META_PREFIX = "trevni.meta.";

  /**
   * Counter that increments as new trevni files are created because the current
   * file has exceeded the block size.
   */
  protected int part = 0;

  /** Trevni file writer; replaced with a fresh instance after every flush. */
  protected AvroColumnWriter<T> writer;

  /** This will be a unique directory linked to the task. */
  final Path dirPath;

  /** HDFS object */
  final FileSystem fs;

  /** Current configured blocksize */
  final long blockSize;

  /** Provided avro schema from the context */
  protected Schema schema;

  /** Metadata to be stored in the output file. */
  protected ColumnFileMetaData meta;

  /**
   * Constructor.
   *
   * @param context The TaskAttemptContext to supply the writer with information
   *          from the job configuration
   * @throws IOException if the task's output directory cannot be created
   */
  public AvroTrevniRecordWriterBase(TaskAttemptContext context) throws IOException {
    schema = initSchema(context);
    meta = filterMetadata(context.getConfiguration());
    writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());

    Path outputPath = FileOutputFormat.getOutputPath(context);
    String dir = FileOutputFormat.getUniqueFile(context, "part", "");
    // Use the parent/child Path constructor instead of string concatenation so
    // separators and URI schemes are handled by the filesystem API.
    dirPath = new Path(outputPath, dir);
    fs = dirPath.getFileSystem(context.getConfiguration());
    fs.mkdirs(dirPath);
    blockSize = fs.getDefaultBlockSize(dirPath);
  }

  /**
   * Use the task context to construct a schema for writing.
   */
  abstract protected Schema initSchema(TaskAttemptContext context);

  /**
   * A Trevni flush will close the current file and prep a new writer.
   *
   * @throws IOException if writing the current part file fails
   */
  public void flush() throws IOException {
    try (OutputStream out = fs.create(new Path(dirPath, "part-" + (part++) + EXT))) {
      writer.writeTo(out);
    }
    writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
  }

  /** {@inheritDoc} Flushes any buffered rows as the final part file. */
  @Override
  public void close(TaskAttemptContext arg0) throws IOException, InterruptedException {
    flush();
  }

  /**
   * Copies every job-configuration entry whose key starts with
   * {@link #META_PREFIX} into Trevni file metadata, with the prefix stripped
   * from the key and the value encoded as UTF-8 bytes.
   */
  static ColumnFileMetaData filterMetadata(final Configuration configuration) {
    final ColumnFileMetaData meta = new ColumnFileMetaData();
    for (Entry<String, String> confEntry : configuration) {
      if (confEntry.getKey().startsWith(META_PREFIX)) {
        meta.put(confEntry.getKey().substring(META_PREFIX.length()),
            confEntry.getValue().getBytes(StandardCharsets.UTF_8));
      }
    }
    return meta;
  }
}
| 7,446 |
0 | Create_ds/avro/lang/java/android/src/test/java/org/apache/avro/util | Create_ds/avro/lang/java/android/src/test/java/org/apache/avro/util/internal/TestClassValueCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.internal;
import org.junit.jupiter.api.Test;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestClassValueCache {

  @Test
  void basic() {
    ClassValueCache<String> cache = new ClassValueCache<>(Class::toString);

    String first = cache.apply(String.class);
    assertThat(first, is("class java.lang.String"));

    // Unlike the core ClassValueUtil, this always creates a new instance
    String second = cache.apply(String.class);
    assertThat(second, not(sameInstance(first)));
  }
}
| 7,447 |
0 | Create_ds/avro/lang/java/android/src/main/java/org/apache/avro/util | Create_ds/avro/lang/java/android/src/main/java/org/apache/avro/util/internal/ClassValueCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.internal;
import java.util.function.Function;
/**
 * The Android environment doesn't support {@link ClassValue}. This utility
 * bypasses its use in Avro to always recalculate the value without caching.
 * <p>
 * This may have a performance impact in Android.
 *
 * @param <R> Return type of the ClassValue
 */
public class ClassValueCache<R> implements Function<Class<?>, R> {

  // Despite the class name, nothing is cached: every lookup re-runs this.
  private final Function<Class<?>, R> compute;

  /**
   * @param ifAbsent The function that calculates the value to be used from the
   *          class instance.
   */
  public ClassValueCache(Function<Class<?>, R> ifAbsent) {
    this.compute = ifAbsent;
  }

  /** Recomputes (never caches) the value for the given class. */
  @Override
  public R apply(Class<?> c) {
    return this.compute.apply(c);
  }
}
| 7,448 |
0 | Create_ds/avro/lang/java/android/src/main/java/org/apache/avro/util | Create_ds/avro/lang/java/android/src/main/java/org/apache/avro/util/internal/ThreadLocalWithInitial.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.util.internal;
import java.util.function.Supplier;
/**
 * Wraps a {@link ThreadLocal#withInitial(Supplier)} so it can be overridden in
 * an android environment, where this method is not available until API 26.
 */
public class ThreadLocalWithInitial {

  // Static utility holder; not meant to be instantiated.
  private ThreadLocalWithInitial() {
  }

  /**
   * Delegate a ThreadLocal instance with the supplier.
   *
   * @param supplier produces the per-thread initial value; invoked lazily on
   *                 the first {@code get()} in each thread.
   */
  @SuppressWarnings("AnonymousHasLambdaAlternative")
  public static <T> ThreadLocal<T> of(Supplier<? extends T> supplier) {
    return new ThreadLocal<T>() {
      @Override
      protected T initialValue() {
        return supplier.get();
      }
    };
  }
}
| 7,449 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflect.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.reflect.ReflectRequestor;
import org.apache.avro.ipc.reflect.ReflectResponder;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
import java.net.InetSocketAddress;
import java.util.Random;
import java.io.IOException;
public class TestProtocolReflect {

  /** Simple payload record echoed over the wire. */
  public static class TestRecord {
    private String name;

    @Override
    public int hashCode() {
      return this.name.hashCode();
    }

    @Override
    public boolean equals(Object that) {
      // Guard the cast so comparing against null or a foreign type returns
      // false instead of throwing, per the equals() contract.
      return that instanceof TestRecord && this.name.equals(((TestRecord) that).name);
    }
  }

  /** The RPC interface exercised by these tests. */
  public interface Simple {
    String hello(String greeting);

    TestRecord echo(TestRecord record);

    int add(int arg1, int arg2);

    byte[] echoBytes(byte[] data);

    void error() throws SimpleException;
  }

  /** When true, error() throws an undeclared RuntimeException instead. */
  private static boolean throwUndeclaredError;

  public static class TestImpl implements Simple {
    @Override
    public String hello(String greeting) {
      return "goodbye";
    }

    @Override
    public int add(int arg1, int arg2) {
      return arg1 + arg2;
    }

    @Override
    public TestRecord echo(TestRecord record) {
      return record;
    }

    @Override
    public byte[] echoBytes(byte[] data) {
      return data;
    }

    @Override
    public void error() throws SimpleException {
      if (throwUndeclaredError)
        throw new RuntimeException("foo");
      throw new SimpleException("foo");
    }
  }

  protected static Server server;
  protected static Transceiver client;
  protected static Simple proxy;

  /** Lazily starts a socket server and connects a reflect-based client. */
  @BeforeEach
  public void testStartServer() throws Exception {
    if (server != null)
      return;
    server = new SocketServer(new ReflectResponder(Simple.class, new TestImpl()), new InetSocketAddress(0));
    server.start();
    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    proxy = ReflectRequestor.getClient(Simple.class, client);
  }

  @Test
  void classLoader() throws Exception {
    ClassLoader loader = new ClassLoader() {
    };

    ReflectResponder responder = new ReflectResponder(Simple.class, new TestImpl(), new ReflectData(loader));
    assertEquals(loader, responder.getReflectData().getClassLoader());

    ReflectRequestor requestor = new ReflectRequestor(Simple.class, client, new ReflectData(loader));
    assertEquals(loader, requestor.getReflectData().getClassLoader());
  }

  @Test
  void hello() throws IOException {
    assertEquals("goodbye", proxy.hello("bob"));
  }

  @Test
  void echo() throws IOException {
    TestRecord record = new TestRecord();
    record.name = "foo";
    assertEquals(record, proxy.echo(record));
  }

  @Test
  void add() throws IOException {
    assertEquals(3, proxy.add(1, 2));
  }

  @Test
  void echoBytes() throws IOException {
    Random random = new Random();
    int length = random.nextInt(1024 * 16);
    byte[] data = new byte[length];
    random.nextBytes(data);
    assertArrayEquals(data, proxy.echoBytes(data));
  }

  @Test
  void error() throws IOException {
    SimpleException error = null;
    try {
      proxy.error();
    } catch (SimpleException e) {
      error = e;
    }
    assertNotNull(error);
    assertEquals("foo", error.getMessage());
  }

  @Test
  void undeclaredError() throws Exception {
    // Access the static flag statically, not through 'this'.
    throwUndeclaredError = true;
    RuntimeException error = null;
    try {
      proxy.error();
    } catch (AvroRuntimeException e) {
      error = e;
    } finally {
      throwUndeclaredError = false;
    }
    assertNotNull(error);
    assertTrue(error.toString().contains("foo"));
  }

  @AfterAll
  public static void testStopServer() throws IOException {
    client.close();
    server.close();
  }
}
| 7,450 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolParsing.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.File;
import java.io.IOException;
import org.junit.jupiter.api.Test;
import org.apache.avro.Protocol.Message;
public class TestProtocolParsing {

  /** Parses the shared "simple" test protocol from the source tree. */
  public static Protocol getSimpleProtocol() throws IOException {
    return Protocol.parse(new File("../../../share/test/schemas/simple.avpr"));
  }

  @Test
  void parsing() throws IOException {
    Protocol protocol = getSimpleProtocol();
    assertEquals("Protocol used for testing.", protocol.getDoc());
    assertEquals(6, protocol.getMessages().size());
    assertEquals("Pretend you're in a cave!", protocol.getMessages().get("echo").getDoc());
  }

  /** Wraps a single message declaration in a minimal protocol and parses it. */
  private static Message parseMessage(String message) throws Exception {
    Protocol wrapper = Protocol
        .parse("{\"protocol\": \"org.foo.Bar\"," + "\"types\": []," + "\"messages\": {" + message + "}}");
    return wrapper.getMessages().values().iterator().next();
  }

  @Test
  void oneWay() throws Exception {
    // permit one-way messages w/ null response
    Message withNullResponse = parseMessage(
        "\"ack\": {" + "\"request\": []," + "\"response\": \"null\"," + "\"one-way\": true}");
    assertTrue(withNullResponse.isOneWay());
    // permit one-way messages w/o response
    Message withoutResponse = parseMessage("\"ack\": {" + "\"request\": []," + "\"one-way\": true}");
    assertTrue(withoutResponse.isOneWay());
  }

  @Test
  void oneWayResponse() throws Exception {
    // prohibit one-way messages with a non-null response type
    assertThrows(SchemaParseException.class, () -> parseMessage(
        "\"ack\": {" + "\"request\": [\"string\"]," + "\"response\": \"string\"," + "\"one-way\": true}"));
  }

  @Test
  void oneWayError() throws Exception {
    // prohibit one-way messages with errors
    assertThrows(SchemaParseException.class,
        () -> parseMessage("\"ack\": {" + "\"request\": [\"string\"]," + "\"errors\": []," + "\"one-way\": true}"));
  }

  @Test
  void messageFieldAliases() throws IOException {
    assertTrue(helloGreetingField().aliases().contains("salute"));
  }

  @Test
  void messageCustomProperties() throws IOException {
    assertEquals("customValue", helloGreetingField().getProp("customProp"));
  }

  /** Looks up the "greeting" request field of "hello", asserting it exists. */
  private static Schema.Field helloGreetingField() throws IOException {
    final Message msg = getSimpleProtocol().getMessages().get("hello");
    assertNotNull(msg);
    final Schema.Field field = msg.getRequest().getField("greeting");
    assertNotNull(field);
    return field;
  }
}
| 7,451 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.compiler.specific.TestSpecificCompiler;
import org.apache.avro.data.Json;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.RandomData;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.io.TempDir;
import static org.junit.jupiter.api.Assertions.*;
public class TestSchema {
  // Scratch directory managed by JUnit's @TempDir extension; used as the
  // parent for per-test round-trip files in check(...).
  @TempDir
  public File DIR;
public static final String LISP_SCHEMA = "{\"type\": \"record\", \"name\": \"Lisp\", \"fields\": ["
+ "{\"name\":\"value\", \"type\":[\"null\", \"string\","
+ "{\"type\": \"record\", \"name\": \"Cons\", \"fields\": [" + "{\"name\":\"car\", \"type\":\"Lisp\"},"
+ "{\"name\":\"cdr\", \"type\":\"Lisp\"}]}]}]}";
public static final String BASIC_ENUM_SCHEMA = "{\"type\":\"enum\", \"name\":\"Test\","
+ "\"symbols\": [\"A\", \"B\"]}";
public static final String SCHEMA_WITH_DOC_TAGS = "{\n" + " \"type\": \"record\",\n"
+ " \"name\": \"outer_record\",\n" + " \"doc\": \"This is not a world record.\",\n" + " \"fields\": [\n"
+ " { \"type\": { \"type\": \"fixed\", \"doc\": \"Very Inner Fixed\", "
+ " \"name\": \"very_inner_fixed\", \"size\": 1 },\n"
+ " \"doc\": \"Inner Fixed\", \"name\": \"inner_fixed\" },\n" + " { \"type\": \"string\",\n"
+ " \"name\": \"inner_string\",\n" + " \"doc\": \"Inner String\" },\n"
+ " { \"type\": { \"type\": \"enum\", \"doc\": \"Very Inner Enum\", \n"
+ " \"name\": \"very_inner_enum\", \n"
+ " \"symbols\": [ \"A\", \"B\", \"C\" ] },\n"
+ " \"doc\": \"Inner Enum\", \"name\": \"inner_enum\" },\n"
+ " { \"type\": [\"string\", \"int\"], \"doc\": \"Inner Union\", \n" + " \"name\": \"inner_union\" }\n"
+ " ]\n" + "}\n";
private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "30"));
@Test
void testNull(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.NULL), new Schema.Parser().parse("\"null\""));
assertEquals(Schema.create(Type.NULL), new Schema.Parser().parse("{\"type\":\"null\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"null\"", "null", null);
}
@Test
void testBoolean(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.BOOLEAN), new Schema.Parser().parse("\"boolean\""));
assertEquals(Schema.create(Type.BOOLEAN), new Schema.Parser().parse("{\"type\":\"boolean\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"boolean\"", "true", Boolean.TRUE);
}
@Test
void string(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.STRING), new Schema.Parser().parse("\"string\""));
assertEquals(Schema.create(Type.STRING), new Schema.Parser().parse("{\"type\":\"string\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"string\"", "\"foo\"", new Utf8("foo"));
}
@Test
void bytes(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.BYTES), new Schema.Parser().parse("\"bytes\""));
assertEquals(Schema.create(Type.BYTES), new Schema.Parser().parse("{\"type\":\"bytes\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"bytes\"", "\"\\u0000ABC\\u00FF\"",
ByteBuffer.wrap(new byte[] { 0, 65, 66, 67, -1 }));
}
@Test
void testInt(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.INT), new Schema.Parser().parse("\"int\""));
assertEquals(Schema.create(Type.INT), new Schema.Parser().parse("{\"type\":\"int\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"int\"", "9", 9);
}
@Test
void testLong(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.LONG), new Schema.Parser().parse("\"long\""));
assertEquals(Schema.create(Type.LONG), new Schema.Parser().parse("{\"type\":\"long\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"long\"", "11", 11L);
}
@Test
void testFloat(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.FLOAT), new Schema.Parser().parse("\"float\""));
assertEquals(Schema.create(Type.FLOAT), new Schema.Parser().parse("{\"type\":\"float\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"float\"", "1.1", 1.1f);
checkDefault("\"float\"", "\"NaN\"", Float.NaN);
checkDefault("\"float\"", "\"Infinity\"", Float.POSITIVE_INFINITY);
checkDefault("\"float\"", "\"-Infinity\"", Float.NEGATIVE_INFINITY);
}
@Test
void testDouble(TestInfo testInfo) throws Exception {
assertEquals(Schema.create(Type.DOUBLE), new Schema.Parser().parse("\"double\""));
assertEquals(Schema.create(Type.DOUBLE), new Schema.Parser().parse("{\"type\":\"double\"}"));
check(new File(DIR, testInfo.getTestMethod().get().getName()), "\"double\"", "1.2", 1.2);
checkDefault("\"double\"", "\"NaN\"", Double.NaN);
checkDefault("\"double\"", "\"Infinity\"", Double.POSITIVE_INFINITY);
checkDefault("\"double\"", "\"-Infinity\"", Double.NEGATIVE_INFINITY);
}
@Test
void array(TestInfo testInfo) throws Exception {
String json = "{\"type\":\"array\", \"items\": \"long\"}";
Schema schema = new Schema.Parser().parse(json);
Collection<Long> array = new GenericData.Array<>(1, schema);
array.add(1L);
check(new File(DIR, testInfo.getTestMethod().get().getName()), json, "[1]", array);
array = new ArrayList<>(1);
array.add(1L);
check(new File(DIR, testInfo.getTestMethod().get().getName()), json, "[1]", array);
checkParseError("{\"type\":\"array\"}"); // items required
}
@Test
void map(TestInfo testInfo) throws Exception {
HashMap<Utf8, Long> map = new HashMap<>();
map.put(new Utf8("a"), 1L);
check(new File(DIR, testInfo.getTestMethod().get().getName()), "{\"type\":\"map\", \"values\":\"long\"}",
"{\"a\":1}", map);
checkParseError("{\"type\":\"map\"}"); // values required
}
@Test
void unionMap(TestInfo testInfo) throws Exception {
String unionMapSchema = "{\"name\":\"foo\", \"type\":\"record\"," + " \"fields\":[ {\"name\":\"mymap\", \"type\":"
+ " [{\"type\":\"map\", \"values\":" + " [\"int\",\"long\",\"float\",\"string\"]}," + " \"null\"]"
+ " }]" + " }";
check(new File(DIR, testInfo.getTestMethod().get().getName()), unionMapSchema, true);
}
@Test
void record(TestInfo testInfo) throws Exception {
String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+ "[{\"name\":\"f\", \"type\":\"long\", \"foo\":\"bar\"}]}";
Schema schema = new Schema.Parser().parse(recordJson);
GenericData.Record record = new GenericData.Record(schema);
record.put("f", 11L);
check(new File(DIR, testInfo.getTestMethod().get().getName()), recordJson, "{\"f\":11}", record, false);
// test field props
assertEquals("bar", schema.getField("f").getProp("foo"));
assertEquals("bar", new Schema.Parser().parse(schema.toString()).getField("f").getProp("foo"));
schema.getField("f").addProp("baz", "boo");
assertEquals("boo", schema.getField("f").getProp("baz"));
checkParseError("{\"type\":\"record\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":\"Y\"}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"name\":\"f\"}]}"); // no type
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"type\":\"long\"}]}"); // no name
// check invalid record names
checkParseError("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}");
checkParseError("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}");
// check invalid field names
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":[" + "{\"name\":\"1f\",\"type\":\"int\"}]}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":[" + "{\"name\":\"f$\",\"type\":\"int\"}]}");
checkParseError("{\"type\":\"record\",\"name\":\"X\",\"fields\":[" + "{\"name\":\"f.g\",\"type\":\"int\"}]}");
}
@Test
void invalidNameTolerance() {
new Schema.Parser(Schema.NameValidator.NO_VALIDATION).parse("{\"type\":\"record\",\"name\":\"1X\",\"fields\":[]}");
new Schema.Parser(Schema.NameValidator.NO_VALIDATION).parse("{\"type\":\"record\",\"name\":\"X-\",\"fields\":[]}");
new Schema.Parser(Schema.NameValidator.NO_VALIDATION).parse("{\"type\":\"record\",\"name\":\"X$\",\"fields\":[]}");
}
@Test
void mapInRecord(TestInfo testInfo) throws Exception {
String json = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+ "[{\"name\":\"f\", \"type\": {\"type\":\"map\", \"values\":\"long\"}}]}";
Schema schema = new Schema.Parser().parse(json);
HashMap<Utf8, Long> map = new HashMap<>();
map.put(new Utf8("a"), 1L);
GenericData.Record record = new GenericData.Record(schema);
record.put("f", map);
check(new File(DIR, testInfo.getTestMethod().get().getName()), json, "{\"f\":{\"a\":1}}", record, false);
}
@Test
void testEnum(TestInfo testInfo) throws Exception {
check(new File(DIR, testInfo.getTestMethod().get().getName()), BASIC_ENUM_SCHEMA, "\"B\"",
new GenericData.EnumSymbol(new Schema.Parser().parse(BASIC_ENUM_SCHEMA), "B"), false);
checkParseError("{\"type\":\"enum\"}"); // symbols required
checkParseError("{\"type\":\"enum\",\"symbols\": [\"X\"]}"); // name reqd
// check no duplicate symbols
checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X\",\"X\"]}");
// check no invalid symbols
checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"1X\"]}");
checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X$\"]}");
checkParseError("{\"type\":\"enum\",\"name\":\"X\",\"symbols\":[\"X.Y\"]}");
}
@Test
void fixed(TestInfo testInfo) throws Exception {
String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
Schema schema = new Schema.Parser().parse(json);
check(new File(DIR, testInfo.getTestMethod().get().getName()), json, "\"a\"",
new GenericData.Fixed(schema, new byte[] { (byte) 'a' }), false);
checkParseError("{\"type\":\"fixed\"}"); // size required
}
@Test
void recursive(TestInfo testInfo) throws Exception {
check(new File(DIR, testInfo.getTestMethod().get().getName()),
"{\"type\": \"record\", \"name\": \"Node\", \"fields\": [" + "{\"name\":\"label\", \"type\":\"string\"},"
+ "{\"name\":\"children\", \"type\":" + "{\"type\": \"array\", \"items\": \"Node\" }}]}",
false);
}
@Test
void recursiveEquals() throws Exception {
String jsonSchema = "{\"type\":\"record\", \"name\":\"List\", \"fields\": ["
+ "{\"name\":\"next\", \"type\":\"List\"}]}";
Schema s1 = new Schema.Parser().parse(jsonSchema);
Schema s2 = new Schema.Parser().parse(jsonSchema);
assertEquals(s1, s2);
s1.hashCode(); // test no stackoverflow
}
  /**
   * Test that equals() and hashCode() don't require exponential time on certain
   * pathological schemas.
   */
  @Test
  void schemaExplosion() throws Exception {
    for (int i = 1; i < 15; i++) { // 15 is big enough to trigger
      // create a list of records, each with a single field whose type is a
      // union of all of the records.
      List<Schema> recs = new ArrayList<>();
      for (int j = 0; j < i; j++)
        recs.add(Schema.createRecord("" + (char) ('A' + j), null, null, false)); // names "A", "B", ...
      for (Schema s : recs) {
        Schema union = Schema.createUnion(recs);
        Field f = new Field("x", union, null, null);
        List<Field> fields = new ArrayList<>();
        fields.add(f);
        s.setFields(fields); // closes the cycle: every record now references every other
      }
      // check that equals and hashcode are correct and complete in a
      // reasonable amount of time
      for (Schema s1 : recs) {
        Schema s2 = new Schema.Parser().parse(s1.toString());
        assertEquals(s1.hashCode(), s2.hashCode());
        assertEquals(s1, s2);
      }
    }
  }
  // Round-trips the recursive LISP cons-cell schema declared above.
  @Test
  void lisp(TestInfo testInfo) throws Exception {
    check(new File(DIR, testInfo.getTestMethod().get().getName()), LISP_SCHEMA, false);
  }
@Test
void union(TestInfo testInfo) throws Exception {
check(new File(DIR, testInfo.getTestMethod().get().getName()), "[\"string\", \"long\"]", false);
checkDefault("[\"double\", \"long\"]", "1.1", 1.1);
checkDefault("[\"double\", \"string\"]", "\"TheString\"", new Utf8("TheString"));
// test that erroneous default values cause errors
for (String type : new String[] { "int", "long", "float", "double", "string", "bytes", "boolean" }) {
// checkValidateDefaults("[\"" + type + "\", \"null\"]", "null"); // schema parse time
checkDefault("[\"" + type + "\", \"null\"]", "null", null); // read time
}
checkDefault("[\"null\", \"int\"]", "0", 0);
checkDefault("[\"null\", \"long\"]", "0", 0l);
checkDefault("[\"null\", \"float\"]", "0.0", 0.0f);
checkDefault("[\"null\", \"double\"]", "0.0", 0.0d);
checkDefault("[\"null\", \"string\"]", "\"Hi\"", new Utf8("Hi"));
checkDefault("[\"null\", \"bytes\"]", "\"01\"", ByteBuffer.wrap("01".getBytes(StandardCharsets.UTF_8)));
checkDefault("[\"null\", \"boolean\"]", "true", true);
// check union json
String record = "{\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]}";
String fixed = "{\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1}";
String enu = "{\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
Schema union = new Schema.Parser().parse("[\"null\",\"string\"," + record + "," + enu + "," + fixed + "]");
checkJson(union, null, "null");
checkJson(union, new Utf8("foo"), "{\"string\":\"foo\"}");
checkJson(union, new GenericData.Record(new Schema.Parser().parse(record)), "{\"Foo\":{}}");
checkJson(union, new GenericData.Fixed(new Schema.Parser().parse(fixed), new byte[] { (byte) 'a' }),
"{\"Bar\":\"a\"}");
checkJson(union, new GenericData.EnumSymbol(new Schema.Parser().parse(enu), "X"), "{\"Baz\":\"X\"}");
}
  /**
   * Exercises union schemas mixing one branch of each unnamed type with
   * named-type branches, and verifies the duplicate-branch rules: named types
   * may repeat only under distinct full names, unnamed types may never repeat,
   * and unions may not contain unions.
   */
  @Test
  void complexUnions(TestInfo testInfo) throws Exception {
    // one of each unnamed type and two of named types
    String partial = "[\"int\", \"long\", \"float\", \"double\", \"boolean\", \"bytes\","
        + " \"string\", {\"type\":\"array\", \"items\": \"long\"}," + " {\"type\":\"map\", \"values\":\"long\"}";
    String namedTypes = ", {\"type\":\"record\",\"name\":\"Foo\",\"fields\":[]},"
        + " {\"type\":\"fixed\",\"name\":\"Bar\",\"size\": 1},"
        + " {\"type\":\"enum\",\"name\":\"Baz\",\"symbols\": [\"X\"]}";
    String namedTypes2 = ", {\"type\":\"record\",\"name\":\"Foo2\",\"fields\":[]},"
        + " {\"type\":\"fixed\",\"name\":\"Bar2\",\"size\": 1},"
        + " {\"type\":\"enum\",\"name\":\"Baz2\",\"symbols\": [\"X\"]}";
    // distinct names: both unions are legal
    check(new File(DIR, testInfo.getTestMethod().get().getName()), partial + namedTypes + "]", false);
    check(new File(DIR, testInfo.getTestMethod().get().getName()), partial + namedTypes + namedTypes2 + "]", false);
    // repeating the same named types is a parse error
    checkParseError(partial + namedTypes + namedTypes + "]");
    // fail with two branches of the same unnamed type
    checkUnionError(new Schema[] { Schema.create(Type.INT), Schema.create(Type.INT) });
    checkUnionError(new Schema[] { Schema.create(Type.LONG), Schema.create(Type.LONG) });
    checkUnionError(new Schema[] { Schema.create(Type.FLOAT), Schema.create(Type.FLOAT) });
    checkUnionError(new Schema[] { Schema.create(Type.DOUBLE), Schema.create(Type.DOUBLE) });
    checkUnionError(new Schema[] { Schema.create(Type.BOOLEAN), Schema.create(Type.BOOLEAN) });
    checkUnionError(new Schema[] { Schema.create(Type.BYTES), Schema.create(Type.BYTES) });
    checkUnionError(new Schema[] { Schema.create(Type.STRING), Schema.create(Type.STRING) });
    checkUnionError(
        new Schema[] { Schema.createArray(Schema.create(Type.INT)), Schema.createArray(Schema.create(Type.INT)) });
    checkUnionError(
        new Schema[] { Schema.createMap(Schema.create(Type.INT)), Schema.createMap(Schema.create(Type.INT)) });
    List<String> symbols = new ArrayList<>();
    symbols.add("NOTHING");
    // succeed with two branches of the same named type, if different names
    Schema u;
    u = buildUnion(new Schema[] { new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"x.A\",\"fields\":[]}"),
        new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"y.A\",\"fields\":[]}") });
    check(new File(DIR, testInfo.getTestMethod().get().getName()), u.toString(), false);
    u = buildUnion(new Schema[] { new Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"x.A\",\"symbols\":[\"X\"]}"),
        new Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"y.A\",\"symbols\":[\"Y\"]}") });
    check(new File(DIR, testInfo.getTestMethod().get().getName()), u.toString(), false);
    u = buildUnion(new Schema[] { new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"x.A\",\"size\":4}"),
        new Schema.Parser().parse("{\"type\":\"fixed\",\"name\":\"y.A\",\"size\":8}") });
    check(new File(DIR, testInfo.getTestMethod().get().getName()), u.toString(), false);
    // fail with two branches of the same named type, but same names
    checkUnionError(new Schema[] { Schema.createRecord("Foo", null, "org.test", false),
        Schema.createRecord("Foo", null, "org.test", false) });
    checkUnionError(new Schema[] { Schema.createEnum("Bar", null, "org.test", symbols),
        Schema.createEnum("Bar", null, "org.test", symbols) });
    checkUnionError(new Schema[] { Schema.createFixed("Baz", null, "org.test", 2),
        Schema.createFixed("Baz", null, "org.test", 1) });
    Schema union = buildUnion(new Schema[] { Schema.create(Type.INT) });
    // fail if creating a union of a union
    checkUnionError(new Schema[] { union });
  }
@Test
void complexProp() {
String json = "{\"type\":\"null\", \"foo\": [0]}";
Schema s = new Schema.Parser().parse(json);
assertNull(s.getProp("foo"));
}
@Test
void propOrdering() {
String json = "{\"type\":\"int\",\"z\":\"c\",\"yy\":\"b\",\"x\":\"a\"}";
Schema s = new Schema.Parser().parse(json);
assertEquals(json, s.toString());
}
@Test
void parseInputStream() throws IOException {
Schema s = new Schema.Parser().parse(new ByteArrayInputStream("\"boolean\"".getBytes(StandardCharsets.UTF_8)));
assertEquals(new Schema.Parser().parse("\"boolean\""), s);
}
@Test
void namespaceScope() {
String z = "{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}";
String y = "{\"type\":\"record\",\"name\":\"q.Y\",\"fields\":[" + "{\"name\":\"f\",\"type\":" + z + "}]}";
String x = "{\"type\":\"record\",\"name\":\"p.X\",\"fields\":[" + "{\"name\":\"f\",\"type\":" + y + "},"
+ "{\"name\":\"g\",\"type\":" + z + "}" + "]}";
Schema xs = new Schema.Parser().parse(x);
Schema ys = xs.getField("f").schema();
assertEquals("p.Z", xs.getField("g").schema().getFullName());
assertEquals("q.Z", ys.getField("f").schema().getFullName());
}
@Test
void namespaceNesting() {
String y = "{\"type\":\"record\",\"name\":\"y.Y\",\"fields\":[" + "{\"name\":\"f\",\"type\":\"x.X\"}]}";
String x = "{\"type\":\"record\",\"name\":\"x.X\",\"fields\":[" + "{\"name\":\"f\",\"type\":" + y + "}" + "]}";
Schema xs = new Schema.Parser().parse(x);
assertEquals(xs, new Schema.Parser().parse(xs.toString()));
}
@Test
void nestedNullNamespace() {
Schema inner = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
Schema outer = Schema.createRecord("Outer", null, "space", false);
outer.setFields(Collections.singletonList(new Field("f", inner, null, null)));
assertEquals(outer, new Schema.Parser().parse(outer.toString()));
}
@Test
void deeplyNestedNullNamespace() {
Schema inner = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":["
+ "{\"name\":\"x\",\"type\":{\"type\":\"record\",\"name\":\"Deeper\",\"fields\":["
+ "{\"name\":\"y\",\"type\":\"int\"}]}}]}");
Schema outer = Schema.createRecord("Outer", null, "space", false);
outer.setFields(Collections.singletonList(new Field("f", inner, null, null)));
assertEquals(outer, new Schema.Parser().parse(outer.toString()));
}
@Test
void nestedNullNamespaceReferencing() {
Schema inner = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
Schema outer = Schema.createRecord("Outer", null, "space", false);
outer.setFields(Arrays.asList(new Field("f1", inner, null, null), new Field("f2", inner, null, null)));
assertEquals(outer, new Schema.Parser().parse(outer.toString()));
}
@Test
void nestedNullNamespaceReferencingWithUnion() {
Schema inner = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Inner\",\"fields\":[]}");
Schema innerUnion = Schema.createUnion(Arrays.asList(inner, Schema.create(Type.NULL)));
Schema outer = Schema.createRecord("Outer", null, "space", false);
outer.setFields(Arrays.asList(new Field("f1", innerUnion, null, null), new Field("f2", innerUnion, null, null)));
assertEquals(outer, new Schema.Parser().parse(outer.toString()));
}
@Test
void nestedNonNullNamespace1() {
Schema inner1 = Schema.createEnum("InnerEnum", null, "space", Collections.singletonList("x"));
Schema inner2 = new Schema.Parser()
.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":" + "\"InnerRecord\",\"fields\":[]}");
Schema nullOuter = Schema.createRecord("Outer", null, null, false);
nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null), new Field("f2", inner2, null, null)));
assertEquals(nullOuter, new Schema.Parser().parse(nullOuter.toString()));
}
@Test
void nestedNonNullNamespace2() {
Schema inner1 = Schema.createFixed("InnerFixed", null, "space", 1);
Schema inner2 = new Schema.Parser()
.parse("{\"type\":\"record\",\"namespace\":\"space\",\"name\":" + "\"InnerRecord\",\"fields\":[]}");
Schema nullOuter = Schema.createRecord("Outer", null, null, false);
nullOuter.setFields(Arrays.asList(new Field("f1", inner1, null, null), new Field("f2", inner2, null, null)));
assertEquals(nullOuter, new Schema.Parser().parse(nullOuter.toString()));
}
@Test
void nullNamespaceAlias() {
Schema s = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"Z\",\"fields\":[]}");
Schema t = new Schema.Parser()
.parse("{\"type\":\"record\",\"name\":\"x.Y\",\"aliases\":[\".Z\"]," + "\"fields\":[]}");
Schema u = Schema.applyAliases(s, t);
assertEquals("x.Y", u.getFullName());
}
@Test
void nullPointer() throws Exception {
String recordJson = "{\"type\":\"record\", \"name\":\"Test\", \"fields\":"
+ "[{\"name\":\"x\", \"type\":\"string\"}]}";
Schema schema = new Schema.Parser().parse(recordJson);
GenericData.Record record = new GenericData.Record(schema);
assertThrows(NullPointerException.class,
() -> checkBinary(schema, record, new GenericDatumWriter<>(), new GenericDatumReader<>()));
}
private static void checkParseError(String json) {
try {
new Schema.Parser().parse(json);
} catch (AvroRuntimeException e) {
return;
}
fail("Should not have parsed: " + json);
}
private static void checkUnionError(Schema[] branches) {
List<Schema> branchList = Arrays.asList(branches);
try {
Schema.createUnion(branchList);
fail("Union should not have constructed from: " + branchList);
} catch (AvroRuntimeException ignored) {
}
}
private static Schema buildUnion(Schema[] branches) {
List<Schema> branchList = Arrays.asList(branches);
return Schema.createUnion(branchList);
}
  /**
   * Makes sure that "doc" tags are transcribed in the schemas. Note that there
   * are docs both for fields and for the records themselves.
   */
  @Test
  void docs() {
    Schema schema = new Schema.Parser().parse(SCHEMA_WITH_DOC_TAGS);
    // record-level doc
    assertEquals("This is not a world record.", schema.getDoc());
    // field docs are distinct from the docs of the named types the fields reference
    assertEquals("Inner Fixed", schema.getField("inner_fixed").doc());
    assertEquals("Very Inner Fixed", schema.getField("inner_fixed").schema().getDoc());
    assertEquals("Inner String", schema.getField("inner_string").doc());
    assertEquals("Inner Enum", schema.getField("inner_enum").doc());
    assertEquals("Very Inner Enum", schema.getField("inner_enum").schema().getDoc());
    assertEquals("Inner Union", schema.getField("inner_union").doc());
  }
@Test
void fieldDocs() {
String schemaStr = "{\"name\": \"Rec\",\"type\": \"record\",\"fields\" : ["
+ "{\"name\": \"f\", \"type\": \"int\", \"doc\": \"test\"}]}";
// check field doc is parsed correctly
Schema schema = new Schema.Parser().parse(schemaStr);
assertEquals("test", schema.getField("f").doc());
// check print/read cycle preserves field doc
schema = new Schema.Parser().parse(schema.toString());
assertEquals("test", schema.getField("f").doc());
}
  /**
   * Verifies alias declaration, the alias accessors, and
   * {@link Schema#applyAliases} for records (including field aliases), enums,
   * and fixed types.
   */
  @Test
  void aliases() {
    // record alias x.y <- a.b, with field alias g <- f
    String t1 = "{\"type\":\"record\",\"name\":\"a.b\",\"fields\":[" + "{\"name\":\"f\",\"type\":\"long\"},"
        + "{\"name\":\"h\",\"type\":\"int\"}]}";
    String t2 = "{\"type\":\"record\",\"name\":\"x.y\",\"aliases\":[\"a.b\"],"
        + "\"fields\":[{\"name\":\"g\",\"type\":\"long\",\"aliases\":[\"f\"]}," + "{\"name\":\"h\",\"type\":\"int\"}]}";
    Schema s1 = new Schema.Parser().parse(t1);
    Schema s2 = new Schema.Parser().parse(t2);
    assertEquals(s1.getAliases(), Collections.emptySet());
    assertEquals(s1.getField("f").aliases(), Collections.emptySet());
    assertEquals(s2.getAliases(), Collections.singleton("a.b"));
    assertEquals(s2.getField("g").aliases(), Collections.singleton("f"));
    Schema s3 = Schema.applyAliases(s1, s2);
    assertNotSame(s2, s3); // applyAliases builds a new schema object
    assertEquals(s2, s3);
    // enum alias a.c <- a.b (alias is resolved in the enum's own namespace)
    t1 = "{\"type\":\"enum\",\"name\":\"a.b\"," + "\"symbols\":[\"x\"]}";
    t2 = "{\"type\":\"enum\",\"name\":\"a.c\",\"aliases\":[\"b\"]," + "\"symbols\":[\"x\"]}";
    s1 = new Schema.Parser().parse(t1);
    s2 = new Schema.Parser().parse(t2);
    s3 = Schema.applyAliases(s1, s2);
    assertNotSame(s2, s3);
    assertEquals(s2, s3);
    // fixed alias b <- a
    t1 = "{\"type\":\"fixed\",\"name\":\"a\"," + "\"size\": 5}";
    t2 = "{\"type\":\"fixed\",\"name\":\"b\",\"aliases\":[\"a\"]," + "\"size\": 5}";
    s1 = new Schema.Parser().parse(t1);
    s2 = new Schema.Parser().parse(t2);
    s3 = Schema.applyAliases(s1, s2);
    assertNotSame(s2, s3);
    assertEquals(s2, s3);
  }
  /**
   * Applies aliases across a self-referential (recursive) record type: record C
   * contains an array whose items may themselves be C. Both the record alias
   * (x &lt;- a) and the field alias (d &lt;- c) must be resolved without
   * infinite recursion.
   */
  @Test
  void aliasesSelfReferential() {
    String t1 = "{\"type\":\"record\",\"name\":\"a\",\"fields\":[{\"name\":\"f\",\"type\":{\"type\":\"record\",\"name\":\"C\",\"fields\":[{\"name\":\"c\",\"type\":{\"type\":\"array\",\"items\":[\"null\",\"C\"]}}]}}]}";
    String t2 = "{\"type\":\"record\",\"name\":\"x\",\"fields\":[{\"name\":\"f\",\"type\":{\"type\":\"record\",\"name\":\"C\",\"fields\":[{\"name\":\"d\",\"type\":{\"type\":\"array\",\"items\":[\"null\",\"C\"]},\"aliases\":[\"c\"]}]}}],\"aliases\":[\"a\"]}";
    Schema s1 = new Schema.Parser().parse(t1);
    Schema s2 = new Schema.Parser().parse(t2);
    assertEquals(s1.getAliases(), Collections.emptySet());
    assertEquals(s2.getAliases(), Collections.singleton("a"));
    Schema s3 = Schema.applyAliases(s1, s2);
    assertNotSame(s2, s3);
    assertEquals(s2, s3);
  }
  /** Round-trips the schema and its default handling, with induction enabled. */
  private static void check(File dst, String schemaJson, String defaultJson, Object defaultValue) throws Exception {
    check(dst, schemaJson, defaultJson, defaultValue, true);
  }
  /**
   * Round-trips the schema and its default handling.
   *
   * @param induce whether GenericData.induce must reproduce the schema exactly
   */
  private static void check(File dst, String schemaJson, String defaultJson, Object defaultValue, boolean induce)
      throws Exception {
    check(dst, schemaJson, induce);
    checkDefault(schemaJson, defaultJson, defaultValue);
  }
  /**
   * Core round-trip harness: parses the schema and, for COUNT random data
   * instances, checks induction (optionally), validation, the binary/JSON
   * codecs (with and without datum reuse), generated-code compilation, and the
   * binary round trip of the schema JSON itself.
   *
   * @param dst    scratch location used by the generated-code compile check
   * @param induce whether GenericData.induce must reproduce the schema exactly
   */
  private static void check(File dst, String jsonSchema, boolean induce) throws Exception {
    Schema schema = new Schema.Parser().parse(jsonSchema);
    checkProp(schema);
    Object reuse = null; // carried across iterations to exercise datum reuse
    for (Object datum : new RandomData(schema, COUNT, true)) {
      if (induce) {
        Schema induced = GenericData.get().induce(datum);
        assertEquals(schema, induced, "Induced schema does not match.");
      }
      assertTrue(GenericData.get().validate(schema, datum), "Datum does not validate against schema " + datum);
      checkBinary(schema, datum, new GenericDatumWriter<>(), new GenericDatumReader<>(), null);
      reuse = checkBinary(schema, datum, new GenericDatumWriter<>(), new GenericDatumReader<>(), reuse);
      checkDirectBinary(schema, datum, new GenericDatumWriter<>(), new GenericDatumReader<>());
      checkBlockingBinary(schema, datum, new GenericDatumWriter<>(), new GenericDatumReader<>());
      checkJson(schema, datum, new GenericDatumWriter<>(), new GenericDatumReader<>());
      // Check that we can generate the code for every schema we see.
      TestSpecificCompiler.assertCompiles(dst, schema, false);
      // Check that we can read/write the json of every schema we see.
      checkBinaryJson(jsonSchema);
    }
  }
  /**
   * Verifies prop handling on a schema: adding a prop changes equality, and the
   * prop survives a toString/parse round trip. Unions are skipped since they
   * carry no props.
   */
  private static void checkProp(Schema s0) throws Exception {
    if (s0.getType().equals(Schema.Type.UNION))
      return; // unions have no props
    assertNull(s0.getProp("foo"));
    Schema s1 = new Schema.Parser().parse(s0.toString());
    s1.addProp("foo", "bar");
    assertEquals("bar", s1.getProp("foo"));
    assertNotEquals(s0, s1); // the added prop must affect equality
    Schema s2 = new Schema.Parser().parse(s1.toString());
    assertEquals("bar", s2.getProp("foo")); // prop survives the round trip
    assertEquals(s1, s2);
    assertNotEquals(s0, s2);
  }
  /** Binary round trip without datum reuse; see the five-argument overload. */
  public static void checkBinary(Schema schema, Object datum, DatumWriter<Object> writer, DatumReader<Object> reader)
      throws IOException {
    checkBinary(schema, datum, writer, reader, null);
  }
  /**
   * Round-trips the datum through the buffered binary encoder/decoder and
   * asserts the decoded value equals the original.
   *
   * @param reuse optional previously decoded object to deserialize into
   * @return the decoded datum, suitable as {@code reuse} for a later call
   */
  public static Object checkBinary(Schema schema, Object datum, DatumWriter<Object> writer, DatumReader<Object> reader,
      Object reuse) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.setSchema(schema);
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush(); // buffered encoder: must flush before reading the bytes
    byte[] data = out.toByteArray();
    reader.setSchema(schema);
    Object decoded = reader.read(reuse, DecoderFactory.get().binaryDecoder(data, null));
    assertEquals(datum, decoded, "Decoded data does not match.");
    return decoded;
  }
  /**
   * Binary round trip through the unbuffered (direct) encoder, which writes
   * straight to the stream and therefore needs no flush.
   */
  public static void checkDirectBinary(Schema schema, Object datum, DatumWriter<Object> writer,
      DatumReader<Object> reader) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.setSchema(schema);
    Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
    writer.write(datum, encoder);
    // no flush for direct
    byte[] data = out.toByteArray();
    reader.setSchema(schema);
    Object decoded = reader.read(null, DecoderFactory.get().binaryDecoder(data, null));
    assertEquals(datum, decoded, "Decoded data does not match.");
  }
  /**
   * Binary round trip through the blocking binary encoder, which prefixes
   * nested arrays/maps with block sizes; the plain binary decoder must still be
   * able to read the result.
   */
  public static void checkBlockingBinary(Schema schema, Object datum, DatumWriter<Object> writer,
      DatumReader<Object> reader) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.setSchema(schema);
    Encoder encoder = EncoderFactory.get().blockingBinaryEncoder(out, null);
    writer.write(datum, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();
    reader.setSchema(schema);
    Object decoded = reader.read(null, DecoderFactory.get().binaryDecoder(data, null));
    assertEquals(datum, decoded, "Decoded data does not match.");
  }
  /**
   * JSON round trip. The datum is deliberately written twice and read twice
   * from the same encoder/decoder, verifying that consecutive values on one
   * stream are handled correctly.
   */
  private static void checkJson(Schema schema, Object datum, DatumWriter<Object> writer, DatumReader<Object> reader)
      throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
    writer.setSchema(schema);
    writer.write(datum, encoder);
    writer.write(datum, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();
    reader.setSchema(schema);
    Decoder decoder = DecoderFactory.get().jsonDecoder(schema, new ByteArrayInputStream(data));
    Object decoded = reader.read(null, decoder);
    assertEquals(datum, decoded, "Decoded data does not match.");
    decoded = reader.read(decoded, decoder); // second read reuses the first result
    assertEquals(datum, decoded, "Decoded data does not match.");
  }
  /**
   * Asserts the datum JSON-encodes exactly to {@code json} (byte-for-byte as
   * UTF-8) and decodes back to an equal value.
   */
  private static void checkJson(Schema schema, Object datum, String json) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Encoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
    DatumWriter<Object> writer = new GenericDatumWriter<>();
    writer.setSchema(schema);
    writer.write(datum, encoder);
    encoder.flush();
    byte[] data = out.toByteArray();
    String encoded = new String(data, StandardCharsets.UTF_8);
    assertEquals(json, encoded, "Encoded data does not match.");
    DatumReader<Object> reader = new GenericDatumReader<>();
    reader.setSchema(schema);
    Object decoded = reader.read(null, DecoderFactory.get().jsonDecoder(schema, new ByteArrayInputStream(data)));
    assertEquals(datum, decoded, "Decoded data does not match.");
  }
  /**
   * Round-trips arbitrary JSON text through the {@code Json} object codec,
   * wrapped in validating encoder/decoder against {@code Json.SCHEMA}, and
   * compares the re-serialized text.
   */
  public static void checkBinaryJson(String json) throws Exception {
    Object node = Json.parseJson(json);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DatumWriter<Object> writer = new Json.ObjectWriter();
    Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    encoder = EncoderFactory.get().validatingEncoder(Json.SCHEMA, encoder);
    writer.write(node, encoder);
    encoder.flush();
    byte[] bytes = out.toByteArray();
    DatumReader<Object> reader = new Json.ObjectReader();
    Decoder decoder = DecoderFactory.get().binaryDecoder(bytes, null);
    decoder = DecoderFactory.get().validatingDecoder(Json.SCHEMA, decoder);
    Object decoded = reader.read(null, decoder);
    assertEquals(Json.toString(node), Json.toString(decoded), "Decoded json does not match.");
  }
  /** Writer-side schema for the default-value tests: a record with no fields. */
  private static final Schema ACTUAL = // an empty record schema
      new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[]}");
  /**
   * Wraps the schema in a single-field record declaring {@code defaultJson},
   * reads an empty record (ACTUAL) against it, and asserts the field is filled
   * with {@code defaultValue}. Also round-trips the reader schema via toString.
   */
  private static void checkDefault(String schemaJson, String defaultJson, Object defaultValue) throws Exception {
    String recordJson = "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", " + "\"type\":"
        + schemaJson + ", " + "\"default\":" + defaultJson + "}]}";
    Schema expected = new Schema.Parser().parse(recordJson);
    DatumReader<Object> in = new GenericDatumReader<>(ACTUAL, expected);
    GenericData.Record record = (GenericData.Record) in.read(null,
        DecoderFactory.get().binaryDecoder(new byte[0], null));
    assertEquals(defaultValue, record.get("f"), "Wrong default.");
    assertEquals(expected, new Schema.Parser().parse(expected.toString()), "Wrong toString");
  }
private static void checkValidateDefaults(String schemaJson, String defaultJson) {
try {
Schema.Parser parser = new Schema.Parser();
String recordJson = "{\"type\":\"record\", \"name\":\"Foo\", \"fields\":[{\"name\":\"f\", " + "\"type\":"
+ schemaJson + ", " + "\"default\":" + defaultJson + "}]}";
parser.parse(recordJson);
fail("Schema of type " + schemaJson + " should not have default " + defaultJson);
} catch (AvroTypeException ignored) {
}
}
  /**
   * Reading against a reader-schema field that the writer schema (ACTUAL)
   * lacks, and which declares no default, must raise AvroTypeException.
   */
  @Test
  void noDefaultField() throws Exception {
    assertThrows(AvroTypeException.class, () -> {
      Schema expected = new Schema.Parser()
          .parse("{\"type\":\"record\", \"name\":\"Foo\", \"fields\":" + "[{\"name\":\"f\", \"type\": \"string\"}]}");
      DatumReader<Object> in = new GenericDatumReader<>(ACTUAL, expected);
      in.read(null, DecoderFactory.get().binaryDecoder(new ByteArrayInputStream(new byte[0]), null));
    });
  }
@Test
void enumMismatch() throws Exception {
Schema actual = new Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"X\",\"Y\"]}");
Schema expected = new Schema.Parser().parse("{\"type\":\"enum\",\"name\":\"E\",\"symbols\":[\"Y\",\"Z\"]}");
ByteArrayOutputStream out = new ByteArrayOutputStream();
DatumWriter<Object> writer = new GenericDatumWriter<>(actual);
Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);
writer.write(new GenericData.EnumSymbol(actual, "Y"), encoder);
writer.write(new GenericData.EnumSymbol(actual, "X"), encoder);
encoder.flush();
byte[] data = out.toByteArray();
Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
DatumReader<String> in = new GenericDatumReader<>(actual, expected);
assertEquals(new GenericData.EnumSymbol(expected, "Y"), in.read(null, decoder), "Wrong value");
try {
in.read(null, decoder);
fail("Should have thrown exception.");
} catch (AvroTypeException e) {
// expected
}
}
@Test
void recordWithPrimitiveName() {
assertThrows(AvroTypeException.class, () -> {
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"string\", \"fields\": []}");
});
}
@Test
void enumWithPrimitiveName() {
assertThrows(AvroTypeException.class, () -> {
new Schema.Parser().parse("{\"type\":\"enum\", \"name\":\"null\", \"symbols\": [\"A\"]}");
});
}
private static Schema enumSchema() {
return new Schema.Parser().parse("{ \"type\": \"enum\", \"name\": \"e\", " + "\"symbols\": [\"a\", \"b\"]}");
}
@Test
void immutability1() {
assertThrows(AvroRuntimeException.class, () -> {
Schema s = enumSchema();
s.addProp("p1", "1");
s.addProp("p1", "2");
});
}
@Test
void immutability2() {
assertThrows(AvroRuntimeException.class, () -> {
Schema s = enumSchema();
s.addProp("p1", null);
});
}
  /** Returns a locked list of "a","b","c"; every mutator must throw IllegalStateException. */
  private static List<String> lockedArrayList() {
    return new Schema.LockableArrayList<>(Arrays.asList("a", "b", "c")).lock();
  }
@Test
void lockedArrayList1() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().add("p");
});
}
@Test
void lockedArrayList2() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().remove("a");
});
}
@Test
void lockedArrayList3() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().addAll(Collections.singletonList("p"));
});
}
@Test
void lockedArrayList4() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().addAll(0, Collections.singletonList("p"));
});
}
@Test
void lockedArrayList5() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().removeAll(Collections.singletonList("a"));
});
}
@Test
void lockedArrayList6() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().retainAll(Collections.singletonList("a"));
});
}
@Test
void lockedArrayList7() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().clear();
});
}
@Test
void lockedArrayList8() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().iterator().remove();
});
}
@Test
void lockedArrayList9() {
assertThrows(IllegalStateException.class, () -> {
Iterator<String> it = lockedArrayList().iterator();
it.next();
it.remove();
});
}
@Test
void lockedArrayList10() {
assertThrows(IllegalStateException.class, () -> {
lockedArrayList().remove(1);
});
}
@Test
void names_GetWithInheritedNamespace() {
Schema schema = Schema.create(Type.STRING);
Schema.Names names = new Schema.Names("space");
names.put(new Schema.Name("Name", "space"), schema);
assertEquals(schema, names.get(new Schema.Name("Name", "space")));
assertEquals(schema, names.get("Name"));
}
@Test
void names_GetWithNullNamespace() {
Schema schema = Schema.create(Type.STRING);
Schema.Names names = new Schema.Names("space");
names.put(new Schema.Name("Name", ""), schema);
assertEquals(schema, names.get(new Schema.Name("Name", "")));
assertEquals(schema, names.get("Name"));
}
@Test
void names_GetNotFound() {
Schema.Names names = new Schema.Names("space");
names.put(new Schema.Name("Name", "otherspace"), Schema.create(Type.STRING));
assertNull(names.get("Name"));
}
}
| 7,452 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestCompare.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.BinaryData;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.test.Kind;
import org.apache.avro.test.MD5;
import org.apache.avro.test.TestRecord;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
public class TestCompare {
@Test
void testNull() throws Exception {
Schema schema = new Schema.Parser().parse("\"null\"");
byte[] b = render(null, schema, new GenericDatumWriter<>());
assertEquals(0, BinaryData.compare(b, 0, b, 0, schema));
}
@Test
void testBoolean() throws Exception {
check("\"boolean\"", Boolean.FALSE, Boolean.TRUE);
}
@Test
void string() throws Exception {
check("\"string\"", new Utf8(""), new Utf8("a"));
check("\"string\"", new Utf8("a"), new Utf8("b"));
check("\"string\"", new Utf8("a"), new Utf8("ab"));
check("\"string\"", new Utf8("ab"), new Utf8("b"));
}
@Test
void bytes() throws Exception {
check("\"bytes\"", ByteBuffer.wrap(new byte[] {}), ByteBuffer.wrap(new byte[] { 1 }));
check("\"bytes\"", ByteBuffer.wrap(new byte[] { 1 }), ByteBuffer.wrap(new byte[] { 2 }));
check("\"bytes\"", ByteBuffer.wrap(new byte[] { 1, 2 }), ByteBuffer.wrap(new byte[] { 2 }));
}
@Test
void testInt() throws Exception {
check("\"int\"", -1, 0);
check("\"int\"", 0, 1);
}
@Test
void testLong() throws Exception {
check("\"long\"", 11L, 12L);
check("\"long\"", (long) -1, 1L);
}
@Test
void testFloat() throws Exception {
check("\"float\"", 1.1f, 1.2f);
check("\"float\"", (float) -1.1, 1.0f);
}
@Test
void testDouble() throws Exception {
check("\"double\"", 1.2, 1.3);
check("\"double\"", -1.2, 1.3);
}
@Test
void array() throws Exception {
String json = "{\"type\":\"array\", \"items\": \"long\"}";
Schema schema = new Schema.Parser().parse(json);
GenericArray<Long> a1 = new GenericData.Array<>(1, schema);
a1.add(1L);
GenericArray<Long> a2 = new GenericData.Array<>(1, schema);
a2.add(1L);
a2.add(0L);
check(json, a1, a2);
}
@Test
void record() throws Exception {
String fields = " \"fields\":[" + "{\"name\":\"f\",\"type\":\"int\",\"order\":\"ignore\"},"
+ "{\"name\":\"g\",\"type\":\"int\",\"order\":\"descending\"}," + "{\"name\":\"h\",\"type\":\"int\"}]}";
String recordJson = "{\"type\":\"record\", \"name\":\"Test\"," + fields;
Schema schema = new Schema.Parser().parse(recordJson);
GenericData.Record r1 = new GenericData.Record(schema);
r1.put("f", 1);
r1.put("g", 13);
r1.put("h", 41);
GenericData.Record r2 = new GenericData.Record(schema);
r2.put("f", 0);
r2.put("g", 12);
r2.put("h", 41);
check(recordJson, r1, r2);
r2.put("f", 0);
r2.put("g", 13);
r2.put("h", 42);
check(recordJson, r1, r2);
String record2Json = "{\"type\":\"record\", \"name\":\"Test2\"," + fields;
Schema schema2 = new Schema.Parser().parse(record2Json);
GenericData.Record r3 = new GenericData.Record(schema2);
r3.put("f", 1);
r3.put("g", 13);
r3.put("h", 41);
assert (!r1.equals(r3)); // same fields, diff name
}
@Test
void testEnum() throws Exception {
String json = "{\"type\":\"enum\", \"name\":\"Test\",\"symbols\": [\"A\", \"B\"]}";
Schema schema = new Schema.Parser().parse(json);
check(json, new GenericData.EnumSymbol(schema, "A"), new GenericData.EnumSymbol(schema, "B"));
}
@Test
void fixed() throws Exception {
String json = "{\"type\": \"fixed\", \"name\":\"Test\", \"size\": 1}";
Schema schema = new Schema.Parser().parse(json);
check(json, new GenericData.Fixed(schema, new byte[] { (byte) 'a' }),
new GenericData.Fixed(schema, new byte[] { (byte) 'b' }));
}
@Test
void union() throws Exception {
check("[\"string\", \"long\"]", new Utf8("a"), new Utf8("b"), false);
check("[\"string\", \"long\"]", 1L, 2L, false);
check("[\"string\", \"long\"]", new Utf8("a"), 1L, false);
}
@Test
void specificRecord() throws Exception {
TestRecord s1 = new TestRecord();
TestRecord s2 = new TestRecord();
s1.setName("foo");
s1.setKind(Kind.BAZ);
s1.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
s2.setName("bar");
s2.setKind(Kind.BAR);
s2.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 6 }));
Schema schema = SpecificData.get().getSchema(TestRecord.class);
check(schema, s1, s2, true, new SpecificDatumWriter<>(schema), SpecificData.get());
s2.setKind(Kind.BAZ);
check(schema, s1, s2, true, new SpecificDatumWriter<>(schema), SpecificData.get());
}
private static <T> void check(String schemaJson, T o1, T o2) throws Exception {
check(schemaJson, o1, o2, true);
}
private static <T> void check(String schemaJson, T o1, T o2, boolean comparable) throws Exception {
check(new Schema.Parser().parse(schemaJson), o1, o2, comparable, new GenericDatumWriter<>(), GenericData.get());
}
private static <T> void check(Schema schema, T o1, T o2, boolean comparable, DatumWriter<T> writer,
GenericData comparator) throws Exception {
byte[] b1 = render(o1, schema, writer);
byte[] b2 = render(o2, schema, writer);
assertEquals(-1, BinaryData.compare(b1, 0, b2, 0, schema));
assertEquals(1, BinaryData.compare(b2, 0, b1, 0, schema));
assertEquals(0, BinaryData.compare(b1, 0, b1, 0, schema));
assertEquals(0, BinaryData.compare(b2, 0, b2, 0, schema));
assertEquals(-1, compare(o1, o2, schema, comparable, comparator));
assertEquals(1, compare(o2, o1, schema, comparable, comparator));
assertEquals(0, compare(o1, o1, schema, comparable, comparator));
assertEquals(0, compare(o2, o2, schema, comparable, comparator));
assert (o1.equals(o1));
assert (o2.equals(o2));
assert (!o1.equals(o2));
assert (!o2.equals(o1));
assert (!o1.equals(new Object()));
assert (!o2.equals(new Object()));
assert (!o1.equals(null));
assert (!o2.equals(null));
assert (o1.hashCode() != o2.hashCode());
// check BinaryData.hashCode against Object.hashCode
if (schema.getType() != Schema.Type.ENUM) {
assertEquals(o1.hashCode(), BinaryData.hashCode(b1, 0, b1.length, schema));
assertEquals(o2.hashCode(), BinaryData.hashCode(b2, 0, b2.length, schema));
}
// check BinaryData.hashCode against GenericData.hashCode
assertEquals(comparator.hashCode(o1, schema), BinaryData.hashCode(b1, 0, b1.length, schema));
assertEquals(comparator.hashCode(o2, schema), BinaryData.hashCode(b2, 0, b2.length, schema));
}
@SuppressWarnings(value = "unchecked")
private static int compare(Object o1, Object o2, Schema schema, boolean comparable, GenericData comparator) {
return comparable ? ((Comparable<Object>) o1).compareTo(o2) : comparator.compare(o1, o2, schema);
}
private static <T> byte[] render(T datum, Schema schema, DatumWriter<T> writer) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
writer.setSchema(schema);
Encoder enc = new EncoderFactory().directBinaryEncoder(out, null);
writer.write(datum, enc);
enc.flush();
return out.toByteArray();
}
}
| 7,453 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolReflectMeta.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.reflect.ReflectRequestor;
import org.apache.avro.ipc.reflect.ReflectResponder;
import java.net.InetSocketAddress;
import org.junit.jupiter.api.BeforeEach;
public class TestProtocolReflectMeta extends TestProtocolReflect {

  @BeforeEach
  @Override
  public void testStartServer() throws Exception {
    // The server/client pair is a shared static fixture; build it only once.
    if (server != null) {
      return;
    }
    ReflectResponder responder = new ReflectResponder(Simple.class, new TestImpl());
    responder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    responder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
    server = new SocketServer(responder, new InetSocketAddress(0));
    server.start();

    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    ReflectRequestor req = new ReflectRequestor(Simple.class, client);
    // Mirror the server-side plugins so both halves build the same metadata.
    req.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    req.addRPCPlugin(new RPCMetaTestPlugin("key2"));
    proxy = ReflectRequestor.getClient(Simple.class, req);
  }
}
| 7,454 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/SimpleException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
/**
* This should be a static nested class in TestProtocolReflect, but that breaks
* CheckStyle (http://jira.codehaus.org/browse/MPCHECKSTYLE-20).
*/
public class SimpleException extends Exception {
  // Exception is Serializable; pin the serial form so it does not drift
  // with compiler-generated defaults across builds.
  private static final long serialVersionUID = 1L;

  /** Creates an exception with no detail message. */
  SimpleException() {
  }

  /** Creates an exception with the given detail message. */
  SimpleException(String message) {
    super(message);
  }
}
| 7,455 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecificMeta.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.net.InetSocketAddress;
import org.apache.avro.ipc.Requestor;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.Transceiver;
/**
 * Runs the Simple-protocol RPC tests with metadata plugins installed on both
 * the server responder and the client requestor.
 */
public class TestProtocolSpecificMeta extends TestProtocolSpecific {

  @Override
  public Server createServer(Responder testResponder) throws Exception {
    // FIX: instrument the responder actually passed in. The previous code
    // added the plugins to the static 'responder' field, silently ignoring
    // the parameter (equivalent only because the superclass happens to pass
    // that same field).
    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
    return new SocketServer(testResponder, new InetSocketAddress(0));
  }

  @Override
  public Transceiver createTransceiver() throws Exception {
    return new SocketTransceiver(new InetSocketAddress(server.getPort()));
  }

  /** Installs the matching client-side plugins (overrides the superclass hook). */
  @Override
  public void addRpcPlugins(Requestor req) {
    req.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    req.addRPCPlugin(new RPCMetaTestPlugin("key2"));
  }
}
| 7,456 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGenericMeta.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.net.InetSocketAddress;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.junit.jupiter.api.BeforeEach;
public class TestProtocolGenericMeta extends TestProtocolGeneric {

  @BeforeEach
  @Override
  public void testStartServer() throws Exception {
    // Shared fixture: skip setup if an earlier test already started the server.
    if (server != null) {
      return;
    }
    Responder testResponder = new TestResponder();
    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    testResponder.addRPCPlugin(new RPCMetaTestPlugin("key2"));
    server = new SocketServer(testResponder, new InetSocketAddress(0));
    server.start();

    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    requestor = new GenericRequestor(PROTOCOL, client);
    // Same plugins on the client so both sides build the shared metadata string.
    requestor.addRPCPlugin(new RPCMetaTestPlugin("key1"));
    requestor.addRPCPlugin(new RPCMetaTestPlugin("key2"));
  }
}
| 7,457 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestAnnotation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Test-only marker annotation, applicable to types, fields, and methods and
 * retained at runtime — presumably so reflect-based tests can observe it via
 * reflection (confirm against callers).
 */
@Target({ ElementType.TYPE, ElementType.FIELD, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
public @interface TestAnnotation {
}
| 7,458 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestDataFileSpecific.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.File;
import java.io.IOException;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData.Record;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
public class TestDataFileSpecific {

  @TempDir
  public File DIR;

  /*
   * Test when using SpecificDatumReader<T>() constructor to read from a file with
   * a different schema that both reader & writer schemas are found.
   */
  @Test
  void specificDatumReaderDefaultCtor() throws IOException {
    File file = new File(DIR.getPath(), "testSpecificDatumReaderDefaultCtor");
    // Writer schema: like the specific Foo record, but with an extra "id" field.
    Schema writerSchema = new Schema.Parser()
        .parse("{\"type\":\"record\",\"name\":\"Foo\"," + "\"namespace\":\"org.apache.avro\",\"fields\":["
            + "{\"name\":\"label\",\"type\":\"string\"}," + "{\"name\":\"id\",\"type\":\"int\"}]}");

    // Write ten generic records under the writer schema.
    try (DataFileWriter<Record> writer = new DataFileWriter<>(new GenericDatumWriter<Record>(writerSchema))
        .create(writerSchema, file)) {
      for (int i = 0; i < 10; i++) {
        Record record = new Record(writerSchema);
        record.put("label", "" + i);
        record.put("id", i);
        writer.append(record);
      }
    }

    // Read back with the no-arg SpecificDatumReader so the reader schema must
    // be inferred at runtime from the generated Foo class.
    try (DataFileReader<Foo> reader = new DataFileReader<>(file, new SpecificDatumReader<>())) {
      int count = 0;
      for (Foo foo : reader) {
        assertEquals("" + count, foo.getLabel());
        count++;
      }
      assertEquals(10, count);
    }
  }
}
| 7,459 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceReflect.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.reflect.ReflectRequestor;
import org.apache.avro.ipc.reflect.ReflectResponder;
import org.apache.avro.test.namespace.TestNamespace;
import java.net.InetSocketAddress;
import org.junit.jupiter.api.BeforeEach;
public class TestNamespaceReflect extends TestNamespaceSpecific {

  @BeforeEach
  @Override
  public void testStartServer() throws Exception {
    // Reuse the fixture if a previous test already brought the server up.
    if (server != null) {
      return;
    }
    ReflectResponder testResponder = new ReflectResponder(TestNamespace.class, new TestImpl());
    server = new SocketServer(testResponder, new InetSocketAddress(0));
    server.start();
    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    proxy = ReflectRequestor.getClient(TestNamespace.class, client);
  }
}
| 7,460 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/RPCMetaTestPlugin.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.avro.ipc.RPCContext;
import org.apache.avro.ipc.RPCPlugin;
/**
* An implementation of an RPC metadata plugin API designed for unit testing.
* This plugin tests handshake and call state by passing a string as metadata,
* slowly building it up at each instrumentation point, testing it as it goes.
* Finally, after the call or handshake is complete, the constructed string is
* tested. It also tests that RPC context data is appropriately filled in along
* the way by Requestor and Responder classes.
*/
public final class RPCMetaTestPlugin extends RPCPlugin {

  /** Metadata key under which this plugin accumulates its test string. */
  protected final String key;

  public RPCMetaTestPlugin(String keyname) {
    key = keyname;
  }

  // Handshake step 1 (client): seed the request handshake metadata with "ap".
  @Override
  public void clientStartConnect(RPCContext context) {
    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes(StandardCharsets.UTF_8));
    context.requestHandshakeMeta().put(key, buf);
  }

  // Handshake step 2 (server): verify the client's "ap", respond with "apac".
  @Override
  public void serverConnecting(RPCContext context) {
    assertNotNull(context.requestHandshakeMeta());
    assertNotNull(context.responseHandshakeMeta());
    // No call payloads exist yet during the handshake phase.
    assertNull(context.getRequestPayload());
    assertNull(context.getResponsePayload());
    // Another plugin instance (different key) may be running; only act on ours.
    if (!context.requestHandshakeMeta().containsKey(key))
      return;
    ByteBuffer buf = context.requestHandshakeMeta().get(key);
    assertNotNull(buf);
    assertNotNull(buf.array());
    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
    assertNotNull(partialstr);
    assertEquals("ap", partialstr, "partial string mismatch");
    buf = ByteBuffer.wrap((partialstr + "ac").getBytes(StandardCharsets.UTF_8));
    assertTrue(buf.remaining() > 0);
    context.responseHandshakeMeta().put(key, buf);
  }

  // Handshake step 3 (client): verify the server's "apac", complete to "apache".
  @Override
  public void clientFinishConnect(RPCContext context) {
    Map<String, ByteBuffer> handshakeMeta = context.responseHandshakeMeta();
    assertNull(context.getRequestPayload());
    assertNull(context.getResponsePayload());
    assertNotNull(handshakeMeta);
    if (!handshakeMeta.containsKey(key))
      return;
    ByteBuffer buf = handshakeMeta.get(key);
    assertNotNull(buf);
    assertNotNull(buf.array());
    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
    assertNotNull(partialstr);
    assertEquals("apac", partialstr, "partial string mismatch");
    buf = ByteBuffer.wrap((partialstr + "he").getBytes(StandardCharsets.UTF_8));
    assertTrue(buf.remaining() > 0);
    handshakeMeta.put(key, buf);
    // Handshake complete: the accumulated value must now spell "apache".
    checkRPCMetaMap(handshakeMeta);
  }

  // Call step 1 (client): seed the per-call metadata with "ap".
  @Override
  public void clientSendRequest(RPCContext context) {
    ByteBuffer buf = ByteBuffer.wrap("ap".getBytes(StandardCharsets.UTF_8));
    context.requestCallMeta().put(key, buf);
    assertNotNull(context.getMessage());
    // Request payload is filled in by this point; response does not exist yet.
    assertNotNull(context.getRequestPayload());
    assertNull(context.getResponsePayload());
  }

  // Call step 2 (server receive): verify "ap", extend to "apa" in place.
  @Override
  public void serverReceiveRequest(RPCContext context) {
    Map<String, ByteBuffer> meta = context.requestCallMeta();
    assertNotNull(meta);
    assertNotNull(context.getMessage());
    assertNull(context.getResponsePayload());
    if (!meta.containsKey(key))
      return;
    ByteBuffer buf = meta.get(key);
    assertNotNull(buf);
    assertNotNull(buf.array());
    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
    assertNotNull(partialstr);
    assertEquals("ap", partialstr, "partial string mismatch");
    buf = ByteBuffer.wrap((partialstr + "a").getBytes(StandardCharsets.UTF_8));
    assertTrue(buf.remaining() > 0);
    meta.put(key, buf);
  }

  // Call step 3 (server send): verify "apa", put "apac" into the response meta.
  @Override
  public void serverSendResponse(RPCContext context) {
    assertNotNull(context.requestCallMeta());
    assertNotNull(context.responseCallMeta());
    assertNotNull(context.getResponsePayload());
    if (!context.requestCallMeta().containsKey(key))
      return;
    ByteBuffer buf = context.requestCallMeta().get(key);
    assertNotNull(buf);
    assertNotNull(buf.array());
    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
    assertNotNull(partialstr);
    assertEquals("apa", partialstr, "partial string mismatch");
    buf = ByteBuffer.wrap((partialstr + "c").getBytes(StandardCharsets.UTF_8));
    assertTrue(buf.remaining() > 0);
    context.responseCallMeta().put(key, buf);
  }

  // Call step 4 (client receive): verify "apac", complete to "apache" and check.
  @Override
  public void clientReceiveResponse(RPCContext context) {
    assertNotNull(context.responseCallMeta());
    assertNotNull(context.getRequestPayload());
    if (!context.responseCallMeta().containsKey(key))
      return;
    ByteBuffer buf = context.responseCallMeta().get(key);
    assertNotNull(buf);
    assertNotNull(buf.array());
    String partialstr = new String(buf.array(), StandardCharsets.UTF_8);
    assertNotNull(partialstr);
    assertEquals("apac", partialstr, "partial string mismatch");
    buf = ByteBuffer.wrap((partialstr + "he").getBytes(StandardCharsets.UTF_8));
    assertTrue(buf.remaining() > 0);
    context.responseCallMeta().put(key, buf);
    checkRPCMetaMap(context.responseCallMeta());
  }

  /** Asserts that the fully-built metadata value under {@code key} is "apache". */
  protected void checkRPCMetaMap(Map<String, ByteBuffer> rpcMeta) {
    assertNotNull(rpcMeta);
    assertTrue(rpcMeta.containsKey(key), "key not present in map");
    ByteBuffer keybuf = rpcMeta.get(key);
    assertNotNull(keybuf);
    assertTrue(keybuf.remaining() > 0, "key BB had nothing remaining");
    String str = new String(keybuf.array(), StandardCharsets.UTF_8);
    assertEquals("apache", str);
  }
}
| 7,461 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolDatagram.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import java.net.InetSocketAddress;
import java.util.Random;
import org.apache.avro.ipc.DatagramServer;
import org.apache.avro.ipc.DatagramTransceiver;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.test.Simple;
public class TestProtocolDatagram extends TestProtocolSpecific {

  @Override
  public Server createServer(Responder testResponder) throws Exception {
    // Bind the datagram server to a random local port in [10000, 20000).
    int port = new Random().nextInt(10000) + 10000;
    return new DatagramServer(new SpecificResponder(Simple.class, new TestImpl()),
        new InetSocketAddress("localhost", port));
  }

  @Override
  public Transceiver createTransceiver() throws Exception {
    return new DatagramTransceiver(new InetSocketAddress("localhost", server.getPort()));
  }

  @Override
  protected int getExpectedHandshakeCount() {
    // The datagram transport performs no protocol-negotiation handshakes.
    return 0;
  }
}
| 7,462 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestNamespaceSpecific.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.test.namespace.TestNamespace;
import org.apache.avro.test.util.MD5;
import org.apache.avro.test.errors.TestError;
import org.apache.avro.test.namespace.TestRecord;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import java.io.IOException;
import java.net.InetSocketAddress;
public class TestNamespaceSpecific {

  /** Server-side implementation of the TestNamespace protocol. */
  public static class TestImpl implements TestNamespace {
    public TestRecord echo(TestRecord record) {
      return record;
    }

    public void error() throws TestError {
      throw TestError.newBuilder().setMessage$("an error").build();
    }
  }

  protected static SocketServer server;
  protected static Transceiver client;
  protected static TestNamespace proxy;

  @BeforeEach
  public void testStartServer() throws Exception {
    // The fixture is shared between tests; start the server only once.
    if (server != null) {
      return;
    }
    server = new SocketServer(new SpecificResponder(TestNamespace.class, new TestImpl()), new InetSocketAddress(0));
    server.start();
    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    proxy = SpecificRequestor.getClient(TestNamespace.class, client);
  }

  @Test
  void echo() throws IOException {
    TestRecord request = new TestRecord();
    request.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    TestRecord response = proxy.echo(request);
    // Round-tripped record must be equal, with a matching hash code.
    assertEquals(request, response);
    assertEquals(request.hashCode(), response.hashCode());
  }

  @Test
  void error() throws IOException {
    TestError caught = null;
    try {
      proxy.error();
    } catch (TestError e) {
      caught = e;
    }
    assertNotNull(caught);
    assertEquals("an error", caught.getMessage$());
  }

  @AfterAll
  public static void testStopServer() throws IOException {
    client.close();
    server.close();
  }
}
| 7,463 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolSpecific.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.LineNumberReader;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.RPCContext;
import org.apache.avro.ipc.RPCPlugin;
import org.apache.avro.ipc.Requestor;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.test.Kind;
import org.apache.avro.test.MD5;
import org.apache.avro.test.Simple;
import org.apache.avro.test.TestError;
import org.apache.avro.test.TestRecord;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * End-to-end test of the specific (generated-class) RPC stack: starts a server
 * backed by a SpecificResponder and exercises the Simple protocol through a
 * SpecificRequestor proxy. Subclasses override {@link #createServer},
 * {@link #createTransceiver} and {@link #addRpcPlugins} to test other
 * transports and plugin configurations.
 */
public class TestProtocolSpecific {
  /** Sentinel for getExpectedHandshakeCount(): any number of handshakes is acceptable. */
  protected static final int REPEATING = -1;
  /** Number of one-way ack() calls received by the server-side implementation. */
  public static int ackCount;
  // When set, error() throws an undeclared RuntimeException instead of TestError.
  private static boolean throwUndeclaredError;

  /** Server-side implementation of the Simple protocol used by all tests. */
  public static class TestImpl implements Simple {
    public String hello(String greeting) {
      return "goodbye";
    }

    public int add(int arg1, int arg2) {
      return arg1 + arg2;
    }

    public TestRecord echo(TestRecord record) {
      return record;
    }

    public ByteBuffer echoBytes(ByteBuffer data) {
      return data;
    }

    public void error() throws TestError {
      if (throwUndeclaredError)
        throw new RuntimeException("foo");
      throw TestError.newBuilder().setMessage$("an error").build();
    }

    public void ack() {
      ackCount++;
    }
  }

  protected static Server server;
  protected static Transceiver client;
  protected static Simple proxy;
  protected static SpecificResponder responder;
  protected static HandshakeMonitor monitor;

  @BeforeEach
  public void testStartServer() throws Exception {
    // Shared static fixture: only start the server for the first test.
    if (server != null)
      return;
    responder = new SpecificResponder(Simple.class, new TestImpl());
    server = createServer(responder);
    server.start();
    client = createTransceiver();
    SpecificRequestor req = new SpecificRequestor(Simple.class, client);
    addRpcPlugins(req);
    proxy = SpecificRequestor.getClient(Simple.class, req);
    monitor = new HandshakeMonitor();
    responder.addRPCPlugin(monitor);
  }

  /** Hook for subclasses to instrument the requestor with RPC plugins. */
  public void addRpcPlugins(Requestor requestor) {
  }

  /** Hook for subclasses to supply an alternate server/transport. */
  public Server createServer(Responder testResponder) throws Exception {
    return server = new SocketServer(testResponder, new InetSocketAddress(0));
  }

  /** Hook for subclasses to supply an alternate client transport. */
  public Transceiver createTransceiver() throws Exception {
    return new SocketTransceiver(new InetSocketAddress(server.getPort()));
  }

  @Test
  void classLoader() throws Exception {
    // A responder/requestor built with an explicit SpecificData must report its loader.
    ClassLoader loader = new ClassLoader() {
    };

    SpecificResponder responder = new SpecificResponder(Simple.class, new TestImpl(), new SpecificData(loader));
    assertEquals(responder.getSpecificData().getClassLoader(), loader);

    SpecificRequestor requestor = new SpecificRequestor(Simple.class, client, new SpecificData(loader));
    assertEquals(requestor.getSpecificData().getClassLoader(), loader);
  }

  @Test
  void getRemote() throws IOException {
    assertEquals(Simple.PROTOCOL, SpecificRequestor.getRemote(proxy));
  }

  @Test
  void hello() throws IOException {
    String response = proxy.hello("bob");
    assertEquals("goodbye", response);
  }

  @Test
  void testHashCode() throws IOException {
    // Must not throw on a freshly constructed (unpopulated) error record.
    TestError error = new TestError();
    error.hashCode();
  }

  @Test
  void echo() throws IOException {
    TestRecord record = new TestRecord();
    record.setName("foo");
    record.setKind(Kind.BAR);
    record.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    TestRecord echoed = proxy.echo(record);
    assertEquals(record, echoed);
    assertEquals(record.hashCode(), echoed.hashCode());
  }

  @Test
  void add() throws IOException {
    int result = proxy.add(1, 2);
    assertEquals(3, result);
  }

  @Test
  void echoBytes() throws IOException {
    // Round-trip a random payload of up to 16 KiB.
    Random random = new Random();
    int length = random.nextInt(1024 * 16);
    ByteBuffer data = ByteBuffer.allocate(length);
    random.nextBytes(data.array());
    data.flip();
    ByteBuffer echoed = proxy.echoBytes(data);
    assertEquals(data, echoed);
  }

  @Test
  void emptyEchoBytes() throws IOException {
    ByteBuffer data = ByteBuffer.allocate(0);
    ByteBuffer echoed = proxy.echoBytes(data);
    data.flip();
    assertEquals(data, echoed);
  }

  @Test
  void error() throws IOException {
    TestError error = null;
    try {
      proxy.error();
    } catch (TestError e) {
      error = e;
    }
    assertNotNull(error);
    assertEquals("an error", error.getMessage$());
  }

  @Test
  void undeclaredError() throws Exception {
    // FIX: throwUndeclaredError is static — access it directly, not via 'this'.
    throwUndeclaredError = true;
    RuntimeException error = null;
    try {
      proxy.error();
    } catch (RuntimeException e) {
      error = e;
    } finally {
      throwUndeclaredError = false;
    }
    assertNotNull(error);
    assertTrue(error.toString().contains("foo"));
  }

  @Test
  void oneWay() throws IOException {
    ackCount = 0;
    proxy.ack();
    proxy.hello("foo"); // intermix normal req
    proxy.ack();
    try {
      // Give the one-way messages time to reach the server.
      Thread.sleep(100);
    } catch (InterruptedException e) {
      // FIX: restore the interrupt status instead of silently swallowing it.
      Thread.currentThread().interrupt();
    }
    assertEquals(2, ackCount);
  }

  @Test
  void repeatedAccess() throws Exception {
    for (int x = 0; x < 1000; x++) {
      proxy.hello("hi!");
    }
  }

  @Test
  void connectionRefusedOneWay() throws IOException {
    // Nothing listens on port 4444; even a one-way message must surface the failure.
    assertThrows(Exception.class, () -> {
      Transceiver client = new HttpTransceiver(new URL("http://localhost:4444"));
      SpecificRequestor req = new SpecificRequestor(Simple.class, client);
      addRpcPlugins(req);
      Simple proxy = SpecificRequestor.getClient(Simple.class, req);
      proxy.ack();
    });
  }

  /**
   * Construct and use a protocol whose "hello" method has an extra argument to
   * check that schema is sent to parse request.
   */
  @Test
  void paramVariation() throws Exception {
    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
    List<Schema.Field> fields = new ArrayList<>();
    fields.add(new Schema.Field("extra", Schema.create(Schema.Type.BOOLEAN), null, null));
    fields.add(new Schema.Field("greeting", Schema.create(Schema.Type.STRING), null, null));
    Protocol.Message message = protocol.createMessage("hello", null /* doc */, new LinkedHashMap<String, String>(),
        Schema.createRecord(fields), Schema.create(Schema.Type.STRING), Schema.createUnion(new ArrayList<>()));
    protocol.getMessages().put("hello", message);
    try (Transceiver t = createTransceiver()) {
      GenericRequestor r = new GenericRequestor(protocol, t);
      addRpcPlugins(r);
      GenericRecord params = new GenericData.Record(message.getRequest());
      params.put("extra", Boolean.TRUE);
      params.put("greeting", "bob");
      String response = r.request("hello", params).toString();
      assertEquals("goodbye", response);
    }
  }

  @AfterAll
  public static void testHandshakeCount() throws IOException {
    monitor.assertHandshake();
  }

  @AfterAll
  public static void testStopServer() throws IOException {
    client.close();
    server.close();
    server = null;
  }

  /** RPC plugin that counts protocol-negotiation handshakes seen by the server. */
  public class HandshakeMonitor extends RPCPlugin {
    private int handshakes;
    private HashSet<String> seenProtocols = new HashSet<>();

    @Override
    public void serverConnecting(RPCContext context) {
      handshakes++;
      int expected = getExpectedHandshakeCount();
      if (expected > 0 && handshakes > expected) {
        throw new IllegalStateException(
            "Expected number of Protocol negotiation handshakes exceeded expected " + expected + " was " + handshakes);
      }
      // check that a given client protocol is only sent once
      String clientProtocol = context.getHandshakeRequest().getClientProtocol();
      if (clientProtocol != null) {
        assertFalse(seenProtocols.contains(clientProtocol));
        seenProtocols.add(clientProtocol);
      }
    }

    public void assertHandshake() {
      int expected = getExpectedHandshakeCount();
      if (expected != REPEATING) {
        assertEquals(expected, handshakes, "Expected number of handshakes did not take place.");
      }
    }
  }

  /** Handshakes this transport is expected to perform; REPEATING disables the check. */
  protected int getExpectedHandshakeCount() {
    return 3;
  }

  /** Cross-language interop driver: clients read server ports from a shared directory. */
  public static class InteropTest {

    private static File SERVER_PORTS_DIR;
    static {
      try {
        SERVER_PORTS_DIR = Files.createTempDirectory(TestProtocolSpecific.class.getSimpleName()).toFile();
      } catch (IOException e) {
        e.printStackTrace();
      }
    }

    @Test
    void client() throws Exception {
      // Each file in SERVER_PORTS_DIR names a server implementation and holds its port.
      for (File f : Objects.requireNonNull(SERVER_PORTS_DIR.listFiles())) {
        try (LineNumberReader reader = new LineNumberReader(new FileReader(f))) {
          int port = Integer.parseInt(reader.readLine());
          System.out.println("Validating java client to " + f.getName() + " - " + port);
          // NOTE(review): this transceiver is never closed — consider try-with-resources.
          Transceiver client = new SocketTransceiver(new InetSocketAddress("localhost", port));
          proxy = SpecificRequestor.getClient(Simple.class, client);
          TestProtocolSpecific proto = new TestProtocolSpecific();
          proto.hello();
          proto.echo();
          proto.echoBytes();
          proto.error();
          System.out.println("Done! Validation java client to " + f.getName() + " - " + port);
        }
      }
    }

    /**
     * Starts the RPC server.
     */
    public static void main(String[] args) throws Exception {
      SocketServer server = new SocketServer(new SpecificResponder(Simple.class, new TestImpl()),
          new InetSocketAddress(0));
      server.start();
      // Publish the bound port so interop clients can find this server.
      File portFile = new File(SERVER_PORTS_DIR, "java-port");
      try (FileWriter w = new FileWriter(portFile)) {
        w.write(Integer.toString(server.getPort()));
      }
    }
  }
}
| 7,464 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/TestProtocolGeneric.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import org.apache.avro.Protocol.Message;
import org.apache.avro.Schema.Field;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.ipc.SocketServer;
import org.apache.avro.ipc.SocketTransceiver;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.generic.GenericResponder;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Random;
/**
 * End-to-end tests of the generic (schema-driven, non-code-generated) Avro RPC
 * layer: a {@link GenericResponder} implementing the "simple" test protocol is
 * served over a socket and exercised with a {@link GenericRequestor}.
 */
public class TestProtocolGeneric {
  private static final Logger LOG = LoggerFactory.getLogger(TestProtocolGeneric.class);

  protected static final File FILE = new File("../../../share/test/schemas/simple.avpr");
  protected static final Protocol PROTOCOL;
  static {
    try {
      PROTOCOL = Protocol.parse(FILE);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  // When set, the responder throws an undeclared RuntimeException instead of
  // the declared TestError; toggled only by undeclaredError() below.
  private static boolean throwUndeclaredError;

  protected static class TestResponder extends GenericResponder {
    public TestResponder() {
      super(PROTOCOL);
    }

    /** Dispatches on the message name: hello, echo, echoBytes and error. */
    public Object respond(Message message, Object request) throws AvroRemoteException {
      GenericRecord params = (GenericRecord) request;

      if ("hello".equals(message.getName())) {
        LOG.info("hello: " + params.get("greeting"));
        return new Utf8("goodbye");
      }

      if ("echo".equals(message.getName())) {
        Object record = params.get("record");
        LOG.info("echo: " + record);
        return record;
      }

      if ("echoBytes".equals(message.getName())) {
        Object data = params.get("data");
        LOG.info("echoBytes: " + data);
        return data;
      }

      if ("error".equals(message.getName())) {
        if (throwUndeclaredError)
          throw new RuntimeException("foo");
        GenericRecord error = new GenericData.Record(PROTOCOL.getType("TestError"));
        error.put("message", new Utf8("an error"));
        throw new AvroRemoteException(error);
      }

      throw new AvroRuntimeException("unexpected message: " + message.getName());
    }
  }

  protected static SocketServer server;
  protected static Transceiver client;
  protected static GenericRequestor requestor;

  /** Lazily starts the shared server and connects the shared requestor. */
  @BeforeEach
  public void testStartServer() throws Exception {
    if (server != null)
      return;
    server = new SocketServer(new TestResponder(), new InetSocketAddress(0));
    server.start();
    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    requestor = new GenericRequestor(PROTOCOL, client);
  }

  @Test
  void hello() throws Exception {
    GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
    params.put("greeting", new Utf8("bob"));
    Utf8 response = (Utf8) requestor.request("hello", params);
    assertEquals(new Utf8("goodbye"), response);
  }

  @Test
  void echo() throws Exception {
    GenericRecord record = new GenericData.Record(PROTOCOL.getType("TestRecord"));
    record.put("name", new Utf8("foo"));
    record.put("kind", new GenericData.EnumSymbol(PROTOCOL.getType("Kind"), "BAR"));
    record.put("hash",
        new GenericData.Fixed(PROTOCOL.getType("MD5"), new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));

    GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("echo").getRequest());
    params.put("record", record);
    Object echoed = requestor.request("echo", params);
    assertEquals(record, echoed);
  }

  @Test
  void echoBytes() throws Exception {
    // Round-trip a randomly sized, randomly filled byte buffer.
    Random random = new Random();
    int length = random.nextInt(1024 * 16);
    GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("echoBytes").getRequest());
    ByteBuffer data = ByteBuffer.allocate(length);
    random.nextBytes(data.array());
    data.flip();
    params.put("data", data);
    Object echoed = requestor.request("echoBytes", params);
    assertEquals(data, echoed);
  }

  @Test
  void error() throws Exception {
    // The declared TestError should surface as an AvroRemoteException whose
    // value carries the error record.
    GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("error").getRequest());
    AvroRemoteException error = null;
    try {
      requestor.request("error", params);
    } catch (AvroRemoteException e) {
      error = e;
    }
    assertNotNull(error);
    assertEquals("an error", ((GenericRecord) error.getValue()).get("message").toString());
  }

  @Test
  void undeclaredError() throws Exception {
    // Fixed: throwUndeclaredError is static; access it directly rather than
    // through an instance reference.
    throwUndeclaredError = true;
    RuntimeException error = null;
    GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("error").getRequest());
    try {
      requestor.request("error", params);
    } catch (RuntimeException e) {
      error = e;
    } finally {
      throwUndeclaredError = false;
    }
    assertNotNull(error);
    assertTrue(error.toString().contains("foo"));
  }

  /**
   * Construct and use a different protocol whose "hello" method has an extra
   * argument to check that schema is sent to parse request.
   */
  @Test
  public void handshake() throws Exception {
    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
    List<Field> fields = new ArrayList<>();
    fields.add(new Schema.Field("extra", Schema.create(Schema.Type.BOOLEAN), null, null));
    fields.add(new Schema.Field("greeting", Schema.create(Schema.Type.STRING), null, null));
    Protocol.Message message = protocol.createMessage("hello", null /* doc */, new LinkedHashMap<String, String>(),
        Schema.createRecord(fields), Schema.create(Schema.Type.STRING), Schema.createUnion(new ArrayList<>()));
    protocol.getMessages().put("hello", message);
    try (Transceiver t = new SocketTransceiver(new InetSocketAddress(server.getPort()))) {
      GenericRequestor r = new GenericRequestor(protocol, t);
      GenericRecord params = new GenericData.Record(message.getRequest());
      params.put("extra", Boolean.TRUE);
      params.put("greeting", new Utf8("bob"));
      Utf8 response = (Utf8) r.request("hello", params);
      assertEquals(new Utf8("goodbye"), response);
    }
  }

  /**
   * Construct and use a different protocol whose "echo" response has an extra
   * field to check that correct schema is used to parse response.
   */
  @Test
  public void responseChange() throws Exception {
    // Rebuild TestRecord with an extra boolean field defaulting to true.
    List<Field> fields = new ArrayList<>();
    for (Field f : PROTOCOL.getType("TestRecord").getFields())
      fields.add(new Field(f.name(), f.schema(), null, null));
    fields.add(new Field("extra", Schema.create(Schema.Type.BOOLEAN), null, true));
    Schema record = Schema.createRecord("TestRecord", null, "org.apache.avro.test", false);
    record.setFields(fields);

    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");
    List<Field> params = new ArrayList<>();
    params.add(new Field("record", record, null, null));
    Protocol.Message message = protocol.createMessage("echo", null, new LinkedHashMap<String, String>(),
        Schema.createRecord(params), record, Schema.createUnion(new ArrayList<>()));
    protocol.getMessages().put("echo", message);
    try (Transceiver t = new SocketTransceiver(new InetSocketAddress(server.getPort()))) {
      GenericRequestor r = new GenericRequestor(protocol, t);
      GenericRecord args = new GenericData.Record(message.getRequest());
      GenericRecord rec = new GenericData.Record(record);
      rec.put("name", new Utf8("foo"));
      rec.put("kind", new GenericData.EnumSymbol(PROTOCOL.getType("Kind"), "BAR"));
      rec.put("hash", new GenericData.Fixed(PROTOCOL.getType("MD5"),
          new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
      rec.put("extra", Boolean.TRUE);
      args.put("record", rec);
      GenericRecord response = (GenericRecord) r.request("echo", args);
      assertEquals(rec, response);
    }
  }

  /** Runs once after all tests: closes the shared client and server. */
  @AfterAll
  public static void testStopServer() throws Exception {
    client.close();
    server.close();
  }
}
| 7,465 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/DataFileInteropTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro;
import static org.junit.jupiter.api.Assertions.*;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
/**
 * Reads every Avro data file found in the interop data directory (produced by
 * other language implementations) and verifies that each can be decoded with
 * both the generic and the specific APIs.
 */
public class DataFileInteropTest {

  private static final File DATAFILE_DIR = new File(System.getProperty("test.dir", "/tmp"));

  @BeforeAll
  public static void printDir() {
    System.out.println("Reading data files from directory: " + DATAFILE_DIR.getAbsolutePath());
  }

  @Test
  void generatedGeneric() throws IOException {
    System.out.println("Reading with generic:");
    DatumReaderProvider<Object> provider = GenericDatumReader::new;
    readFiles(provider);
  }

  @Test
  void generatedSpecific() throws IOException {
    System.out.println("Reading with specific:");
    DatumReaderProvider<Interop> provider = SpecificDatumReader::new;
    readFiles(provider);
  }

  // Can't use same Interop.java as specific for reflect.
  // This used to be the case because one used Utf8 and the other String, but
  // we use CharSequence now.
  // The current incompatibility is now that one uses byte[] and the other
  // ByteBuffer
  // We could
  // fix this by defining a reflect-specific version of Interop.java, but we'd
  // need to put it on a different classpath than the specific one.
  // I think changing Specific to generate more flexible code would help too --
  // it could convert ByteBuffer to byte[] or vice/versa.
  // Additionally, some complication arises because of IndexedRecord's simplicity
  // @Test
  // public void testGeneratedReflect() throws IOException {
  // DatumReaderProvider<Interop> provider = new DatumReaderProvider<Interop>() {
  // @Override public DatumReader<Interop> get() {
  // return new ReflectDatumReader<Interop>(Interop.class);
  // }
  // };
  // readFiles(provider);
  // }

  /**
   * Opens every file in the data directory with a reader from {@code provider},
   * checks the optional "user_metadata" entry, and iterates all records.
   */
  private <T> void readFiles(DatumReaderProvider<T> provider) throws IOException {
    for (File f : Objects.requireNonNull(DATAFILE_DIR.listFiles())) {
      System.out.println("Reading: " + f.getName());
      // The cast is needed to reach getMeta(), which openReader's return type
      // does not expose.
      try (DataFileReader<T> reader = (DataFileReader<T>) DataFileReader.openReader(f, provider.get())) {
        byte[] user_metadata = reader.getMeta("user_metadata");
        if (user_metadata != null) {
          assertArrayEquals("someByteArray".getBytes(StandardCharsets.UTF_8), user_metadata);
        }
        int i = 0;
        for (Object datum : reader) {
          i++;
          assertNotNull(datum);
        }
        // Interop files are never empty; an empty read means we failed.
        assertNotEquals(0, i);
      }
    }
  }

  /** Supplies a fresh DatumReader for each file that is opened. */
  @FunctionalInterface
  interface DatumReaderProvider<T> {
    DatumReader<T> get();
  }
}
| 7,466 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import org.apache.avro.FooBarSpecificRecord;
import org.apache.avro.FooBarSpecificRecord.Builder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.Test;
import test.StringablesRecord;
public class TestSpecificDatumReader {
public static byte[] serializeRecord(FooBarSpecificRecord fooBarSpecificRecord) throws IOException {
SpecificDatumWriter<FooBarSpecificRecord> datumWriter = new SpecificDatumWriter<>(FooBarSpecificRecord.SCHEMA$);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
datumWriter.write(fooBarSpecificRecord, encoder);
encoder.flush();
return byteArrayOutputStream.toByteArray();
}
public static byte[] serializeRecord(StringablesRecord stringablesRecord) throws IOException {
SpecificDatumWriter<StringablesRecord> datumWriter = new SpecificDatumWriter<>(StringablesRecord.SCHEMA$);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
datumWriter.write(stringablesRecord, encoder);
encoder.flush();
return byteArrayOutputStream.toByteArray();
}
@Test
void read() throws IOException {
Builder newBuilder = FooBarSpecificRecord.newBuilder();
newBuilder.setId(42);
newBuilder.setName("foo");
newBuilder.setNicknames(Collections.singletonList("bar"));
newBuilder.setRelatedids(Arrays.asList(1, 2, 3));
FooBarSpecificRecord specificRecord = newBuilder.build();
byte[] recordBytes = serializeRecord(specificRecord);
Decoder decoder = DecoderFactory.get().binaryDecoder(recordBytes, null);
SpecificDatumReader<FooBarSpecificRecord> specificDatumReader = new SpecificDatumReader<>(
FooBarSpecificRecord.SCHEMA$);
FooBarSpecificRecord deserialized = new FooBarSpecificRecord();
specificDatumReader.read(deserialized, decoder);
assertEquals(specificRecord, deserialized);
}
@Test
void stringables() throws IOException {
StringablesRecord.Builder newBuilder = StringablesRecord.newBuilder();
newBuilder.setValue(new BigDecimal("42.11"));
HashMap<String, BigDecimal> mapWithBigDecimalElements = new HashMap<>();
mapWithBigDecimalElements.put("test", new BigDecimal("11.11"));
newBuilder.setMapWithBigDecimalElements(mapWithBigDecimalElements);
HashMap<BigInteger, String> mapWithBigIntKeys = new HashMap<>();
mapWithBigIntKeys.put(BigInteger.ONE, "test");
newBuilder.setMapWithBigIntKeys(mapWithBigIntKeys);
StringablesRecord stringablesRecord = newBuilder.build();
byte[] recordBytes = serializeRecord(stringablesRecord);
Decoder decoder = DecoderFactory.get().binaryDecoder(recordBytes, null);
SpecificDatumReader<StringablesRecord> specificDatumReader = new SpecificDatumReader<>(StringablesRecord.SCHEMA$);
StringablesRecord deserialized = new StringablesRecord();
specificDatumReader.read(deserialized, decoder);
assertEquals(stringablesRecord, deserialized);
}
}
| 7,467 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificBuilderTree.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import static org.junit.jupiter.api.Assertions.*;
import java.util.ArrayList;
import java.util.Optional;
import org.apache.avro.AvroMissingFieldException;
import org.apache.avro.test.http.HttpMethod;
import org.apache.avro.test.http.HttpRequest;
import org.apache.avro.test.http.HttpURI;
import org.apache.avro.test.http.NetworkType;
import org.apache.avro.test.http.QueryParameter;
import org.apache.avro.test.http.Request;
import org.apache.avro.test.http.UserAgent;
import org.apache.avro.test.nullable.RecordWithNullables;
import org.junit.jupiter.api.Test;
/**
 * Tests for nested SpecificRecord builders ("builder trees"): implicit child
 * builders, copy semantics between builders and built records, last-setter-wins
 * behavior, nullable-field handling, and Optional-based accessors.
 */
public class TestSpecificBuilderTree {
// Builds a Request with HTTP details filled in but the connection's
// networkAddress deliberately left unset (used by the failure test below).
private Request.Builder createPartialBuilder() {
Request.Builder requestBuilder = Request.newBuilder();
requestBuilder.setTimestamp(1234567890);
requestBuilder.getConnectionBuilder().setNetworkType(NetworkType.IPv4);
requestBuilder.getHttpRequestBuilder().getUserAgentBuilder().setUseragent("Chrome 123").setId("Foo");
requestBuilder.getHttpRequestBuilder().getURIBuilder().setMethod(HttpMethod.GET).setPath("/index.html");
if (!requestBuilder.getHttpRequestBuilder().getURIBuilder().hasParameters()) {
requestBuilder.getHttpRequestBuilder().getURIBuilder().setParameters(new ArrayList<>());
}
requestBuilder.getHttpRequestBuilder().getURIBuilder().getParameters()
.add(QueryParameter.newBuilder().setName("Foo").setValue("Bar").build());
return requestBuilder;
}
// build() on a tree with a missing required field must fail with a message
// and path pointing at the unset field.
@Test
void failOnIncompleteTree() {
assertThrows(AvroMissingFieldException.class, () -> {
try {
createPartialBuilder().build();
} catch (AvroMissingFieldException amfe) {
assertEquals("Field networkAddress type:STRING pos:1 not set and has no default value", amfe.getMessage());
assertEquals("Path in schema: --> connection --> networkAddress", amfe.toString());
throw amfe;
}
fail("Should NEVER get here");
});
}
// Copying a builder must deep-copy its child builders: later mutations of
// either copy must not leak into the other.
@Test
void copyBuilder() {
Request.Builder requestBuilder1 = createPartialBuilder();
Request.Builder requestBuilder2 = Request.newBuilder(requestBuilder1);
requestBuilder1.getConnectionBuilder().setNetworkAddress("1.1.1.1");
requestBuilder2.getConnectionBuilder().setNetworkAddress("2.2.2.2");
requestBuilder2.getHttpRequestBuilder().getUserAgentBuilder().setId("Bar");
Request request1 = requestBuilder1.build();
Request request2 = requestBuilder2.build();
assertEquals(NetworkType.IPv4, request1.getConnection().getNetworkType());
assertEquals("1.1.1.1", request1.getConnection().getNetworkAddress());
assertEquals("Chrome 123", request1.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Foo", request1.getHttpRequest().getUserAgent().getId());
assertEquals(HttpMethod.GET, request1.getHttpRequest().getURI().getMethod());
assertEquals("/index.html", request1.getHttpRequest().getURI().getPath());
assertEquals(1, request1.getHttpRequest().getURI().getParameters().size());
assertEquals("Foo", request1.getHttpRequest().getURI().getParameters().get(0).getName());
assertEquals("Bar", request1.getHttpRequest().getURI().getParameters().get(0).getValue());
assertEquals(NetworkType.IPv4, request2.getConnection().getNetworkType());
assertEquals("2.2.2.2", request2.getConnection().getNetworkAddress());
assertEquals("Chrome 123", request2.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Bar", request2.getHttpRequest().getUserAgent().getId());
assertEquals(HttpMethod.GET, request2.getHttpRequest().getURI().getMethod());
assertEquals("/index.html", request2.getHttpRequest().getURI().getPath());
assertEquals(1, request2.getHttpRequest().getURI().getParameters().size());
assertEquals("Foo", request2.getHttpRequest().getURI().getParameters().get(0).getName());
assertEquals("Bar", request2.getHttpRequest().getURI().getParameters().get(0).getValue());
}
// A builder created from a built record must start from that record's values
// and be independently mutable.
@Test
void createBuilderFromInstance() {
Request.Builder requestBuilder1 = createPartialBuilder();
requestBuilder1.getConnectionBuilder().setNetworkAddress("1.1.1.1");
Request request1 = requestBuilder1.build();
Request.Builder requestBuilder2 = Request.newBuilder(request1);
requestBuilder2.getConnectionBuilder().setNetworkAddress("2.2.2.2");
requestBuilder2.getHttpRequestBuilder().getUserAgentBuilder().setId("Bar");
requestBuilder2.getHttpRequestBuilder().getURIBuilder().setMethod(HttpMethod.POST);
requestBuilder2.getHttpRequestBuilder().getUserAgentBuilder().setUseragent("Firefox 456");
Request request2 = requestBuilder2.build();
assertEquals(NetworkType.IPv4, request1.getConnection().getNetworkType());
assertEquals("1.1.1.1", request1.getConnection().getNetworkAddress());
assertEquals("Chrome 123", request1.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Foo", request1.getHttpRequest().getUserAgent().getId());
assertEquals(HttpMethod.GET, request1.getHttpRequest().getURI().getMethod());
assertEquals("/index.html", request1.getHttpRequest().getURI().getPath());
assertEquals(1, request1.getHttpRequest().getURI().getParameters().size());
assertEquals("Foo", request1.getHttpRequest().getURI().getParameters().get(0).getName());
assertEquals("Bar", request1.getHttpRequest().getURI().getParameters().get(0).getValue());
assertEquals(NetworkType.IPv4, request2.getConnection().getNetworkType());
assertEquals("2.2.2.2", request2.getConnection().getNetworkAddress());
assertEquals("Firefox 456", request2.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Bar", request2.getHttpRequest().getUserAgent().getId());
assertEquals(HttpMethod.POST, request2.getHttpRequest().getURI().getMethod());
assertEquals("/index.html", request2.getHttpRequest().getURI().getPath());
assertEquals(1, request2.getHttpRequest().getURI().getParameters().size());
assertEquals("Foo", request2.getHttpRequest().getURI().getParameters().get(0).getName());
assertEquals("Bar", request2.getHttpRequest().getURI().getParameters().get(0).getValue());
}
// Shared setup for the last-one-wins tests: timestamp and connection set,
// HTTP request left for each test to populate.
private Request.Builder createLastOneTestsBuilder() {
Request.Builder requestBuilder = Request.newBuilder();
requestBuilder.setTimestamp(1234567890);
requestBuilder.getConnectionBuilder().setNetworkType(NetworkType.IPv4).setNetworkAddress("1.1.1.1");
return requestBuilder;
}
// Setting a complete value AFTER mutating the child builder must win over
// the child-builder mutations.
@Test
void lastOneWins_Setter() {
Request.Builder requestBuilder = createLastOneTestsBuilder();
requestBuilder.getHttpRequestBuilder().getURIBuilder().setMethod(HttpMethod.GET).setPath("/index.html");
requestBuilder.getHttpRequestBuilder().getUserAgentBuilder().setUseragent("Chrome 123").setId("Foo");
HttpRequest httpRequest = HttpRequest.newBuilder().setUserAgent(new UserAgent("Bar", "Firefox 321"))
.setURI(HttpURI.newBuilder().setMethod(HttpMethod.POST).setPath("/login.php").build()).build();
Request request = requestBuilder.setHttpRequest(httpRequest).build();
assertEquals(NetworkType.IPv4, request.getConnection().getNetworkType());
assertEquals("1.1.1.1", request.getConnection().getNetworkAddress());
assertEquals(0, request.getHttpRequest().getURI().getParameters().size());
assertEquals("Firefox 321", request.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Bar", request.getHttpRequest().getUserAgent().getId());
assertEquals(HttpMethod.POST, request.getHttpRequest().getURI().getMethod());
assertEquals("/login.php", request.getHttpRequest().getURI().getPath());
}
// Conversely, mutating the child builder AFTER setting a complete value must
// win over that value.
@Test
void lastOneWins_Builder() {
Request.Builder requestBuilder = createLastOneTestsBuilder();
HttpRequest httpRequest = HttpRequest.newBuilder().setUserAgent(new UserAgent("Bar", "Firefox 321"))
.setURI(HttpURI.newBuilder().setMethod(HttpMethod.POST).setPath("/login.php").build()).build();
requestBuilder.setHttpRequest(httpRequest);
requestBuilder.getHttpRequestBuilder().getURIBuilder().setMethod(HttpMethod.GET).setPath("/index.html");
requestBuilder.getHttpRequestBuilder().getUserAgentBuilder().setUseragent("Chrome 123").setId("Foo");
Request request = requestBuilder.build();
assertEquals(NetworkType.IPv4, request.getConnection().getNetworkType());
assertEquals("1.1.1.1", request.getConnection().getNetworkAddress());
assertEquals("Chrome 123", request.getHttpRequest().getUserAgent().getUseragent());
assertEquals("Foo", request.getHttpRequest().getUserAgent().getId());
assertEquals(0, request.getHttpRequest().getURI().getParameters().size());
assertEquals(HttpMethod.GET, request.getHttpRequest().getURI().getMethod());
assertEquals("/index.html", request.getHttpRequest().getURI().getPath());
}
// Copying an untouched builder must not mark any nullable field as set; the
// final getNullableRecordBuilder() call just checks no exception is thrown.
@Test
void copyBuilderWithNullables() {
RecordWithNullables.Builder builder = RecordWithNullables.newBuilder();
assertFalse(builder.hasNullableRecordBuilder());
assertFalse(builder.hasNullableRecord());
assertFalse(builder.hasNullableString());
assertFalse(builder.hasNullableLong());
assertFalse(builder.hasNullableInt());
assertFalse(builder.hasNullableMap());
assertFalse(builder.hasNullableArray());
RecordWithNullables.Builder builderCopy = RecordWithNullables.newBuilder(builder);
assertFalse(builderCopy.hasNullableRecordBuilder());
assertFalse(builderCopy.hasNullableRecord());
assertFalse(builderCopy.hasNullableString());
assertFalse(builderCopy.hasNullableLong());
assertFalse(builderCopy.hasNullableInt());
assertFalse(builderCopy.hasNullableMap());
assertFalse(builderCopy.hasNullableArray());
builderCopy.getNullableRecordBuilder();
}
// Explicitly setting nullable fields to null must mark them as set
// (distinct from "never set"), and copying must preserve that state.
@Test
void copyBuilderWithNullablesAndSetToNull() {
// Create builder with all values default to null, yet unset.
RecordWithNullables.Builder builder = RecordWithNullables.newBuilder();
// Ensure all values have not been set
assertFalse(builder.hasNullableRecordBuilder());
assertFalse(builder.hasNullableRecord());
assertFalse(builder.hasNullableString());
assertFalse(builder.hasNullableLong());
assertFalse(builder.hasNullableInt());
assertFalse(builder.hasNullableMap());
assertFalse(builder.hasNullableArray());
// Set all values to null
builder.setNullableRecordBuilder(null);
builder.setNullableRecord(null);
builder.setNullableString(null);
builder.setNullableLong(null);
builder.setNullableInt(null);
builder.setNullableMap(null);
builder.setNullableArray(null);
// A Builder remains False because it is null
assertFalse(builder.hasNullableRecordBuilder());
// Ensure all values have been set
assertTrue(builder.hasNullableRecord());
assertTrue(builder.hasNullableString());
assertTrue(builder.hasNullableLong());
assertTrue(builder.hasNullableInt());
assertTrue(builder.hasNullableMap());
assertTrue(builder.hasNullableArray());
// Implicitly create a builder instance and clear the actual value.
builder.getNullableRecordBuilder();
assertTrue(builder.hasNullableRecordBuilder());
assertFalse(builder.hasNullableRecord());
// Create a copy of this builder.
RecordWithNullables.Builder builderCopy = RecordWithNullables.newBuilder(builder);
// Ensure all values are still the same
// NOTE(review): the assertions below check `builder`, not the freshly made
// `builderCopy` (which is otherwise unused). Possibly the intent was to
// verify the copy — confirm and consider asserting on builderCopy as well.
assertTrue(builder.hasNullableRecordBuilder());
assertFalse(builder.hasNullableRecord());
assertTrue(builder.hasNullableString());
assertTrue(builder.hasNullableLong());
assertTrue(builder.hasNullableInt());
assertTrue(builder.hasNullableMap());
assertTrue(builder.hasNullableArray());
}
@Test
void getBuilderForRecordWithNullRecord() {
// Create a record with all nullable fields set to the default value : null
RecordWithNullables recordWithNullables = RecordWithNullables.newBuilder().build();
// Now create a Builder using this record as the base
RecordWithNullables.Builder builder = RecordWithNullables.newBuilder(recordWithNullables);
// In the past this caused an NPE
builder.getNullableRecordBuilder();
}
@Test
void getBuilderForNullRecord() {
// In the past this caused an NPE
RecordWithNullables.newBuilder((RecordWithNullables) null);
}
@Test
void getBuilderForNullBuilder() {
// In the past this caused an NPE
RecordWithNullables.newBuilder((RecordWithNullables.Builder) null);
}
// Optional-returning accessors must compose with flatMap and report absence
// for fields that were never set.
@Test
void validateBrowsingOptionals() {
Request.Builder requestBuilder = Request.newBuilder();
requestBuilder.setTimestamp(1234567890);
requestBuilder.getHttpRequestBuilder().getUserAgentBuilder().setUseragent("Chrome 123");
requestBuilder.getHttpRequestBuilder().getURIBuilder().setMethod(HttpMethod.GET).setPath("/index.html");
Request request = requestBuilder.build();
assertEquals("Chrome 123", Optional.of(request).flatMap(Request::getOptionalHttpRequest)
.flatMap(HttpRequest::getOptionalUserAgent).flatMap(UserAgent::getOptionalUseragent).orElse("UNKNOWN"));
assertFalse(Optional.of(request).flatMap(Request::getOptionalHttpRequest).flatMap(HttpRequest::getOptionalUserAgent)
.flatMap(UserAgent::getOptionalId).isPresent());
assertEquals(HttpMethod.GET, Optional.of(request).flatMap(Request::getOptionalHttpRequest)
.flatMap(HttpRequest::getOptionalURI).flatMap(HttpURI::getOptionalMethod).orElse(null));
assertEquals("/index.html", Optional.of(request).flatMap(Request::getOptionalHttpRequest)
.flatMap(HttpRequest::getOptionalURI).flatMap(HttpURI::getOptionalPath).orElse(null));
}
}
| 7,468 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificRecordBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import static org.junit.jupiter.api.Assertions.*;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Foo;
import org.apache.avro.Interop;
import org.apache.avro.Kind;
import org.apache.avro.MD5;
import org.apache.avro.Node;
import org.apache.avro.ipc.specific.PageView;
import org.apache.avro.ipc.specific.Person;
import org.apache.avro.ipc.specific.ProductPage;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for the builder API of generated specific record classes:
 * required/optional field handling, defaults, copy constructors, unions,
 * and field clearing.
 */
public class TestSpecificRecordBuilder {
  @Test
  void specificBuilder() {
    // Create a new builder, and leave some fields with default values empty:
    Person.Builder builder = Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setState("CA");
    assertTrue(builder.hasName());
    assertEquals("James Gosling", builder.getName());
    assertTrue(builder.hasYearOfBirth());
    assertEquals(1955, builder.getYearOfBirth());
    assertFalse(builder.hasCountry());
    assertNull(builder.getCountry());
    assertTrue(builder.hasState());
    assertEquals("CA", builder.getState());
    assertFalse(builder.hasFriends());
    assertNull(builder.getFriends());
    assertFalse(builder.hasLanguages());
    assertNull(builder.getLanguages());

    Person person = builder.build();
    assertEquals("James Gosling", person.getName());
    assertEquals(1955, person.getYearOfBirth());
    assertEquals("US", person.getCountry()); // country should default to "US"
    assertEquals("CA", person.getState());
    assertNotNull(person.getFriends()); // friends should default to an empty list
    assertEquals(0, person.getFriends().size());
    assertNotNull(person.getLanguages()); // Languages should now be "English" and "Java"
    assertEquals(2, person.getLanguages().size());
    assertEquals("English", person.getLanguages().get(0));
    assertEquals("Java", person.getLanguages().get(1));

    // Test copy constructors:
    assertEquals(builder, Person.newBuilder(builder));
    assertEquals(person, Person.newBuilder(person).build());

    Person.Builder builderCopy = Person.newBuilder(person);
    assertEquals("James Gosling", builderCopy.getName());
    assertEquals(1955, builderCopy.getYearOfBirth());
    assertEquals("US", builderCopy.getCountry()); // country should default to "US"
    assertEquals("CA", builderCopy.getState());
    assertNotNull(builderCopy.getFriends()); // friends should default to an empty list
    assertEquals(0, builderCopy.getFriends().size());

    // Test clearing fields:
    builderCopy.clearFriends().clearCountry();
    assertFalse(builderCopy.hasFriends());
    assertFalse(builderCopy.hasCountry());
    assertNull(builderCopy.getFriends());
    assertNull(builderCopy.getCountry());
    Person person2 = builderCopy.build();
    assertNotNull(person2.getFriends());
    assertTrue(person2.getFriends().isEmpty());
  }

  @Test
  void unions() {
    // A union field accepts any of its branch types and survives a builder copy.
    long datetime = 1234L;
    String product = "widget";
    PageView p = PageView.newBuilder().setDatetime(1234L)
        .setPageContext(ProductPage.newBuilder().setProduct(product).build()).build();
    assertEquals(datetime, p.getDatetime());
    assertEquals(ProductPage.class, p.getPageContext().getClass());
    assertEquals(product, ((ProductPage) p.getPageContext()).getProduct());
    PageView p2 = PageView.newBuilder(p).build();
    assertEquals(datetime, p2.getDatetime());
    assertEquals(ProductPage.class, p2.getPageContext().getClass());
    assertEquals(product, ((ProductPage) p2.getPageContext()).getProduct());
    assertEquals(p, p2);
  }

  @Test
  void interop() {
    // Round-trip a record containing every Avro field type through the
    // builder's copy constructor and verify field-by-field equality.
    Interop interop = Interop.newBuilder().setNullField(null).setArrayField(Arrays.asList(3.14159265, 6.022))
        .setBoolField(true).setBytesField(ByteBuffer.allocate(4).put(new byte[] { 3, 2, 1, 0 })).setDoubleField(1.41421)
        .setEnumField(Kind.C).setFixedField(new MD5(new byte[] { 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3 }))
        .setFloatField(1.61803f).setIntField(64).setLongField(1024)
        .setMapField(Collections.singletonMap("Foo1", new Foo())).setRecordField(new Node()).setStringField("MyInterop")
        .setUnionField(2.71828).build();
    Interop copy = Interop.newBuilder(interop).build();
    assertEquals(interop.getArrayField().size(), copy.getArrayField().size());
    assertEquals(interop.getArrayField(), copy.getArrayField());
    assertEquals(interop.getBoolField(), copy.getBoolField());
    assertEquals(interop.getBytesField(), copy.getBytesField());
    assertEquals(interop.getDoubleField(), copy.getDoubleField(), 0.001);
    assertEquals(interop.getEnumField(), copy.getEnumField());
    assertEquals(interop.getFixedField(), copy.getFixedField());
    assertEquals(interop.getFloatField(), copy.getFloatField(), 0.001);
    assertEquals(interop.getIntField(), copy.getIntField());
    assertEquals(interop.getLongField(), copy.getLongField());
    assertEquals(interop.getMapField(), copy.getMapField());
    assertEquals(interop.getRecordField(), copy.getRecordField());
    assertEquals(interop.getStringField(), copy.getStringField());
    assertEquals(interop.getUnionField(), copy.getUnionField());
    assertEquals(interop, copy);
  }

  @Test
  void attemptToSetNonNullableFieldToNull() {
    // 'name' is non-nullable, so a null value must be rejected eagerly by the setter.
    assertThrows(AvroRuntimeException.class, () -> Person.newBuilder().setName(null));
  }

  @Test
  void buildWithoutSettingRequiredFields1() {
    // Building with no fields set at all must fail.
    assertThrows(AvroRuntimeException.class, () -> Person.newBuilder().build());
  }

  @Test
  void buildWithoutSettingRequiredFields2() {
    // Omit required non-primitive field 'name'.
    AvroRuntimeException e = assertThrows(AvroRuntimeException.class,
        () -> Person.newBuilder().setYearOfBirth(1900).setState("MA").build());
    // Exception should mention that the 'name' field has not been set
    assertTrue(e.getMessage().contains("name"));
  }

  @Test
  void buildWithoutSettingRequiredFields3() {
    // Omit required primitive field 'year_of_birth'.
    AvroRuntimeException e = assertThrows(AvroRuntimeException.class,
        () -> Person.newBuilder().setName("Anon").setState("CA").build());
    // Exception should mention that the 'year_of_birth' field has not been set
    assertTrue(e.getMessage().contains("year_of_birth"));
  }

  /**
   * Prints throughput statistics for a timed run of {@code count} builds.
   * (Previously duplicated in three tests, each missing the closing ")".)
   */
  private static void printTiming(int count, long durationNanos) {
    double durationMillis = durationNanos / 1e6d;
    System.out.println("Built " + count + " records in " + durationMillis + "ms (" + (count / (durationMillis / 1000d))
        + " records/sec, " + (durationMillis / count) + "ms/record)");
  }

  @Disabled
  @Test
  void builderPerformance() {
    int count = 1000000;
    List<Person> friends = new ArrayList<>(0);
    List<String> languages = new ArrayList<>(Arrays.asList("English", "Java"));
    long startTimeNanos = System.nanoTime();
    for (int ii = 0; ii < count; ii++) {
      Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setCountry("US").setState("CA")
          .setFriends(friends).setLanguages(languages).build();
    }
    printTiming(count, System.nanoTime() - startTimeNanos);
  }

  @Disabled
  @Test
  void builderPerformanceWithDefaultValues() {
    int count = 1000000;
    long startTimeNanos = System.nanoTime();
    for (int ii = 0; ii < count; ii++) {
      Person.newBuilder().setName("James Gosling").setYearOfBirth(1955).setState("CA").build();
    }
    printTiming(count, System.nanoTime() - startTimeNanos);
  }

  @Disabled
  @Test
  @SuppressWarnings("deprecation")
  void manualBuildPerformance() {
    int count = 1000000;
    List<Person> friends = new ArrayList<>(0);
    List<String> languages = new ArrayList<>(Arrays.asList("English", "Java"));
    long startTimeNanos = System.nanoTime();
    for (int ii = 0; ii < count; ii++) {
      Person person = new Person();
      person.setName("James Gosling");
      person.setYearOfBirth(1955);
      person.setState("CA");
      person.setCountry("US");
      person.setFriends(friends);
      person.setLanguages(languages);
    }
    printTiming(count, System.nanoTime() - startTimeNanos);
  }
}
| 7,469 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificDatumWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.ArrayRecord;
import org.apache.avro.AvroTypeException;
import org.apache.avro.MapRecord;
import org.apache.avro.MapRecordEnum;
import org.apache.avro.RecordWithRequiredFields;
import org.apache.avro.Schema;
import org.apache.avro.UnionRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.test.Kind;
import org.apache.avro.test.MD5;
import org.apache.avro.test.TestRecordWithUnion;
import org.apache.avro.test.TestRecord;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Tests for {@link SpecificDatumWriter}, focusing on union resolution and on
 * the clarity of NullPointerException / AvroTypeException messages raised for
 * invalid data nested deep inside records, arrays and maps.
 */
public class TestSpecificDatumWriter {
  @Test
  void resolveUnion() throws IOException {
    // A union field must be JSON-encoded with its branch type as the wrapper key.
    final SpecificDatumWriter<TestRecordWithUnion> writer = new SpecificDatumWriter<>();
    Schema schema = TestRecordWithUnion.SCHEMA$;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
    writer.setSchema(schema);
    TestRecordWithUnion c = TestRecordWithUnion.newBuilder().setKind(Kind.BAR).setValue("rab").build();
    writer.write(c, encoder);
    encoder.flush();
    out.close();
    String expectedJson = String.format("{'kind':{'org.apache.avro.test.Kind':'%s'},'value':{'string':'%s'}}",
        c.getKind().toString(), c.getValue()).replace('\'', '"');
    assertEquals(expectedJson, out.toString("UTF-8"));
  }

  @Test
  void incompleteRecord() throws IOException {
    // Writing a record whose required 'name' field is unset must fail with a
    // message that names the missing field.
    final SpecificDatumWriter<TestRecord> writer = new SpecificDatumWriter<>();
    Schema schema = TestRecord.SCHEMA$;
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out);
    writer.setSchema(schema);
    TestRecord testRecord = new TestRecord();
    testRecord.setKind(Kind.BAR);
    testRecord.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    NullPointerException e = assertThrows(NullPointerException.class, () -> writer.write(testRecord, encoder));
    assertTrue(e.getMessage().contains("null value for (non-nullable) string at TestRecord.name"));
    out.close();
  }

  @Test
  void nestedNPEErrorClarity() throws Exception {
    // A null string deep inside the structure must be reported with its full path.
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    topLevelRecord.getUnionField().getArrayField().get(0).getMapField().get("a").setStrField(null);
    NullPointerException expected = assertThrows(NullPointerException.class, () -> writeObject(topLevelRecord, false));
    assertTrue(
        expected.getMessage()
            .contains("RecordWithRequiredFields.unionField[UnionRecord].arrayField[0].mapField[\"a\"].strField"),
        "unexpected message " + expected.getMessage());
  }

  @Test
  void nestedNPEErrorClarityWithCustomCoders() throws Exception {
    // With custom coders, the path is unavailable; the message must at least say so.
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    topLevelRecord.getUnionField().getArrayField().get(0).getMapField().get("a").setEnumField(null);
    NullPointerException expected = assertThrows(NullPointerException.class, () -> writeObject(topLevelRecord, true));
    assertTrue(expected.getMessage().contains("custom coders were used"),
        "unexpected message " + expected.getMessage());
  }

  @Test
  void nPEForMapKeyErrorClarity() throws Exception {
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    Map<String, MapRecord> map = topLevelRecord.getUnionField().getArrayField().get(0).getMapField();
    map.put(null, map.get("a")); // value is valid, but key is null
    NullPointerException expected = assertThrows(NullPointerException.class, () -> writeObject(topLevelRecord, false));
    assertTrue(
        expected.getMessage()
            .contains("null key in map at RecordWithRequiredFields.unionField[UnionRecord].arrayField[0].mapField"),
        "unexpected message " + expected.getMessage());
  }

  @Test
  void nPEForMapKeyErrorClarityWithCustomCoders() throws Exception {
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    Map<String, MapRecord> map = topLevelRecord.getUnionField().getArrayField().get(0).getMapField();
    map.put(null, map.get("a")); // value is valid, but key is null
    NullPointerException expected = assertThrows(NullPointerException.class, () -> writeObject(topLevelRecord, true));
    assertTrue(expected.getMessage().contains("custom coders were used"),
        "unexpected message " + expected.getMessage());
  }

  @Test
  void nestedATEErrorClarity() throws Exception {
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    topLevelRecord.getUnionField().getArrayField().get(0).getMapField().get("a").setEnumField(null); // not an enum
    AvroTypeException expected = assertThrows(AvroTypeException.class, () -> writeObject(topLevelRecord, false));
    assertTrue(
        expected.getMessage()
            .contains("RecordWithRequiredFields.unionField[UnionRecord].arrayField[0].mapField[\"a\"].enumField"),
        "unexpected message " + expected.getMessage());
  }

  @Test
  void nestedATEErrorClarityWithCustomCoders() throws Exception {
    RecordWithRequiredFields topLevelRecord = buildComplexRecord();
    topLevelRecord.getUnionField().getArrayField().get(0).getMapField().get("a").setEnumField(null); // not an enum
    // with custom coders this gets us an NPE ...
    NullPointerException expected = assertThrows(NullPointerException.class, () -> writeObject(topLevelRecord, true));
    assertTrue(expected.getMessage().contains("custom coders were used"),
        "unexpected message " + expected.getMessage());
  }

  /**
   * Builds a valid record nesting a union, an array, and a map, which tests
   * then corrupt at a single location to provoke write-path errors.
   */
  private RecordWithRequiredFields buildComplexRecord() {
    RecordWithRequiredFields topLevelRecord = new RecordWithRequiredFields();
    UnionRecord unionRecord = new UnionRecord();
    ArrayRecord arrayRecord1 = new ArrayRecord();
    ArrayRecord arrayRecord2 = new ArrayRecord();
    MapRecord mapRecordA = new MapRecord();
    mapRecordA.setEnumField(MapRecordEnum.B);
    mapRecordA.setStrField("4");
    arrayRecord1.setStrField("2");
    Map<String, MapRecord> map1 = new HashMap<>();
    map1.put("a", mapRecordA);
    arrayRecord1.setMapField(map1);
    arrayRecord2.setStrField("2");
    Map<String, MapRecord> map2 = new HashMap<>();
    map2.put("a", mapRecordA);
    arrayRecord2.setMapField(map2);
    unionRecord.setStrField("1");
    unionRecord.setArrayField(Arrays.asList(arrayRecord1, arrayRecord2));
    topLevelRecord.setStrField("0");
    topLevelRecord.setUnionField(unionRecord);
    return topLevelRecord;
  }

  /** Serializes {@code datum} under its own schema; used to trigger write-path errors. */
  private void writeObject(IndexedRecord datum, boolean useCustomCoders) throws Exception {
    writeObject(datum.getSchema(), datum, useCustomCoders);
  }

  /**
   * Serializes {@code datum} under {@code schema} to a throwaway buffer,
   * optionally enabling generated custom coders.
   */
  private void writeObject(Schema schema, Object datum, boolean useCustomCoders) throws Exception {
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(new ByteArrayOutputStream(), null);
    SpecificData specificData = new SpecificData();
    specificData.setCustomCoders(useCustomCoders);
    SpecificDatumWriter<Object> writer = new SpecificDatumWriter<>(schema, specificData);
    writer.write(datum, encoder);
    encoder.flush();
  }
}
| 7,470 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificErrorBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import static org.junit.jupiter.api.Assertions.*;
import org.apache.avro.test.errors.TestError;
import org.junit.jupiter.api.Test;
/**
 * Unit test for the SpecificErrorBuilderBase class.
 */
public class TestSpecificErrorBuilder {
  @Test
  void specificErrorBuilder() {
    TestError.Builder builder = TestError.newBuilder().setValue("value").setCause(new NullPointerException())
        .setMessage$("message$");

    // Every populated field must report itself as set and return a non-null value.
    assertTrue(builder.hasValue());
    assertNotNull(builder.getValue());
    assertTrue(builder.hasCause());
    assertNotNull(builder.getCause());
    assertTrue(builder.hasMessage$());
    assertNotNull(builder.getMessage$());

    // build() carries the field values into the error instance; getMessage()
    // mirrors the 'value' field while message$ is kept separately.
    TestError built = builder.build();
    assertEquals("value", built.getValue());
    assertEquals("value", built.getMessage());
    assertEquals("message$", built.getMessage$());

    // Copy constructors from a builder or a built error yield an equal builder.
    assertEquals(builder, TestError.newBuilder(builder));
    assertEquals(builder, TestError.newBuilder(built));

    // An error assembled via constructor+setter equals one assembled via the builder.
    TestError manual = new TestError("value", new NullPointerException());
    manual.setMessage$("message");
    assertEquals(manual,
        TestError.newBuilder().setValue("value").setCause(new NullPointerException()).setMessage$("message").build());

    // Clearing a field unsets it and nulls its value.
    builder.clearValue();
    assertFalse(builder.hasValue());
    assertNull(builder.getValue());
    builder.clearCause();
    assertFalse(builder.hasCause());
    assertNull(builder.getCause());
    builder.clearMessage$();
    assertFalse(builder.hasMessage$());
    assertNull(builder.getMessage$());
  }

  @Test
  void attemptToSetNonNullableFieldToNull() {
    // message$ is non-nullable, so a null value must be rejected by the setter.
    assertThrows(org.apache.avro.AvroRuntimeException.class, () -> TestError.newBuilder().setMessage$(null));
  }
}
| 7,471 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/specific/TestSpecificData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.specific;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayInputStream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.avro.FooBarSpecificRecord;
import org.apache.avro.Schema;
import org.apache.avro.TestSchema;
import org.apache.avro.TypeEnum;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.test.Kind;
import org.apache.avro.test.MD5;
import org.apache.avro.test.Reserved;
import org.apache.avro.test.TestRecord;
import org.junit.jupiter.api.Test;
/**
 * Tests for {@link SpecificData}: null-safety of hashCode/toString, schema
 * reflection, generic/specific interoperability, JSON rendering,
 * serialization, and reserved enum symbols.
 */
public class TestSpecificData {
  /** Make sure that even with nulls, hashCode() doesn't throw NPE. */
  @Test
  void testHashCode() {
    new TestRecord().hashCode();
    SpecificData.get().hashCode(null, TestRecord.SCHEMA$);
  }

  /** Make sure that even with nulls, toString() doesn't throw NPE. */
  @Test
  void testToString() {
    new TestRecord().toString();
  }

  /** Holder exposing a generic field type for {@link #getMapSchema()}. */
  private static class X {
    public Map<String, String> map;
  }

  @Test
  void getMapSchema() throws Exception {
    // Deriving a schema from a parameterized Map type must not throw.
    SpecificData.get().getSchema(X.class.getField("map").getGenericType());
  }

  /** Test nesting of specific data within generic. */
  @Test
  void specificWithinGeneric() throws Exception {
    // define a record with a field that's a generated TestRecord
    Schema schema = Schema.createRecord("Foo", "", "x.y.z", false);
    List<Schema.Field> fields = new ArrayList<>();
    fields.add(new Schema.Field("f", TestRecord.SCHEMA$, "", null));
    schema.setFields(fields);
    // create a generic instance of this record
    TestRecord nested = new TestRecord();
    nested.setName("foo");
    nested.setKind(Kind.BAR);
    nested.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    GenericData.Record record = new GenericData.Record(schema);
    record.put("f", nested);
    // test that this instance can be written & re-read in all binary modes
    TestSchema.checkBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
    TestSchema.checkDirectBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
    TestSchema.checkBlockingBinary(schema, record, new SpecificDatumWriter<>(), new SpecificDatumReader<>());
  }

  @Test
  void convertGenericToSpecific() {
    GenericRecord generic = new GenericData.Record(TestRecord.SCHEMA$);
    generic.put("name", "foo");
    generic.put("kind", new GenericData.EnumSymbol(Kind.SCHEMA$, "BAR"));
    generic.put("hash",
        new GenericData.Fixed(MD5.SCHEMA$, new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    TestRecord specific = (TestRecord) SpecificData.get().deepCopy(TestRecord.SCHEMA$, generic);
    // Verify the copy actually carried the generic values over (previously
    // the result was computed but never checked).
    assertEquals("foo", specific.getName().toString());
    assertEquals(Kind.BAR, specific.getKind());
  }

  @Test
  void getClassSchema() throws Exception {
    assertEquals(TestRecord.getClassSchema(), TestRecord.SCHEMA$);
    assertEquals(MD5.getClassSchema(), MD5.SCHEMA$);
    assertEquals(Kind.getClassSchema(), Kind.SCHEMA$);
  }

  @Test
  void specificRecordToString() throws IOException {
    FooBarSpecificRecord foo = FooBarSpecificRecord.newBuilder().setId(123).setName("foo")
        .setNicknames(Collections.singletonList("bar")).setRelatedids(Arrays.asList(1, 2, 3)).setTypeEnum(TypeEnum.c)
        .build();
    String json = foo.toString();
    JsonFactory factory = new JsonFactory();
    JsonParser parser = factory.createParser(json);
    ObjectMapper mapper = new ObjectMapper();
    // will throw exception if string is not parsable json
    mapper.readTree(parser);
  }

  @Test
  void externalizeable() throws Exception {
    // Java-serialize and deserialize a record and verify equality; streams are
    // closed via try-with-resources (previously leaked).
    TestRecord before = new TestRecord();
    before.setName("foo");
    before.setKind(Kind.BAR);
    before.setHash(new MD5(new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5 }));
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
      out.writeObject(before);
    }
    try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
      TestRecord after = (TestRecord) in.readObject();
      assertEquals(before, after);
    }
  }

  @Test
  void reservedEnumSymbol() throws Exception {
    // "default" is a Java keyword, so the generated symbol is mangled to default$.
    assertEquals(Reserved.default$, SpecificData.get().createEnum("default", Reserved.SCHEMA$));
  }
}
| 7,472 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/io/Perf.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.io;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.avro.FooBarSpecificRecord;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.TypeEnum;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.avro.util.Utf8;
/**
* Performance tests for various low level operations of Avro encoding and
* decoding.
*
* @deprecated Use avro-perf module
*/
@Deprecated
public class Perf {
  // Number of entries encoded/decoded per cycle; overridable via system property.
  private static final int COUNT // needs to be a multiple of 4
      = Integer.parseInt(System.getProperty("org.apache.avro.io.perf.count", "250000"));
  // Number of measurement cycles per test; overridable via system property.
  private static final int CYCLES = Integer.parseInt(System.getProperty("org.apache.avro.io.perf.cycles", "800"));

  /**
   * Use a fixed value seed for random number generation to allow for better
   * cross-run comparisons.
   */
  private static final long SEED = 19781210;

  /** Returns a new PRNG seeded with the fixed {@link #SEED}, so runs are reproducible. */
  protected static Random newRandom() {
    return new Random(SEED);
  }
  /**
   * Pairs a {@link Test} implementation with the command-line flag that selects it.
   */
  private static class TestDescriptor {
    Class<? extends Test> test;
    String param; // command-line switch, e.g. "-i"

    TestDescriptor(Class<? extends Test> test, String param) {
      this.test = test;
      this.param = param;
    }

    /**
     * Registers this descriptor in the global flag index AND in the given
     * batch list (side effect on {@code ALL_TESTS}).
     */
    void add(List<TestDescriptor> typeList) {
      ALL_TESTS.put(param, this);
      typeList.add(this);
    }
  }
  // Batch lists: each test descriptor is registered in exactly one batch.
  private static final List<TestDescriptor> BASIC = new ArrayList<>();
  private static final List<TestDescriptor> RECORD = new ArrayList<>();
  private static final List<TestDescriptor> GENERIC = new ArrayList<>();
  private static final List<TestDescriptor> GENERIC_ONETIME = new ArrayList<>();
  private static final List<TestDescriptor> SPECIFIC = new ArrayList<>();
  private static final List<TestDescriptor> REFLECT = new ArrayList<>();
  // ALL_TESTS maps each individual flag ("-i") to its descriptor; BATCHES maps
  // each batch flag ("-basic") to its member list. Both are LinkedHashMaps so
  // usage() and the default all-tests run preserve registration order.
  private static final LinkedHashMap<String, TestDescriptor> ALL_TESTS;
  private static final LinkedHashMap<String, List<TestDescriptor>> BATCHES;

  static {
    // NOTE: TestDescriptor.add() inserts into ALL_TESTS as a side effect, so
    // the registration order below defines the default execution order.
    ALL_TESTS = new LinkedHashMap<>();
    BATCHES = new LinkedHashMap<>();
    BATCHES.put("-basic", BASIC);
    new TestDescriptor(IntTest.class, "-i").add(BASIC);
    new TestDescriptor(SmallLongTest.class, "-ls").add(BASIC);
    new TestDescriptor(LongTest.class, "-l").add(BASIC);
    new TestDescriptor(FloatTest.class, "-f").add(BASIC);
    new TestDescriptor(DoubleTest.class, "-d").add(BASIC);
    new TestDescriptor(BoolTest.class, "-b").add(BASIC);
    new TestDescriptor(BytesTest.class, "-by").add(BASIC);
    new TestDescriptor(StringTest.class, "-s").add(BASIC);
    new TestDescriptor(ArrayTest.class, "-a").add(BASIC);
    new TestDescriptor(MapTest.class, "-m").add(BASIC);
    new TestDescriptor(ExtendedEnumResolveTest.class, "-ee").add(BASIC);
    new TestDescriptor(UnchangedUnionResolveTest.class, "-uu").add(BASIC);
    BATCHES.put("-record", RECORD);
    new TestDescriptor(RecordTest.class, "-R").add(RECORD);
    new TestDescriptor(ValidatingRecord.class, "-Rv").add(RECORD);
    new TestDescriptor(ResolvingRecord.class, "-Rr").add(RECORD);
    new TestDescriptor(RecordWithDefault.class, "-Rd").add(RECORD);
    new TestDescriptor(RecordWithOutOfOrder.class, "-Ro").add(RECORD);
    new TestDescriptor(RecordWithPromotion.class, "-Rp").add(RECORD);
    BATCHES.put("-generic", GENERIC);
    new TestDescriptor(GenericTest.class, "-G").add(GENERIC);
    new TestDescriptor(GenericStrings.class, "-Gs").add(GENERIC);
    new TestDescriptor(GenericNested.class, "-Gn").add(GENERIC);
    new TestDescriptor(GenericNestedFake.class, "-Gf").add(GENERIC);
    new TestDescriptor(GenericWithDefault.class, "-Gd").add(GENERIC);
    new TestDescriptor(GenericWithOutOfOrder.class, "-Go").add(GENERIC);
    new TestDescriptor(GenericWithPromotion.class, "-Gp").add(GENERIC);
    BATCHES.put("-generic-onetime", GENERIC_ONETIME);
    new TestDescriptor(GenericOneTimeDecoderUse.class, "-Gotd").add(GENERIC_ONETIME);
    new TestDescriptor(GenericOneTimeReaderUse.class, "-Gotr").add(GENERIC_ONETIME);
    new TestDescriptor(GenericOneTimeUse.class, "-Got").add(GENERIC_ONETIME);
    BATCHES.put("-specific", SPECIFIC);
    new TestDescriptor(FooBarSpecificRecordTest.class, "-Sf").add(SPECIFIC);
    BATCHES.put("-reflect", REFLECT);
    new TestDescriptor(ReflectRecordTest.class, "-REFr").add(REFLECT);
    new TestDescriptor(ReflectBigRecordTest.class, "-REFbr").add(REFLECT);
    new TestDescriptor(ReflectFloatTest.class, "-REFf").add(REFLECT);
    new TestDescriptor(ReflectDoubleTest.class, "-REFd").add(REFLECT);
    new TestDescriptor(ReflectIntArrayTest.class, "-REFia").add(REFLECT);
    new TestDescriptor(ReflectLongArrayTest.class, "-REFla").add(REFLECT);
    new TestDescriptor(ReflectDoubleArrayTest.class, "-REFda").add(REFLECT);
    new TestDescriptor(ReflectFloatArrayTest.class, "-REFfa").add(REFLECT);
    new TestDescriptor(ReflectNestedFloatArrayTest.class, "-REFnf").add(REFLECT);
    new TestDescriptor(ReflectNestedObjectArrayTest.class, "-REFno").add(REFLECT);
    new TestDescriptor(ReflectNestedLargeFloatArrayTest.class, "-REFnlf").add(REFLECT);
    new TestDescriptor(ReflectNestedLargeFloatArrayBlockedTest.class, "-REFnlfb").add(REFLECT);
  }
  // Column tags for the CSV output selected by the -c option; each tag indexes
  // the boolean csvFormat array built in main().
  private static final int NAME_FIELD = 0;
  private static final int TIME_FIELD = 1;
  private static final int BYTES_PS_FIELD = 2;
  private static final int ENTRIES_PS_FIELD = 3;
  private static final int BYTES_PC_FIELD = 4;
  private static final int MIN_TIME_FIELD = 5;
  private static final int MAX_FIELD_TAG = 5; // highest tag value; sizes csvFormat
private static void usage() {
StringBuilder usage = new StringBuilder("Usage: Perf [-o <file>] [-c <spec>] { -nowrite | -noread }");
StringBuilder details = new StringBuilder();
details.append(" -o file (send output to a file)\n");
details.append(
" -c [n][t][e][b][c][m] (format as no-header CSV; include Name, Time, Entries/sec, Bytes/sec, bytes/Cycle, and/or min time/op; no spec=all fields)\n");
details.append(" -nowrite (do not execute write tests)\n");
details.append(" -noread (do not execute write tests)\n");
for (Map.Entry<String, List<TestDescriptor>> entry : BATCHES.entrySet()) {
List<TestDescriptor> lt = entry.getValue();
String param = entry.getKey();
String paramName = param.substring(1);
usage.append(param).append(" | ");
details.append(" ").append(param).append(" (executes all ").append(paramName).append(" tests):\n");
for (TestDescriptor t : lt) {
usage.append(t.param).append(" | ");
details.append(" ").append(t.param).append(" (").append(t.test.getSimpleName()).append(")\n");
}
}
usage.setLength(usage.length() - 2);
usage.append("}\n");
System.out.println(usage.toString());
System.out.print(details.toString());
}
/**
 * Entry point: parses flags, instantiates the selected tests (all registered
 * tests when none are named on the command line), runs a compile/warmup pass,
 * then times each test for its configured cycles and prints one result row per
 * read and/or write run.
 *
 * Recognized flags: any test/batch selector (see usage()), -o <file>,
 * -c [spec], -nowrite, -noread.
 *
 * @throws Exception if a test cannot be instantiated or a trial run fails
 */
public static void main(String[] args) throws Exception {
  // Every test reads/writes values four at a time (manual 4x unrolling), so
  // the configured count must be a multiple of 4.
  if (0 != (COUNT % 4)) {
    System.out.println("Property 'org.apache.avro.io.perf.count' must be a multiple of 4.");
    System.exit(1);
  }
  List<Test> tests = new ArrayList<>();
  boolean writeTests = true;
  boolean readTests = true;
  String outputfilename = null;
  PrintStream out = System.out;
  boolean[] csvFormat = null; // per-column toggles, indexed by the *_FIELD tags
  String csvFormatString = null;
  for (int i = 0; i < args.length; i++) {
    String a = args[i];
    // First try the arg as a single-test selector...
    TestDescriptor t = ALL_TESTS.get(a);
    if (null != t) {
      tests.add(t.test.newInstance());
      continue;
    }
    // ...then as a batch selector expanding to several tests.
    List<TestDescriptor> lt = BATCHES.get(a);
    if (null != lt) {
      for (TestDescriptor td : lt) {
        tests.add(td.test.newInstance());
      }
      continue;
    }
    if (i < args.length - 1 && "-o".equals(a)) {
      outputfilename = args[++i];
      out = new PrintStream(new FileOutputStream(outputfilename));
      continue;
    }
    if ("-c".equals(a)) {
      // "-c" with no spec (last arg, or followed by another flag) enables all
      // columns.
      if (i == args.length - 1 || args[i + 1].startsWith("-")) {
        csvFormatString = "ntebcm"; // For diagnostics
        csvFormat = new boolean[] { true, true, true, true, true, true };
      } else {
        csvFormatString = args[++i];
        csvFormat = new boolean[MAX_FIELD_TAG + 1];
        // NOTE(review): 'e' (entries/sec) toggles BYTES_PS_FIELD and 'b'
        // (bytes/sec) toggles ENTRIES_PS_FIELD; the constants are misnamed,
        // but printResult uses the same swapped mapping, so output is correct.
        for (char c : csvFormatString.toCharArray())
          switch (c) {
          case 'n':
            csvFormat[NAME_FIELD] = true;
            break;
          case 't':
            csvFormat[TIME_FIELD] = true;
            break;
          case 'e':
            csvFormat[BYTES_PS_FIELD] = true;
            break;
          case 'b':
            csvFormat[ENTRIES_PS_FIELD] = true;
            break;
          case 'c':
            csvFormat[BYTES_PC_FIELD] = true;
            break;
          case 'm':
            csvFormat[MIN_TIME_FIELD] = true;
            break;
          default:
            usage();
            System.exit(1);
          }
      }
      continue;
    }
    if ("-nowrite".equals(a)) {
      writeTests = false;
      continue;
    }
    if ("-noread".equals(a)) {
      readTests = false;
      continue;
    }
    // Unrecognized argument: print help and abort.
    usage();
    System.exit(1);
  }
  // No explicit selection: run every registered test.
  if (tests.isEmpty()) {
    for (Map.Entry<String, TestDescriptor> entry : ALL_TESTS.entrySet()) {
      TestDescriptor t = entry.getValue();
      Test test = t.test.newInstance();
      tests.add(test);
    }
  }
  System.out.println("Executing tests: \n" + tests + "\n readTests:" + readTests + "\n writeTests:" + writeTests
      + "\n cycles=" + CYCLES + "\n count=" + (COUNT / 1000) + "K");
  if (out != System.out)
    System.out.println(" Writing to: " + outputfilename);
  if (csvFormat != null)
    System.out.println(" CSV format: " + csvFormatString);
  TestResult tr = new TestResult();
  // First pass: run each test once so the JIT compiles everything before any
  // timing is recorded.
  for (Test t : tests) {
    try {
      // get everything to compile once
      t.init();
      if (t.isReadTest()) {
        t.readTest();
      }
      if (t.isWriteTest()) {
        t.writeTest();
      }
      t.reset();
    } catch (Exception e) {
      System.out.println("Failed to execute test: " + t.getClass().getSimpleName());
      throw e;
    }
  }
  if (csvFormat == null)
    printHeader();
  // Second pass: per-test warmup (half the cycles, untimed) followed by the
  // timed runs whose results are accumulated and printed.
  for (Test t : tests) {
    // warmup JVM
    t.init();
    if (t.isReadTest() && readTests) {
      for (int i = 0; i < t.cycles / 2; i++) {
        t.readTest();
      }
    }
    if (t.isWriteTest() && writeTests) {
      for (int i = 0; i < t.cycles / 2; i++) {
        t.writeTest();
      }
    }
    // test
    System.gc(); // reduce the chance of GC pauses inside the timed region
    if (t.isReadTest() && readTests) {
      tr.reset();
      for (int i = 0; i < t.cycles; i++) {
        tr.update(t.readTest());
      }
      printResult(out, csvFormat, tr, t, t.name + "Read");
    }
    if (t.isWriteTest() && writeTests) {
      tr.reset();
      for (int i = 0; i < t.cycles; i++) {
        tr.update(t.writeTest());
      }
      printResult(out, csvFormat, tr, t, t.name + "Write");
    }
    t.reset(); // free the generated data before the next test runs
  }
}
/**
 * Accumulates timing results across the cycles of a single test run: the total
 * elapsed time and the fastest single cycle, both in nanoseconds.
 */
private static class TestResult {
  public long totalTime;
  public long minTime;

  /** Clears the accumulator before a new series of cycles. */
  public void reset() {
    minTime = Long.MAX_VALUE;
    totalTime = 0L;
  }

  /** Folds one cycle's elapsed time into the totals and returns it. */
  public long update(long t) {
    if (t < minTime) {
      minTime = t;
    }
    totalTime += t;
    return t;
  }
}
/** Prints the fixed-width column header used by the human-readable report. */
private static void printHeader() {
  System.out.println(String.format("%60s time M entries/sec M bytes/sec bytes/cycle", "test name"));
}
/**
 * Formats and prints one result row. In CSV mode only the columns enabled in
 * {@code csv} are emitted, comma-separated with no header; otherwise a
 * fixed-width human-readable row is printed.
 *
 * @param o    destination stream
 * @param csv  column toggles indexed by the *_FIELD tags, or null for the
 *             human-readable format
 * @param tr   accumulated timings for the finished test
 * @param t    the test that produced the timings
 * @param name display name for this run (test name + "Read" or "Write")
 */
private static void printResult(PrintStream o, boolean[] csv, TestResult tr, Test t, String name) {
  long s = tr.totalTime / 1000; // total time in microseconds (totalTime is ns)
  double entries = (t.cycles * (double) t.count);
  double bytes = t.cycles * (double) t.encodedSize;
  StringBuilder result = new StringBuilder();
  if (csv != null) {
    boolean commaneeded = false;
    for (int i = 0; i <= MAX_FIELD_TAG; i++) {
      if (!csv[i])
        continue;
      if (commaneeded)
        result.append(",");
      else
        commaneeded = true;
      switch (i) {
      case NAME_FIELD:
        result.append(name);
        break;
      case TIME_FIELD:
        // microseconds / 1000 = milliseconds
        result.append(String.format("%d", (s / 1000)));
        break;
      // NOTE(review): BYTES_PS_FIELD/ENTRIES_PS_FIELD are named the wrong way
      // round (BYTES_PS_FIELD prints entries per microsecond, i.e. M
      // entries/sec); this matches the equally-swapped flag parsing in main(),
      // so the printed output is consistent with the documented spec letters.
      case BYTES_PS_FIELD:
        result.append(String.format("%.3f", (entries / s)));
        break;
      case ENTRIES_PS_FIELD:
        result.append(String.format("%.3f", (bytes / s)));
        break;
      case BYTES_PC_FIELD:
        result.append(String.format("%d", t.encodedSize));
        break;
      case MIN_TIME_FIELD:
        // fastest single cycle, in nanoseconds
        result.append(String.format("%d", tr.minTime));
        break;
      }
    }
  } else {
    result.append(String.format("%42s: %6d ms ", name, (s / 1000)));
    result.append(String.format("%10.3f %11.3f %11d", (entries / s), (bytes / s), t.encodedSize));
  }
  o.println(result.toString());
}
/**
 * Base class for all benchmarks. Holds the test's display name, the number of
 * timed cycles, the per-cycle entry count, and flags saying whether the test
 * participates in read and/or write runs.
 */
private abstract static class Test {

  /**
   * Name of the test.
   */
  public final String name;
  /** Number of values processed per cycle. */
  public final int count;
  /** Number of timed iterations; main() also uses cycles/2 for warmup. */
  public final int cycles;
  /** Size in bytes of one encoded cycle's worth of data; set during init(). */
  public long encodedSize = 0;
  protected boolean isReadTest = true;
  protected boolean isWriteTest = true;
  // Shared factories so all tests exercise the same codec configuration.
  static DecoderFactory decoder_factory = new DecoderFactory();
  static EncoderFactory encoder_factory = new EncoderFactory();

  public Test(String name, int cycles, int count) {
    this.name = name;
    this.cycles = cycles;
    this.count = count;
  }

  /**
   * Reads data from a Decoder and returns the time taken in nanoseconds.
   */
  abstract long readTest() throws IOException;

  /**
   * Writes data to an Encoder and returns the time taken in nanoseconds.
   */
  abstract long writeTest() throws IOException;

  final boolean isWriteTest() {
    return isWriteTest;
  }

  final boolean isReadTest() {
    return isReadTest;
  }

  /** initializes data for read and write tests **/
  abstract void init() throws IOException;

  /**
   * clears generated data arrays and other large objects created during
   * initialization
   **/
  abstract void reset();

  @Override
  public String toString() {
    return this.getClass().getSimpleName();
  }
}
/**
 * The basic test writes a simple schema directly to an encoder or reads from an
 * array. It does not use GenericDatumReader or any higher level constructs,
 * just manual serialization. Subclasses supply the source data and the
 * per-value read/write loops; this class owns the encode/decode plumbing and
 * the timing of each cycle.
 */
private static abstract class BasicTest extends Test {
  /**
   * Switch to using a DirectBinaryEncoder rather than a BufferedBinaryEncoder for
   * writing tests. DirectBinaryEncoders are noticably slower than Buffered ones,
   * but they can be more consistent in their performance, which can make it
   * easier to detect small performance improvements.
   */
  public static boolean USE_DIRECT_ENCODER = Boolean
      .parseBoolean(System.getProperty("org.apache.avro.io.perf.use-direct", "false"));
  /** Writer schema parsed from the JSON passed to the constructor. */
  protected final Schema schema;
  /** Encoded bytes produced by init(); the input for every read cycle. */
  protected byte[] data;

  BasicTest(String name, String json) throws IOException {
    this(name, json, 1);
  }

  // factor divides COUNT so that larger per-entry payloads still yield
  // comparable total work per cycle.
  BasicTest(String name, String json, int factor) throws IOException {
    super(name, CYCLES, COUNT / factor);
    this.schema = new Schema.Parser().parse(json);
  }

  @Override
  public final long readTest() throws IOException {
    // Decoder setup happens outside the timed region.
    Decoder d = getDecoder();
    long t = System.nanoTime();
    readInternal(d);
    return (System.nanoTime() - t);
  }

  @Override
  public final long writeTest() throws IOException {
    // Encoder setup happens outside the timed region; flush() is included so
    // buffered encoders pay their full cost inside the measurement.
    Encoder e = getEncoder();
    long t = System.nanoTime();
    writeInternal(e);
    e.flush();
    return (System.nanoTime() - t);
  }

  protected Decoder getDecoder() throws IOException {
    return newDecoder();
  }

  private Encoder getEncoder() throws IOException {
    return newEncoder(getOutputStream());
  }

  protected Decoder newDecoder() {
    return decoder_factory.binaryDecoder(data, null);
  }

  protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
    Encoder e = (USE_DIRECT_ENCODER ? encoder_factory.directBinaryEncoder(out, null)
        : encoder_factory.binaryEncoder(out, null));
    // Encoder e = encoder_factory.blockingBinaryEncoder(out, null);
    // Encoder e = new LegacyBinaryEncoder(out);
    return e;
  }

  private ByteArrayOutputStream getOutputStream() {
    // Presize to the known encoded size once init() has run, so write cycles
    // avoid buffer growth.
    return new ByteArrayOutputStream((int) (encodedSize > 0 ? encodedSize : count));
  }

  @Override
  void init() throws IOException {
    // Generate source data, encode it once, and keep the bytes for read tests.
    genSourceData();
    ByteArrayOutputStream baos = getOutputStream();
    Encoder e = newEncoder(baos);
    writeInternal(e);
    e.flush();
    data = baos.toByteArray();
    encodedSize = data.length;
    // System.out.println(this.getClass().getSimpleName() + " encodedSize=" +
    // encodedSize);
  }

  /** Populates the subclass's source-data array; called once per init(). */
  abstract void genSourceData();

  /** Timed: decodes one cycle's worth of values from {@code d}. */
  abstract void readInternal(Decoder d) throws IOException;

  /** Timed: encodes one cycle's worth of values to {@code e}. */
  abstract void writeInternal(Encoder e) throws IOException;
}
/**
 * Benchmarks the raw int codec over values that span 1- to 5-byte varint
 * encodings. The timed loops stay 4x-unrolled to minimize loop overhead.
 */
static class IntTest extends BasicTest {
  protected int[] sourceData = null;

  public IntTest() throws IOException {
    this("Int", "{ \"type\": \"int\"} ");
  }

  private IntTest(String name, String schema) throws IOException {
    super(name, schema);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new int[count];
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      sourceData[idx] = rand.nextInt(50); // fits in 1 byte
      sourceData[idx + 1] = rand.nextInt(5000); // fits in 2 bytes
      sourceData[idx + 2] = rand.nextInt(500000); // fits in 3 bytes
      sourceData[idx + 3] = rand.nextInt(150000000); // most in 4, some in 5
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readInt();
      d.readInt();
      d.readInt();
      d.readInt();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeInt(sourceData[idx]);
      e.writeInt(sourceData[idx + 1]);
      e.writeInt(sourceData[idx + 2]);
      e.writeInt(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * Same source values as IntTest, but exercised through the long codec: shows
 * the cost of readLong/writeLong on small numbers.
 */
static class SmallLongTest extends IntTest {
  public SmallLongTest() throws IOException {
    super("SmallLong", "{ \"type\": \"long\"} ");
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readLong();
      d.readLong();
      d.readLong();
      d.readLong();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeLong(sourceData[idx]);
      e.writeLong(sourceData[idx + 1]);
      e.writeLong(sourceData[idx + 2]);
      e.writeLong(sourceData[idx + 3]);
    }
  }
}
/**
 * Benchmarks the long codec over values spanning 1- to 9-byte varint
 * encodings, with the last 16 entries forced to full-width longs.
 */
static class LongTest extends BasicTest {
  private long[] sourceData = null;

  public LongTest() throws IOException {
    super("Long", "{ \"type\": \"long\"} ");
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new long[count];
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      sourceData[idx] = rand.nextLong() % 0x7FL; // half fit in 1, half in 2
      sourceData[idx + 1] = rand.nextLong() % 0x1FFFFFL; // half fit in <=3, half in 4
      sourceData[idx + 2] = rand.nextLong() % 0x3FFFFFFFFL; // half in <=5, half in 6
      sourceData[idx + 3] = rand.nextLong() % 0x1FFFFFFFFFFFFL; // half in <=8, half in 9
    }
    // Overwrite the final 16 entries with full-range longs.
    for (int idx = sourceData.length - 16; idx < sourceData.length; idx++) {
      sourceData[idx] = rand.nextLong();
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readLong();
      d.readLong();
      d.readLong();
      d.readLong();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeLong(sourceData[idx]);
      e.writeLong(sourceData[idx + 1]);
      e.writeLong(sourceData[idx + 2]);
      e.writeLong(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * Benchmarks the fixed-width float codec. Also serves as the data source for
 * ArrayTest and MapTest, which reuse its sourceData.
 */
static class FloatTest extends BasicTest {
  float[] sourceData = null;

  public FloatTest() throws IOException {
    this("Float", "{ \"type\": \"float\"} ");
  }

  public FloatTest(String name, String schema) throws IOException {
    super(name, schema);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new float[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = rand.nextFloat();
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    for (int idx = 0; idx < count; idx += 4) {
      d.readFloat();
      d.readFloat();
      d.readFloat();
      d.readFloat();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeFloat(sourceData[idx]);
      e.writeFloat(sourceData[idx + 1]);
      e.writeFloat(sourceData[idx + 2]);
      e.writeFloat(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/** Benchmarks the fixed-width double codec. */
static class DoubleTest extends BasicTest {
  double[] sourceData = null;

  public DoubleTest() throws IOException {
    super("Double", "{ \"type\": \"double\"} ");
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new double[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = rand.nextDouble();
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    for (int idx = 0; idx < count; idx += 4) {
      d.readDouble();
      d.readDouble();
      d.readDouble();
      d.readDouble();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeDouble(sourceData[idx]);
      e.writeDouble(sourceData[idx + 1]);
      e.writeDouble(sourceData[idx + 2]);
      e.writeDouble(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/** Benchmarks the single-byte boolean codec. */
static class BoolTest extends BasicTest {
  boolean[] sourceData = null;

  public BoolTest() throws IOException {
    super("Boolean", "{ \"type\": \"boolean\"} ");
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new boolean[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = rand.nextBoolean();
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readBoolean();
      d.readBoolean();
      d.readBoolean();
      d.readBoolean();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeBoolean(sourceData[idx]);
      e.writeBoolean(sourceData[idx + 1]);
      e.writeBoolean(sourceData[idx + 2]);
      e.writeBoolean(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * Benchmarks the bytes codec with variable-length payloads of 0-69 bytes.
 * Reads reuse one preallocated ByteBuffer to avoid measuring allocation.
 */
static class BytesTest extends BasicTest {
  byte[][] sourceData = null;

  public BytesTest() throws IOException {
    super("Bytes", "{ \"type\": \"bytes\"} ", 5);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new byte[count][];
    for (int idx = 0; idx < sourceData.length; idx++) {
      byte[] chunk = new byte[rand.nextInt(70)];
      rand.nextBytes(chunk);
      sourceData[idx] = chunk;
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    ByteBuffer buffer = ByteBuffer.allocate(70);
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readBytes(buffer);
      d.readBytes(buffer);
      d.readBytes(buffer);
      d.readBytes(buffer);
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeBytes(sourceData[idx]);
      e.writeBytes(sourceData[idx + 1]);
      e.writeBytes(sourceData[idx + 2]);
      e.writeBytes(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * Builds a random lowercase ASCII string of length 0-69 from characters in
 * the range 'a' (inclusive) to 'z' (exclusive).
 */
private static String randomString(Random r) {
  char[] chars = new char[r.nextInt(70)];
  for (int idx = 0; idx < chars.length; idx++) {
    chars[idx] = (char) ('a' + r.nextInt('z' - 'a'));
  }
  return new String(chars);
}
/**
 * Benchmarks the string codec: reads go through a reused Utf8 and are
 * converted to String to include decode cost in the measurement.
 */
static class StringTest extends BasicTest {
  String[] sourceData = null;

  public StringTest() throws IOException {
    super("String", "{ \"type\": \"string\"} ", 5);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new String[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = randomString(rand);
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    Utf8 utf = new Utf8();
    int quads = count / 4;
    for (int idx = 0; idx < quads; idx++) {
      d.readString(utf).toString();
      d.readString(utf).toString();
      d.readString(utf).toString();
      d.readString(utf).toString();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.writeString(sourceData[idx]);
      e.writeString(sourceData[idx + 1]);
      e.writeString(sourceData[idx + 2]);
      e.writeString(sourceData[idx + 3]);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * Benchmarks array framing overhead: a single-element outer array whose one
 * element contains an inner array of 4-float records, driven with manual
 * Encoder/Decoder calls. Reuses FloatTest's float sourceData.
 */
static class ArrayTest extends FloatTest {
  public ArrayTest() throws IOException {
    super("Array",
        "{ \"type\": \"array\", \"items\": " + " { \"type\": \"record\", \"name\":\"Foo\", \"fields\": "
            + " [{\"name\":\"bar\", \"type\":" + " {\"type\": \"array\", \"items\": "
            + " { \"type\": \"record\", \"name\":\"Vals\", \"fields\": ["
            + " {\"name\":\"f1\", \"type\":\"float\"}," + " {\"name\":\"f2\", \"type\":\"float\"},"
            + " {\"name\":\"f3\", \"type\":\"float\"}," + " {\"name\":\"f4\", \"type\":\"float\"}]"
            + " }" + " }" + " }]}}");
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    // The call sequence mirrors writeInternal's nesting exactly: the first
    // readArrayStart consumes the outer (one-element) array header, the loop
    // walks the inner array's blocks, and the trailing arrayNext consumes the
    // outer array's terminator.
    d.readArrayStart();
    for (long i = d.readArrayStart(); i != 0; i = d.arrayNext()) {
      for (long j = 0; j < i; j++) {
        d.readFloat();
        d.readFloat();
        d.readFloat();
        d.readFloat();
      }
    }
    d.arrayNext();
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    // Outer array holds a single item whose inner array contains one 4-float
    // record per 4 floats of source data.
    int items = sourceData.length / 4;
    e.writeArrayStart();
    e.setItemCount(1);
    e.startItem();
    e.writeArrayStart();
    e.setItemCount(items);
    for (int i = 0; i < sourceData.length; i += 4) {
      e.startItem();
      e.writeFloat(sourceData[i]);
      e.writeFloat(sourceData[i + 1]);
      e.writeFloat(sourceData[i + 2]);
      e.writeFloat(sourceData[i + 3]);
    }
    e.writeArrayEnd();
    e.writeArrayEnd();
  }
}
/**
 * Benchmarks map framing overhead: every entry maps the key "foo" to a
 * 4-float record. Reuses FloatTest's float sourceData; the Encoder/Decoder
 * call sequence is preserved exactly as the benchmark's subject.
 */
static class MapTest extends FloatTest {
  public MapTest() throws IOException {
    super("Map",
        "{ \"type\": \"map\", \"values\": " + " { \"type\": \"record\", \"name\":\"Vals\", \"fields\": ["
            + " {\"name\":\"f1\", \"type\":\"float\"}," + " {\"name\":\"f2\", \"type\":\"float\"},"
            + " {\"name\":\"f3\", \"type\":\"float\"}," + " {\"name\":\"f4\", \"type\":\"float\"}]" + " }} ");
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    Utf8 key = new Utf8(); // reused across entries to avoid allocation
    for (long blockSize = d.readMapStart(); blockSize != 0; blockSize = d.mapNext()) {
      for (long entry = 0; entry < blockSize; entry++) {
        key = d.readString(key);
        d.readFloat();
        d.readFloat();
        d.readFloat();
        d.readFloat();
      }
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    int entryCount = sourceData.length / 4;
    e.writeMapStart();
    e.setItemCount(entryCount);
    Utf8 key = new Utf8("foo");
    for (int idx = 0; idx < sourceData.length; idx += 4) {
      e.startItem();
      e.writeString(key);
      e.writeFloat(sourceData[idx]);
      e.writeFloat(sourceData[idx + 1]);
      e.writeFloat(sourceData[idx + 2]);
      e.writeFloat(sourceData[idx + 3]);
    }
    e.writeMapEnd();
  }
}
// Writer schema for the record tests: three double fields followed by three
// int fields.
private static final String RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
    + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
    + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

// Variant of RECORD_SCHEMA whose three double fields are wrapped in a nested
// single-field record type "D"; used by the GenericNested* tests.
private static final String NESTED_RECORD_SCHEMA = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \n" + "{ \"type\": \"record\", \"name\": \"D\", \"fields\": [\n"
    + "{\"name\": \"dbl\", \"type\": \"double\" }]\n" + "} },\n" + "{ \"name\": \"f2\", \"type\": \"D\" },\n"
    + "{ \"name\": \"f3\", \"type\": \"D\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
    + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";
/**
 * Plain mutable struct matching RECORD_SCHEMA: three doubles and three ints.
 * The no-arg constructor exists so reflection-based tests can instantiate it.
 */
private static class Rec {
  double f1;
  double f2;
  double f3;
  int f4;
  int f5;
  int f6;

  Rec() {
  }

  /** Fills every field from the supplied random source. */
  Rec(Random rand) {
    this.f1 = rand.nextDouble();
    this.f2 = rand.nextDouble();
    this.f3 = rand.nextDouble();
    this.f4 = rand.nextInt();
    this.f5 = rand.nextInt();
    this.f6 = rand.nextInt();
  }
}
/**
 * Benchmarks hand-rolled serialization of the six-field RECORD_SCHEMA with
 * direct Encoder/Decoder calls (no DatumReader/DatumWriter machinery).
 */
static class RecordTest extends BasicTest {
  Rec[] sourceData = null;

  public RecordTest() throws IOException {
    this("Record");
  }

  public RecordTest(String name) throws IOException {
    super(name, RECORD_SCHEMA, 6);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new Rec[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = new Rec(rand);
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    for (int idx = 0; idx < count; idx++) {
      d.readDouble();
      d.readDouble();
      d.readDouble();
      d.readInt();
      d.readInt();
      d.readInt();
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (Rec rec : sourceData) {
      e.writeDouble(rec.f1);
      e.writeDouble(rec.f2);
      e.writeDouble(rec.f3);
      e.writeInt(rec.f4);
      e.writeInt(rec.f5);
      e.writeInt(rec.f6);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * RecordTest variant that wraps both directions in validating codecs, so the
 * measurement includes schema-conformance checking overhead.
 */
static class ValidatingRecord extends RecordTest {
  ValidatingRecord() throws IOException {
    super("ValidatingRecord");
  }

  @Override
  protected Decoder getDecoder() throws IOException {
    // Wrap the plain binary decoder in a validating one.
    return new ValidatingDecoder(schema, super.getDecoder());
  }

  @Override
  protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
    // Wrap the plain binary encoder in a validating one.
    return encoder_factory.validatingEncoder(schema, super.newEncoder(out));
  }
}
/**
 * RecordTest variant that decodes through a ResolvingDecoder with identical
 * writer and reader schemas, isolating the resolver's baseline overhead.
 * Read-only: resolution does not apply to writing.
 */
static class ResolvingRecord extends RecordTest {
  public ResolvingRecord() throws IOException {
    super("ResolvingRecord");
    this.isWriteTest = false;
  }

  @Override
  protected Decoder getDecoder() throws IOException {
    return new ResolvingDecoder(schema, schema, super.getDecoder());
  }
}
// Reader schema for RecordWithDefault: RECORD_SCHEMA plus two extra string
// fields carrying default values that the resolver must fill in.
private static final String RECORD_SCHEMA_WITH_DEFAULT = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
    + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"int\" },\n"
    + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" },\n"
    + "{ \"name\": \"f7\", \"type\": \"string\", " + "\"default\": \"undefined\" },\n"
    + "{ \"name\": \"f8\", \"type\": \"string\"," + "\"default\": \"undefined\" }\n" + "] }";

// Reader schema for RecordWithOutOfOrder: the same fields as RECORD_SCHEMA but
// declared in a different order, forcing the resolver to reorder.
private static final String RECORD_SCHEMA_WITH_OUT_OF_ORDER = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f3\", \"type\": \"double\" },\n"
    + "{ \"name\": \"f5\", \"type\": \"int\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
    + "{ \"name\": \"f4\", \"type\": \"int\" },\n" + "{ \"name\": \"f6\", \"type\": \"int\" }\n" + "] }";

// Reader schema for RecordWithPromotion: the int fields of RECORD_SCHEMA
// promoted to long, exercising the resolver's type-promotion path.
private static final String RECORD_SCHEMA_WITH_PROMOTION = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \"double\" },\n" + "{ \"name\": \"f2\", \"type\": \"double\" },\n"
    + "{ \"name\": \"f3\", \"type\": \"double\" },\n" + "{ \"name\": \"f4\", \"type\": \"long\" },\n"
    + "{ \"name\": \"f5\", \"type\": \"long\" },\n" + "{ \"name\": \"f6\", \"type\": \"long\" }\n" + "] }";
/**
 * Tests the performance of introducing default values: the reader schema adds
 * two defaulted string fields, and decoding follows the resolver's reported
 * field order. Read-only.
 */
static class RecordWithDefault extends RecordTest {
  private final Schema readerSchema;

  public RecordWithDefault() throws IOException {
    super("RecordWithDefault");
    readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
    this.isWriteTest = false;
  }

  @Override
  protected Decoder getDecoder() throws IOException {
    return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
  }

  @Override
  protected void readInternal(Decoder d) throws IOException {
    ResolvingDecoder resolver = (ResolvingDecoder) d;
    Field[] fields = resolver.readFieldOrder();
    for (int rec = 0; rec < count; rec++) {
      for (Field field : fields) {
        // Reader-schema positions: 0-2 doubles, 3-5 ints, 6-7 strings.
        int pos = field.pos();
        if (pos <= 2) {
          resolver.readDouble();
        } else if (pos <= 5) {
          resolver.readInt();
        } else if (pos <= 7) {
          resolver.readString(null);
        }
      }
    }
  }
}
/**
 * Tests the performance of resolving a change in field order: the reader
 * schema declares the same fields as the writer but in a different sequence.
 * Read-only.
 */
static class RecordWithOutOfOrder extends RecordTest {
  private final Schema readerSchema;

  public RecordWithOutOfOrder() throws IOException {
    super("RecordWithOutOfOrder");
    readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
    this.isWriteTest = false;
  }

  @Override
  protected Decoder getDecoder() throws IOException {
    return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
  }

  @Override
  protected void readInternal(Decoder d) throws IOException {
    ResolvingDecoder resolver = (ResolvingDecoder) d;
    Field[] fields = resolver.readFieldOrder();
    for (int rec = 0; rec < count; rec++) {
      for (Field field : fields) {
        // Reader-schema positions: 0 (f1), 1 (f3), 3 (f2) are doubles;
        // 2 (f5), 4 (f4), 5 (f6) are ints.
        int pos = field.pos();
        if (pos == 0 || pos == 1 || pos == 3) {
          resolver.readDouble();
        } else if (pos == 2 || pos == 4 || pos == 5) {
          resolver.readInt();
        }
      }
    }
  }
}
/**
 * Tests the performance of resolving a type promotion: the reader schema
 * promotes the writer's int fields to long. Read-only.
 */
static class RecordWithPromotion extends RecordTest {
  private final Schema readerSchema;

  public RecordWithPromotion() throws IOException {
    super("RecordWithPromotion");
    readerSchema = new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
    this.isWriteTest = false;
  }

  @Override
  protected Decoder getDecoder() throws IOException {
    return new ResolvingDecoder(schema, readerSchema, super.getDecoder());
  }

  @Override
  protected void readInternal(Decoder d) throws IOException {
    ResolvingDecoder resolver = (ResolvingDecoder) d;
    Field[] fields = resolver.readFieldOrder();
    for (int rec = 0; rec < count; rec++) {
      for (Field field : fields) {
        // Reader-schema positions: 0-2 doubles, 3-5 promoted longs.
        int pos = field.pos();
        if (pos <= 2) {
          resolver.readDouble();
        } else if (pos <= 5) {
          resolver.readLong();
        }
      }
    }
  }
}
/**
 * Benchmarks GenericDatumReader/GenericDatumWriter over the six-field
 * RECORD_SCHEMA. Subclasses override newReader()/getReader() to vary the
 * reader configuration.
 */
static class GenericTest extends BasicTest {
  GenericRecord[] sourceData = null;
  protected final GenericDatumReader<Object> reader;

  public GenericTest() throws IOException {
    this("Generic");
  }

  protected GenericTest(String name) throws IOException {
    this(name, RECORD_SCHEMA);
  }

  protected GenericTest(String name, String writerSchema) throws IOException {
    super(name, writerSchema, 12);
    reader = newReader();
  }

  /** Returns the reader used by readInternal; cached by default. */
  protected GenericDatumReader<Object> getReader() {
    return reader;
  }

  /** Constructs a fresh reader; overridden by resolving subclasses. */
  protected GenericDatumReader<Object> newReader() {
    return new GenericDatumReader<>(schema);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new GenericRecord[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      GenericRecord record = new GenericData.Record(schema);
      record.put(0, rand.nextDouble());
      record.put(1, rand.nextDouble());
      record.put(2, rand.nextDouble());
      record.put(3, rand.nextInt());
      record.put(4, rand.nextInt());
      record.put(5, rand.nextInt());
      sourceData[idx] = record;
    }
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    for (int idx = 0; idx < count; idx++) {
      getReader().read(null, d);
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema);
    for (GenericRecord record : sourceData) {
      writer.write(record, e);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
// Writer schema for GenericStrings: a record of three string fields.
private static final String GENERIC_STRINGS = "{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n"
    + "{ \"name\": \"f1\", \"type\": \"string\" },\n" + "{ \"name\": \"f2\", \"type\": \"string\" },\n"
    + "{ \"name\": \"f3\", \"type\": \"string\" }\n" + "] }";
/** GenericTest variant whose records hold three random strings. */
static class GenericStrings extends GenericTest {
  public GenericStrings() throws IOException {
    super("GenericStrings", GENERIC_STRINGS);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new GenericRecord[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      GenericRecord record = new GenericData.Record(schema);
      record.put(0, randomString(rand));
      record.put(1, randomString(rand));
      record.put(2, randomString(rand));
      sourceData[idx] = record;
    }
  }
}
/**
 * GenericTest variant over NESTED_RECORD_SCHEMA: each double is wrapped in a
 * nested single-field record.
 */
static class GenericNested extends GenericTest {
  public GenericNested() throws IOException {
    super("GenericNested_", NESTED_RECORD_SCHEMA);
  }

  @Override
  void genSourceData() {
    this.sourceData = generateGenericNested(schema, count);
  }
}
/**
 * Builds {@code count} generic records conforming to NESTED_RECORD_SCHEMA:
 * fields 0-2 are nested single-double records, fields 3-5 are random ints.
 */
static GenericRecord[] generateGenericNested(Schema schema, int count) {
  Random rand = newRandom();
  GenericRecord[] result = new GenericRecord[count];
  Schema doubleSchema = schema.getFields().get(0).schema();
  for (int idx = 0; idx < result.length; idx++) {
    GenericRecord record = new GenericData.Record(schema);
    // Fields 0..2 each hold a nested record with a single random double.
    for (int field = 0; field < 3; field++) {
      GenericRecord nested = new GenericData.Record(doubleSchema);
      nested.put(0, rand.nextDouble());
      record.put(field, nested);
    }
    record.put(3, rand.nextInt());
    record.put(4, rand.nextInt());
    record.put(5, rand.nextInt());
    result[idx] = record;
  }
  return result;
}
/**
 * Hand-rolled counterpart to GenericNested: builds and consumes generic
 * records with direct Encoder/Decoder calls instead of
 * GenericDatumReader/GenericDatumWriter, so record-construction cost is
 * measured without the datum machinery. Decoded records are discarded.
 */
static class GenericNestedFake extends BasicTest {
  GenericRecord[] sourceData = null;

  public GenericNestedFake() throws IOException {
    super("GenericNestedFake_", NESTED_RECORD_SCHEMA, 12);
  }

  @Override
  void readInternal(Decoder d) throws IOException {
    Schema doubleSchema = schema.getFields().get(0).schema();
    for (int idx = 0; idx < count; idx++) {
      GenericRecord record = new GenericData.Record(schema);
      GenericRecord nested;
      nested = new GenericData.Record(doubleSchema);
      nested.put(0, d.readDouble());
      record.put(0, nested);
      nested = new GenericData.Record(doubleSchema);
      nested.put(0, d.readDouble());
      record.put(1, nested);
      nested = new GenericData.Record(doubleSchema);
      nested.put(0, d.readDouble());
      record.put(2, nested);
      record.put(3, d.readInt());
      record.put(4, d.readInt());
      record.put(5, d.readInt());
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (GenericRecord record : sourceData) {
      GenericRecord nested;
      nested = (GenericRecord) record.get(0);
      e.writeDouble((Double) nested.get(0));
      nested = (GenericRecord) record.get(1);
      e.writeDouble((Double) nested.get(0));
      nested = (GenericRecord) record.get(2);
      e.writeDouble((Double) nested.get(0));
      e.writeInt((Integer) record.get(3));
      e.writeInt((Integer) record.get(4));
      e.writeInt((Integer) record.get(5));
    }
  }

  @Override
  void genSourceData() {
    this.sourceData = generateGenericNested(schema, count);
  }

  @Override
  void reset() {
    data = null;
    sourceData = null;
  }
}
/**
 * Base for read-only generic tests that resolve the writer schema against a
 * subclass-supplied reader schema.
 */
private static abstract class GenericResolving extends GenericTest {
  protected GenericResolving(String name) throws IOException {
    super(name);
    this.isWriteTest = false; // resolution applies only to reading
  }

  @Override
  protected GenericDatumReader<Object> newReader() {
    return new GenericDatumReader<>(schema, getReaderSchema());
  }

  /** Supplies the reader schema to resolve against. */
  protected abstract Schema getReaderSchema();
}
/** Generic resolution benchmark: reader schema adds defaulted fields. */
static class GenericWithDefault extends GenericResolving {
  GenericWithDefault() throws IOException {
    super("GenericWithDefault_");
  }

  @Override
  protected Schema getReaderSchema() {
    return new Schema.Parser().parse(RECORD_SCHEMA_WITH_DEFAULT);
  }
}
/** Generic resolution benchmark: reader schema reorders the fields. */
static class GenericWithOutOfOrder extends GenericResolving {
  GenericWithOutOfOrder() throws IOException {
    super("GenericWithOutOfOrder_");
  }

  @Override
  protected Schema getReaderSchema() {
    return new Schema.Parser().parse(RECORD_SCHEMA_WITH_OUT_OF_ORDER);
  }
}
/** Generic resolution benchmark: reader schema promotes int fields to long. */
static class GenericWithPromotion extends GenericResolving {
  GenericWithPromotion() throws IOException {
    super("GenericWithPromotion_");
  }

  @Override
  protected Schema getReaderSchema() {
    return new Schema.Parser().parse(RECORD_SCHEMA_WITH_PROMOTION);
  }
}
/**
 * GenericTest variant that builds a fresh (non-reused) Decoder for every
 * cycle, measuring decoder-construction overhead. Read-only.
 */
static class GenericOneTimeDecoderUse extends GenericTest {
  public GenericOneTimeDecoderUse() throws IOException {
    super("GenericOneTimeDecoderUse_");
    this.isWriteTest = false;
  }

  @Override
  protected Decoder getDecoder() {
    return newDecoder();
  }
}
/**
 * GenericTest variant that builds a fresh GenericDatumReader for every read,
 * measuring reader-construction overhead. Read-only.
 */
static class GenericOneTimeReaderUse extends GenericTest {
  public GenericOneTimeReaderUse() throws IOException {
    super("GenericOneTimeReaderUse_");
    this.isWriteTest = false;
  }

  @Override
  protected GenericDatumReader<Object> getReader() {
    return newReader();
  }
}
/**
 * GenericTest variant that rebuilds both the GenericDatumReader and the
 * Decoder on every use, combining both construction overheads. Read-only.
 */
static class GenericOneTimeUse extends GenericTest {
  public GenericOneTimeUse() throws IOException {
    super("GenericOneTimeUse_");
    this.isWriteTest = false;
  }

  @Override
  protected GenericDatumReader<Object> getReader() {
    return newReader();
  }

  @Override
  protected Decoder getDecoder() {
    return newDecoder();
  }
}
/**
 * Base for benchmarks of generated SpecificRecord classes via
 * SpecificDatumReader/SpecificDatumWriter. Subclasses supply one random
 * record per call to {@link #genSingleRecord}.
 */
static abstract class SpecificTest<T extends SpecificRecordBase> extends BasicTest {
  protected final SpecificDatumReader<T> reader;
  protected final SpecificDatumWriter<T> writer;
  private Object[] sourceData;
  private T reuse; // single instance recycled across reads

  protected SpecificTest(String name, String writerSchema) throws IOException {
    super(name, writerSchema, 48);
    reader = newReader();
    writer = newWriter();
  }

  protected SpecificDatumReader<T> newReader() {
    return new SpecificDatumReader<>(schema);
  }

  protected SpecificDatumWriter<T> newWriter() {
    return new SpecificDatumWriter<>(schema);
  }

  @Override
  void genSourceData() {
    Random rand = newRandom();
    sourceData = new Object[count];
    for (int idx = 0; idx < sourceData.length; idx++) {
      sourceData[idx] = genSingleRecord(rand);
    }
    reuse = genSingleRecord(rand);
  }

  /** Builds one random record of the specific type. */
  protected abstract T genSingleRecord(Random r);

  @Override
  void readInternal(Decoder d) throws IOException {
    for (int idx = 0; idx < count; idx++) {
      reader.read(reuse, d);
    }
  }

  @Override
  void writeInternal(Encoder e) throws IOException {
    for (Object datum : sourceData) {
      @SuppressWarnings("unchecked")
      T record = (T) datum;
      writer.write(record, e);
    }
  }

  @Override
  void reset() {
    sourceData = null;
    data = null;
  }
}
/**
 * SpecificTest over the generated FooBarSpecificRecord class, filling every
 * field (id, name, nicknames, type enum, related ids) with random data.
 */
static class FooBarSpecificRecordTest extends SpecificTest<FooBarSpecificRecord> {
  public FooBarSpecificRecordTest() throws IOException {
    super("FooBarSpecificRecordTest", FooBarSpecificRecord.SCHEMA$.toString());
  }

  @Override
  protected FooBarSpecificRecord genSingleRecord(Random r) {
    TypeEnum[] typeEnums = TypeEnum.values();
    List<Integer> relatedIds = new ArrayList<>(10);
    for (int idx = 0; idx < 10; idx++) {
      relatedIds.add(r.nextInt());
    }
    try {
      // Note: evaluation order of the random calls is part of the generated
      // data; nicknames are drawn before the builder arguments.
      String[] nicknames = { randomString(r), randomString(r) };
      return FooBarSpecificRecord.newBuilder().setId(r.nextInt()).setName(randomString(r))
          .setNicknames(new ArrayList<>(Arrays.asList(nicknames))).setTypeEnum(typeEnums[r.nextInt(typeEnums.length)])
          .setRelatedids(relatedIds).build();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
}
  /**
   * Base harness for benchmarks using the reflect API: the schema is derived
   * from a sample object's class and data is produced via {@link #createDatum}.
   *
   * @param <T> Java type serialized reflectively
   */
  static abstract class ReflectTest<T> extends BasicTest {
    T[] sourceData = null;
    ReflectDatumReader<T> reader;
    ReflectDatumWriter<T> writer;
    Class<T> clazz;

    @SuppressWarnings("unchecked")
    ReflectTest(String name, T sample, int factor) throws IOException {
      // The sample instance only supplies its class; its field values are unused.
      super(name, ReflectData.get().getSchema(sample.getClass()).toString(), factor);
      clazz = (Class<T>) sample.getClass();
      reader = new ReflectDatumReader<>(schema);
      writer = new ReflectDatumWriter<>(schema);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected final void genSourceData() {
      Random r = newRandom();
      sourceData = (T[]) Array.newInstance(clazz, count);
      for (int i = 0; i < sourceData.length; i++) {
        sourceData[i] = createDatum(r);
      }
    }

    /** Creates one random datum; called once per source-data slot. */
    protected abstract T createDatum(Random r);

    @Override
    protected final void readInternal(Decoder d) throws IOException {
      for (int i = 0; i < count; i++) {
        reader.read(null, d); // no reuse: a new object is allocated per record
      }
    }

    @Override
    protected final void writeInternal(Encoder e) throws IOException {
      for (T sourceDatum : sourceData) {
        writer.write(sourceDatum, e);
      }
    }

    @Override
    protected final void reset() {
      // Drop references so the next phase starts from freshly generated data.
      sourceData = null;
      data = null;
    }
  }
  /** Reflect benchmark over the Rec record type. */
  static class ReflectRecordTest extends ReflectTest<Rec> {
    ReflectRecordTest() throws IOException {
      super("ReflectRecord", new Rec(), 12);
    }

    @Override
    protected Rec createDatum(Random r) {
      return new Rec(r);
    }
  }
  /** Reflect benchmark where each datum is a float[] of length COUNT / count. */
  static class ReflectFloatTest extends ReflectTest<float[]> {
    ReflectFloatTest() throws IOException {
      super("ReflectFloat", new float[0], COUNT);
    }

    @Override
    protected float[] createDatum(Random r) {
      return populateFloatArray(r, COUNT / count);
    }
  }
  /** Reflect benchmark where each datum is a double[] of length COUNT / count. */
  static class ReflectDoubleTest extends ReflectTest<double[]> {
    ReflectDoubleTest() throws IOException {
      super("ReflectDouble", new double[0], COUNT);
    }

    @Override
    protected double[] createDatum(Random r) {
      return populateDoubleArray(r, COUNT / count);
    }
  }
  /** Reflect benchmark over small randomly sized float arrays. */
  static class ReflectFloatArrayTest extends ReflectTest<float[]> {
    ReflectFloatArrayTest() throws IOException {
      super("ReflectFloatArray", new float[0], 10);
    }

    @Override
    protected float[] createDatum(Random r) {
      return populateFloatArray(r, false); // false => "small" size distribution
    }
  }
  /** Reflect benchmark over small randomly sized double arrays. */
  static class ReflectDoubleArrayTest extends ReflectTest<double[]> {
    ReflectDoubleArrayTest() throws IOException {
      super("ReflectDoubleArray", new double[0], 20);
    }

    @Override
    protected double[] createDatum(Random r) {
      return populateDoubleArray(r);
    }
  }
  /** Reflect benchmark over small randomly sized int arrays. */
  static class ReflectIntArrayTest extends ReflectTest<int[]> {
    ReflectIntArrayTest() throws IOException {
      super("ReflectIntArray", new int[0], 12);
    }

    @Override
    protected int[] createDatum(Random r) {
      return populateIntArray(r);
    }
  }
  /** Reflect benchmark over small randomly sized long arrays. */
  static class ReflectLongArrayTest extends ReflectTest<long[]> {
    ReflectLongArrayTest() throws IOException {
      super("ReflectLongArray", new long[0], 24);
    }

    @Override
    protected long[] createDatum(Random r) {
      return populateLongArray(r);
    }
  }
  /** Reflect benchmark over records nesting an array of small value objects. */
  static class ReflectNestedObjectArrayTest extends ReflectTest<ReflectNestedObjectArrayTest.Foo> {
    ReflectNestedObjectArrayTest() throws IOException {
      super("ReflectNestedObjectArray", new Foo(new Random()), 50);
    }

    @Override
    protected Foo createDatum(Random r) {
      return new Foo(r);
    }

    /** Holder with a randomly sized array of {@link Vals}. */
    static public class Foo {
      Vals[] bar;

      Foo() {
        // intentionally empty; presumably needed by the reflect reader — TODO confirm
      }

      Foo(Random r) {
        bar = new Vals[smallArraySize(r)];
        for (int i = 0; i < bar.length; i++) {
          bar[i] = new Vals(r);
        }
      }
    }

    /** Nested element type: four random float fields. */
    static class Vals {
      float f1;
      float f2;
      float f3;
      float f4;

      Vals() {
        // intentionally empty; presumably needed by the reflect reader — TODO confirm
      }

      Vals(Random r) {
        this.f1 = r.nextFloat();
        this.f2 = r.nextFloat();
        this.f3 = r.nextFloat();
        this.f4 = r.nextFloat();
      }
    }
  }
  /** Holder with a single float[] field, populated via the small or large size distribution. */
  static public class FloatFoo {
    float[] floatBar;

    FloatFoo() {
      // intentionally empty; presumably needed by the reflect reader — TODO confirm
    }

    FloatFoo(Random r, boolean large) {
      floatBar = populateFloatArray(r, large);
    }
  }
// average of 8, between 1 and 15
private static int smallArraySize(Random r) {
return r.nextInt(15) + 1;
}
// average of 64, between 16 and 112
private static int largeArraySize(Random r) {
return r.nextInt(97) + 16;
}
static float[] populateFloatArray(Random r, boolean large) {
int size = large ? largeArraySize(r) : smallArraySize(r);
return populateFloatArray(r, size);
}
static float[] populateFloatArray(Random r, int size) {
float[] result = new float[size];
for (int i = 0; i < result.length; i++) {
result[i] = r.nextFloat();
}
return result;
}
static double[] populateDoubleArray(Random r) {
return populateDoubleArray(r, smallArraySize(r));
}
static double[] populateDoubleArray(Random r, int size) {
double[] result = new double[size];
for (int i = 0; i < result.length; i++) {
result[i] = r.nextDouble();
}
return result;
}
static int[] populateIntArray(Random r) {
int size = smallArraySize(r);
int[] result = new int[size];
for (int i = 0; i < result.length; i++) {
result[i] = r.nextInt();
}
return result;
}
static long[] populateLongArray(Random r) {
int size = smallArraySize(r);
long[] result = new long[size];
for (int i = 0; i < result.length; i++) {
result[i] = r.nextLong();
}
return result;
}
  /** Reflect benchmark over FloatFoo holders with small nested float arrays. */
  static class ReflectNestedFloatArrayTest extends ReflectTest<FloatFoo> {
    public ReflectNestedFloatArrayTest() throws IOException {
      super("ReflectNestedFloatArray", new FloatFoo(new Random(), false), 10);
    }

    @Override
    protected FloatFoo createDatum(Random r) {
      return new FloatFoo(r, false);
    }
  }
  /** Reflect benchmark over FloatFoo holders with large nested float arrays. */
  static class ReflectNestedLargeFloatArrayTest extends ReflectTest<FloatFoo> {
    public ReflectNestedLargeFloatArrayTest() throws IOException {
      super("ReflectNestedLargeFloatArray", new FloatFoo(new Random(), true), 60);
    }

    @Override
    protected FloatFoo createDatum(Random r) {
      return new FloatFoo(r, true);
    }
  }
  /**
   * Same data as ReflectNestedLargeFloatArrayTest, but written through a
   * blocking binary encoder so large arrays are chunked into blocks.
   */
  static class ReflectNestedLargeFloatArrayBlockedTest extends ReflectTest<FloatFoo> {
    public ReflectNestedLargeFloatArrayBlockedTest() throws IOException {
      super("ReflectNestedLargeFloatArrayBlocked", new FloatFoo(new Random(), true), 60);
    }

    @Override
    protected FloatFoo createDatum(Random r) {
      return new FloatFoo(r, true);
    }

    @Override
    protected Encoder newEncoder(ByteArrayOutputStream out) throws IOException {
      // Deliberately tiny block size (254 bytes) to exercise block splitting.
      return new EncoderFactory().configureBlockSize(254).blockingBinaryEncoder(out, null);
    }
  }
  /** Flat record with two fields of every numeric primitive width. */
  @SuppressWarnings("unused")
  private static class Rec1 {
    double d1;
    double d11;
    float f2;
    float f22;
    int f3;
    int f33;
    long f4;
    long f44;
    byte f5;
    byte f55;
    short f6;
    short f66;

    Rec1() {
      // intentionally empty; presumably needed by the reflect reader — TODO confirm
    }

    Rec1(Random r) {
      d1 = r.nextDouble();
      d11 = r.nextDouble();
      f2 = r.nextFloat();
      f22 = r.nextFloat();
      f3 = r.nextInt();
      f33 = r.nextInt();
      f4 = r.nextLong();
      f44 = r.nextLong();
      // byte/short are narrowed from full random ints.
      f5 = (byte) r.nextInt();
      f55 = (byte) r.nextInt();
      f6 = (short) r.nextInt();
      f66 = (short) r.nextInt();
    }
  }
  /** Reflect benchmark over the primitive-heavy Rec1 record. */
  static class ReflectBigRecordTest extends ReflectTest<Rec1> {
    public ReflectBigRecordTest() throws IOException {
      super("ReflectBigRecord", new Rec1(new Random()), 20);
    }

    @Override
    protected Rec1 createDatum(Random r) {
      return new Rec1(r);
    }
  }
  /**
   * Base harness for schema-resolution benchmarks: data is written with one
   * schema and read with another through a ResolvingDecoder. Read-only.
   */
  static abstract class ResolvingTest extends BasicTest {
    GenericRecord[] sourceData = null;
    Schema writeSchema;

    // Wraps the given field schema in a one-field record named "R".
    private static String mkSchema(String subschema) {
      return ("{ \"type\": \"record\", \"name\": \"R\", \"fields\": [\n" + "{ \"name\": \"f\", \"type\": " + subschema
          + "}\n" + "] }");
    }

    /**
     * @param name benchmark name
     * @param r    reader-side schema for field "f"
     * @param w    writer-side schema for field "f"
     */
    public ResolvingTest(String name, String r, String w) throws IOException {
      super(name, mkSchema(r));
      isWriteTest = false; // only the resolving read path is measured
      this.writeSchema = new Schema.Parser().parse(mkSchema(w));
    }

    @Override
    protected Decoder getDecoder() throws IOException {
      // Resolve from the writer schema to the reader schema while decoding.
      return new ResolvingDecoder(writeSchema, schema, super.getDecoder());
    }

    @Override
    void readInternal(Decoder d) throws IOException {
      GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
      for (int i = 0; i < count; i++) {
        reader.read(null, d);
      }
    }

    @Override
    void writeInternal(Encoder e) throws IOException {
      // Data is always written with the writer-side schema.
      GenericDatumWriter<Object> writer = new GenericDatumWriter<>(writeSchema);
      for (GenericRecord sourceDatum : sourceData) {
        writer.write(sourceDatum, e);
      }
    }

    @Override
    void reset() {
      sourceData = null;
      data = null;
    }
  }
  /**
   * Resolution benchmark: the reader's enum has extra symbols relative to the
   * writer's, so each enum value is resolved across schemas during reads.
   */
  static class ExtendedEnumResolveTest extends ResolvingTest {
    private static final String ENUM_WRITER = "{ \"type\": \"enum\", \"name\":\"E\", \"symbols\": [\"A\", \"B\"] }";
    private static final String ENUM_READER = "{ \"type\": \"enum\", \"name\":\"E\", \"symbols\": [\"A\",\"B\",\"C\",\"D\",\"E\"] }";

    public ExtendedEnumResolveTest() throws IOException {
      super("ExtendedEnum", ENUM_READER, ENUM_WRITER);
    }

    @Override
    void genSourceData() {
      Random r = newRandom();
      Schema eSchema = writeSchema.getField("f").schema();
      sourceData = new GenericRecord[count];
      for (int i = 0; i < sourceData.length; i++) {
        GenericRecord rec = new GenericData.Record(writeSchema);
        // Only the writer's two symbols ("A", "B") can ever be written.
        int tag = r.nextInt(2);
        rec.put("f", GenericData.get().createEnum(eSchema.getEnumSymbols().get(tag), eSchema));
        sourceData[i] = rec;
      }
    }
  }
static class UnchangedUnionResolveTest extends ResolvingTest {
private static final String UNCHANGED_UNION = "[ \"null\", \"int\" ]";
public UnchangedUnionResolveTest() throws IOException {
super("UnchangedUnion", UNCHANGED_UNION, UNCHANGED_UNION);
}
@Override
void genSourceData() {
Random r = newRandom();
Schema uSchema = writeSchema.getField("f").schema();
sourceData = new GenericRecord[count];
for (int i = 0; i < sourceData.length; i++) {
GenericRecord rec = new GenericData.Record(writeSchema);
int val = r.nextInt(1000000);
Integer v = (val < 750000 ? val : null);
rec.put("f", v);
sourceData[i] = rec;
}
}
}
}
| 7,473 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/message/TestCustomSchemaStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.avro.message;
import org.apache.avro.Schema;
import org.apache.avro.compiler.schema.evolve.NestedEvolve1;
import org.apache.avro.compiler.schema.evolve.NestedEvolve2;
import org.apache.avro.compiler.schema.evolve.NestedEvolve3;
import org.apache.avro.compiler.schema.evolve.TestRecord2;
import org.apache.avro.compiler.schema.evolve.TestRecord3;
import org.junit.jupiter.api.Test;
import java.nio.ByteBuffer;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class TestCustomSchemaStore {

  /** SchemaStore preloaded with only the NestedEvolve1 and NestedEvolve2 schemas. */
  static class CustomSchemaStore implements SchemaStore {
    Cache cache;

    CustomSchemaStore() {
      cache = new Cache();
      cache.addSchema(NestedEvolve1.getClassSchema());
      cache.addSchema(NestedEvolve2.getClassSchema());
    }

    @Override
    public Schema findByFingerprint(long fingerprint) {
      return cache.findByFingerprint(fingerprint);
    }
  }

  private BinaryMessageDecoder<NestedEvolve1> decoder = NestedEvolve1.createDecoder(new CustomSchemaStore());

  @Test
  void compatibleReadWithSchemaFromSchemaStore() throws Exception {
    // Create and encode a NestedEvolve2 record.
    NestedEvolve2.Builder rootBuilder = NestedEvolve2.newBuilder().setRootName("RootName");
    rootBuilder.setNested(TestRecord2.newBuilder().setName("Name").setValue(1).setData("Data").build());
    ByteBuffer nestedEvolve2Buffer = rootBuilder.build().toByteBuffer();

    // Decode it as NestedEvolve1; the store supplies the writer's schema.
    NestedEvolve1 nestedEvolve1 = decoder.decode(nestedEvolve2Buffer);

    // JUnit convention: expected value first, actual second.
    assertEquals("RootName", nestedEvolve1.getRootName());
    assertEquals("Name", nestedEvolve1.getNested().getName());
    assertEquals(1, nestedEvolve1.getNested().getValue());
  }

  @Test
  void incompatibleReadWithSchemaFromSchemaStore() throws Exception {
    assertThrows(MissingSchemaException.class, () -> {
      // Create and encode a NestedEvolve3 record.
      NestedEvolve3.Builder rootBuilder = NestedEvolve3.newBuilder().setRootName("RootName");
      rootBuilder.setNested(TestRecord3.newBuilder().setName("Name").setData("Data").build());
      ByteBuffer nestedEvolve3Buffer = rootBuilder.build().toByteBuffer();

      // Decode it ... should fail because schema for 'NestedEvolve3' is not available
      // in the SchemaStore
      decoder.decode(nestedEvolve3Buffer);
    });
  }
}
| 7,474 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/generic/TestBuilderCopy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.generic;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.HashMap;
import org.junit.jupiter.api.Test;
import test.StringablesRecord;
/** Unit test for performing a builder copy of an object with a schema */
public class TestBuilderCopy {
  @Test
  void builderCopy() {
    // Populate a record whose fields use java-class string types
    // (BigDecimal values, BigInteger map keys).
    StringablesRecord.Builder builder = StringablesRecord.newBuilder();
    builder.setValue(new BigDecimal("1314.11"));

    HashMap<String, BigDecimal> mapWithBigDecimalElements = new HashMap<>();
    mapWithBigDecimalElements.put("testElement", new BigDecimal("220.11"));
    builder.setMapWithBigDecimalElements(mapWithBigDecimalElements);

    HashMap<BigInteger, String> mapWithBigIntKeys = new HashMap<>();
    mapWithBigIntKeys.put(BigInteger.ONE, "testKey");
    builder.setMapWithBigIntKeys(mapWithBigIntKeys);

    StringablesRecord original = builder.build();
    StringablesRecord duplicate = StringablesRecord.newBuilder(original).build();

    // JUnit convention: expected value first. The copy must equal the original.
    assertEquals(original, duplicate);
  }
}
| 7,475 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/generic/TestDeepCopy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.generic;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.avro.Foo;
import org.apache.avro.Interop;
import org.apache.avro.Kind;
import org.apache.avro.MD5;
import org.apache.avro.Node;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.specific.SpecificData;
import org.junit.jupiter.api.Test;
import test.StringablesRecord;
/**
 * Unit test for performing a deep copy of an object with a schema.
 * Exercises both the generic/specific deepCopy paths and the java-class
 * ("stringable") field types.
 */
public class TestDeepCopy {

  @Test
  void deepCopy() {
    // Set all non-default fields in an Interop instance:
    Interop.Builder interopBuilder = Interop.newBuilder();
    interopBuilder.setArrayField(Arrays.asList(1.1, 1.2, 1.3, 1.4));
    interopBuilder.setBoolField(true);
    interopBuilder.setBytesField(ByteBuffer.wrap(new byte[] { 1, 2, 3, 4 }));
    interopBuilder.setDoubleField(3.14d);
    interopBuilder.setEnumField(Kind.B);
    interopBuilder.setFixedField(new MD5(new byte[] { 4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1, 4, 3, 2, 1 }));
    interopBuilder.setFloatField(6.022f);
    interopBuilder.setIntField(32);
    interopBuilder.setLongField(64L);

    Map<java.lang.String, org.apache.avro.Foo> map = new HashMap<>(1);
    map.put("foo", Foo.newBuilder().setLabel("bar").build());
    interopBuilder.setMapField(map);

    interopBuilder.setNullField(null);

    Node.Builder rootBuilder = Node.newBuilder().setLabel("/");
    Node.Builder homeBuilder = Node.newBuilder().setLabel("home");
    homeBuilder.setChildren(new ArrayList<>(0));
    rootBuilder.setChildren(Collections.singletonList(homeBuilder.build()));
    interopBuilder.setRecordField(rootBuilder.build());

    interopBuilder.setStringField("Hello");
    interopBuilder.setUnionField(Collections.singletonList(ByteBuffer.wrap(new byte[] { 1, 2 })));

    Interop interop = interopBuilder.build();

    // Verify that deepCopy works for all fields:
    for (Field field : Interop.SCHEMA$.getFields()) {
      // Original field and deep copy should be equivalent:
      if (interop.get(field.pos()) instanceof ByteBuffer) {
        // ByteBuffer.equals depends on buffer position; compare backing arrays.
        assertTrue(Arrays.equals(((ByteBuffer) interop.get(field.pos())).array(),
            ((ByteBuffer) GenericData.get().deepCopy(field.schema(), interop.get(field.pos()))).array()));
      } else {
        assertEquals(interop.get(field.pos()), SpecificData.get().deepCopy(field.schema(), interop.get(field.pos())));
      }

      // Original field and deep copy should be different instances:
      // (immutable/interned types — enums, null, boxed primitives, strings —
      // are excluded since sharing instances is legitimate for them)
      if ((field.schema().getType() != Type.ENUM) && (field.schema().getType() != Type.NULL)
          && (field.schema().getType() != Type.BOOLEAN) && (field.schema().getType() != Type.INT)
          && (field.schema().getType() != Type.LONG) && (field.schema().getType() != Type.FLOAT)
          && (field.schema().getType() != Type.DOUBLE) && (field.schema().getType() != Type.STRING)) {
        assertFalse(interop.get(field.pos()) == GenericData.get().deepCopy(field.schema(), interop.get(field.pos())),
            "Field " + field.name() + " is same instance in deep copy");
      }
    }
  }

  @Test
  void javaClassDeepCopy() {
    // Test java-class deep copy. See AVRO-2438
    StringablesRecord.Builder builder = StringablesRecord.newBuilder();
    builder.setValue(new BigDecimal("1314.11"));

    HashMap<String, BigDecimal> mapWithBigDecimalElements = new HashMap<>();
    mapWithBigDecimalElements.put("testElement", new BigDecimal("220.11"));
    builder.setMapWithBigDecimalElements(mapWithBigDecimalElements);

    HashMap<BigInteger, String> mapWithBigIntKeys = new HashMap<>();
    mapWithBigIntKeys.put(BigInteger.ONE, "testKey");
    builder.setMapWithBigIntKeys(mapWithBigIntKeys);

    StringablesRecord javaClassString = builder.build();

    // Each stringable field must deep-copy to an equal value.
    for (Field field : StringablesRecord.SCHEMA$.getFields()) {
      assertEquals(javaClassString.get(field.pos()),
          SpecificData.get().deepCopy(field.schema(), javaClassString.get(field.pos())));
    }
  }
}
| 7,476 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestLocalTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.generic.GenericResponder;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.Test;
public class TestLocalTransceiver {

  /** Minimal protocol: one message "m" taking a string "x" and returning a string. */
  Protocol protocol = Protocol.parse("" + "{\"protocol\": \"Minimal\", " + "\"messages\": { \"m\": {"
      + " \"request\": [{\"name\": \"x\", \"type\": \"string\"}], " + " \"response\": \"string\"} } }");

  /** Responder that checks the request payload and always answers "there". */
  static class TestResponder extends GenericResponder {
    public TestResponder(Protocol local) {
      super(local);
    }

    @Override
    public Object respond(Message message, Object request) throws AvroRemoteException {
      assertEquals(new Utf8("hello"), ((GenericRecord) request).get("x"));
      return new Utf8("there");
    }
  }

  @Test
  void singleRpc() throws Exception {
    Transceiver transceiver = new LocalTransceiver(new TestResponder(protocol));

    GenericRecord request = new GenericData.Record(protocol.getMessages().get("m").getRequest());
    request.put("x", new Utf8("hello"));

    GenericRequestor requestor = new GenericRequestor(protocol, transceiver);
    // Repeat the same call several times over the one transceiver.
    for (int i = 0; i < 5; i++) {
      assertEquals(new Utf8("there"), requestor.request("m", request));
    }
  }
}
| 7,477 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslDigestMd5.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.avro.ipc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.net.InetSocketAddress;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.AuthorizeCallback;
import javax.security.sasl.RealmCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslException;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.TestProtocolGeneric;
import org.apache.avro.util.Utf8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/**
 * Runs the generic protocol tests over a SASL DIGEST-MD5 authenticated socket,
 * plus negative tests for anonymous and wrong-password clients.
 */
public class TestSaslDigestMd5 extends TestProtocolGeneric {

  private static final Logger LOG = LoggerFactory.getLogger(TestSaslDigestMd5.class);

  private static final String HOST = "localhost";
  private static final String SERVICE = "avro-test";
  private static final String PRINCIPAL = "avro-test-principal";
  private static final String PASSWORD = "super secret password";
  private static final String REALM = "avro-test-realm";

  private static final String DIGEST_MD5_MECHANISM = "DIGEST-MD5";
  private static final Map<String, String> DIGEST_MD5_PROPS = new HashMap<>();

  static {
    DIGEST_MD5_PROPS.put(Sasl.QOP, "auth-int");
    // The realm property name differs between the IBM and Sun/OpenJDK SASL providers.
    if (System.getProperty("java.vendor").contains("IBM")) {
      DIGEST_MD5_PROPS.put("com.ibm.security.sasl.digest.realm", REALM);
    } else {
      DIGEST_MD5_PROPS.put("com.sun.security.sasl.digest.realm", REALM);
    }
  }

  /** Callback handler supplying the correct principal, password and realm. */
  private static class TestSaslCallbackHandler implements CallbackHandler {
    @Override
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
      for (Callback c : callbacks) {
        if (c instanceof NameCallback) {
          ((NameCallback) c).setName(PRINCIPAL);
        } else if (c instanceof PasswordCallback) {
          ((PasswordCallback) c).setPassword(PASSWORD.toCharArray());
        } else if (c instanceof AuthorizeCallback) {
          ((AuthorizeCallback) c).setAuthorized(true);
        } else if (c instanceof RealmCallback) {
          ((RealmCallback) c).setText(REALM);
        } else {
          throw new UnsupportedCallbackException(c);
        }
      }
    }
  }

  @BeforeEach
  public void testStartServer() throws Exception {
    if (server != null)
      return;
    server = new SaslSocketServer(new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM, SERVICE, HOST,
        DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
    server.start();
    SaslClient saslClient = Sasl.createSaslClient(new String[] { DIGEST_MD5_MECHANISM }, PRINCIPAL, SERVICE, HOST,
        DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
    client = new SaslSocketTransceiver(new InetSocketAddress(server.getPort()), saslClient);
    requestor = new GenericRequestor(PROTOCOL, client);
  }

  /** An unauthenticated (anonymous) client must be rejected by a DIGEST-MD5 server. */
  @Test
  void anonymousClient() throws Exception {
    assertThrows(SaslException.class, () -> {
      Server s = new SaslSocketServer(new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM, SERVICE,
          HOST, DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
      s.start();
      Transceiver c = new SaslSocketTransceiver(new InetSocketAddress(s.getPort()));
      GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
      GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
      params.put("greeting", "bob");
      Utf8 response = (Utf8) requestor.request("hello", params);
      assertEquals(new Utf8("goodbye"), response);
      s.close();
      c.close();
    });
  }

  /** Callback handler that deliberately presents an incorrect password. */
  private static class WrongPasswordCallbackHandler implements CallbackHandler {
    @Override
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
      for (Callback c : callbacks) {
        if (c instanceof NameCallback) {
          ((NameCallback) c).setName(PRINCIPAL);
        } else if (c instanceof PasswordCallback) {
          ((PasswordCallback) c).setPassword("wrong".toCharArray());
        } else if (c instanceof AuthorizeCallback) {
          ((AuthorizeCallback) c).setAuthorized(true);
        } else if (c instanceof RealmCallback) {
          ((RealmCallback) c).setText(REALM);
        } else {
          throw new UnsupportedCallbackException(c);
        }
      }
    }
  }

  /** A client presenting a wrong password must be rejected. */
  @Test
  void wrongPassword() throws Exception {
    assertThrows(SaslException.class, () -> {
      Server s = new SaslSocketServer(new TestResponder(), new InetSocketAddress(0), DIGEST_MD5_MECHANISM, SERVICE,
          HOST, DIGEST_MD5_PROPS, new TestSaslCallbackHandler());
      s.start();
      SaslClient saslClient = Sasl.createSaslClient(new String[] { DIGEST_MD5_MECHANISM }, PRINCIPAL, SERVICE, HOST,
          DIGEST_MD5_PROPS, new WrongPasswordCallbackHandler());
      // Connect to the server started above. (Previously this used
      // server.getPort(), hitting the shared server from testStartServer and
      // leaving the local server 's' unused.)
      Transceiver c = new SaslSocketTransceiver(new InetSocketAddress(s.getPort()), saslClient);
      GenericRequestor requestor = new GenericRequestor(PROTOCOL, c);
      GenericRecord params = new GenericData.Record(PROTOCOL.getMessages().get("hello").getRequest());
      params.put("greeting", "bob");
      Utf8 response = (Utf8) requestor.request("hello", params);
      assertEquals(new Utf8("goodbye"), response);
      s.close();
      c.close();
    });
  }

  @Override
  public void handshake() throws IOException {
    // No-op override: disables the inherited handshake test for this transport.
  }

  @Override
  public void responseChange() throws IOException {
    // No-op override: disables the inherited response-change test for this transport.
  }
}
| 7,478 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestSaslAnonymous.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.TestProtocolGeneric;
import org.apache.avro.ipc.reflect.ReflectRequestor;
import org.apache.avro.ipc.reflect.ReflectResponder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
/** Runs the generic protocol tests over an anonymous SASL socket transport. */
public class TestSaslAnonymous extends TestProtocolGeneric {
  private static final Logger LOG = LoggerFactory.getLogger(TestSaslAnonymous.class);

  @BeforeEach
  public void testStartServer() throws Exception {
    if (server != null)
      return;
    server = new SaslSocketServer(new TestResponder(), new InetSocketAddress(0));
    server.start();
    client = new SaslSocketTransceiver(new InetSocketAddress(server.getPort()));
    requestor = new GenericRequestor(PROTOCOL, client);
  }

  @Override
  public void handshake() throws IOException {
    // No-op override: disables the inherited handshake test for this transport.
  }

  @Override
  public void responseChange() throws IOException {
    // No-op override: disables the inherited response-change test for this transport.
  }

  /** Simple echo protocol used by the large-request test. */
  public interface ProtoInterface {
    byte[] test(byte[] b);
  }

  // test big enough to fill socket output buffer
  @Test
  void test64kRequest() throws Exception {
    SaslSocketServer echoServer = new SaslSocketServer(new ReflectResponder(ProtoInterface.class, (ProtoInterface) b -> b),
        new InetSocketAddress(0));
    echoServer.start();

    SaslSocketTransceiver transceiver = new SaslSocketTransceiver(new InetSocketAddress(echoServer.getPort()));
    ProtoInterface proxy = ReflectRequestor.getClient(ProtoInterface.class, transceiver);

    // 64 KiB payload exceeds the socket output buffer in one request.
    proxy.test(new byte[64 * 1024]);

    transceiver.close();
    echoServer.close();
  }
}
| 7,479 |
0 | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/test/java/org/apache/avro/ipc/TestRpcPluginOrdering.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.test.Mail;
import org.apache.avro.test.Message;
import org.junit.jupiter.api.Test;
public class TestRpcPluginOrdering {
private static AtomicInteger orderCounter = new AtomicInteger();
public class OrderPlugin extends RPCPlugin {
public void clientStartConnect(RPCContext context) {
assertEquals(0, orderCounter.getAndIncrement());
}
public void clientSendRequest(RPCContext context) {
assertEquals(1, orderCounter.getAndIncrement());
}
public void clientReceiveResponse(RPCContext context) {
assertEquals(6, orderCounter.getAndIncrement());
}
public void clientFinishConnect(RPCContext context) {
assertEquals(5, orderCounter.getAndIncrement());
}
public void serverConnecting(RPCContext context) {
assertEquals(2, orderCounter.getAndIncrement());
}
public void serverReceiveRequest(RPCContext context) {
assertEquals(3, orderCounter.getAndIncrement());
}
public void serverSendResponse(RPCContext context) {
assertEquals(4, orderCounter.getAndIncrement());
}
}
@Test
void rpcPluginOrdering() throws Exception {
OrderPlugin plugin = new OrderPlugin();
SpecificResponder responder = new SpecificResponder(Mail.class, new TestMailImpl());
SpecificRequestor requestor = new SpecificRequestor(Mail.class, new LocalTransceiver(responder));
responder.addRPCPlugin(plugin);
requestor.addRPCPlugin(plugin);
Mail client = SpecificRequestor.getClient(Mail.class, requestor);
Message message = createTestMessage();
client.send(message);
}
private Message createTestMessage() {
Message message = Message.newBuilder().setTo("me@test.com").setFrom("you@test.com").setBody("plugin testing")
.build();
return message;
}
private static class TestMailImpl implements Mail {
public String send(Message message) {
return "Received";
}
public void fireandforget(Message message) {
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.specific;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.net.URL;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.ipc.HttpTransceiver;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
public class TestSpecificRequestor {
  /** A protocol with no types or messages; only java.lang.Object methods apply. */
  public interface SampleSpecificProtocol {
    public static final Protocol PROTOCOL = Protocol.parse(
        "{\"protocol\":\"SampleSpecificProtocol\",\"namespace\":\"org.apache.avro.ipc.specific\",\"types\":[],\"messages\":{}}");
  }

  static Object proxy;

  /**
   * Builds the proxy once for all tests. No server is listening on the URL; the
   * tests below verify that Object methods succeed regardless, i.e. they are not
   * dispatched over the wire.
   */
  @BeforeAll
  public static void initializeProxy() throws Exception {
    HttpTransceiver client = new HttpTransceiver(new URL("http://localhost"));
    SpecificRequestor requestor = new SpecificRequestor(SampleSpecificProtocol.class, client);
    proxy = SpecificRequestor.getClient(SampleSpecificProtocol.class, requestor);
  }

  /** hashCode() on the proxy must not throw despite no reachable server. */
  @Test
  void testHashCode() throws IOException {
    try {
      proxy.hashCode();
    } catch (AvroRuntimeException e) {
      // Pass the exception as the failure cause so the stack trace is preserved.
      fail(e.getMessage(), e);
    }
  }

  /** equals() on the proxy must not throw despite no reachable server. */
  @Test
  void testEquals() throws IOException {
    try {
      proxy.equals(proxy);
    } catch (AvroRuntimeException e) {
      fail(e.getMessage(), e);
    }
  }

  /** toString() on the proxy must not throw despite no reachable server. */
  @Test
  void testToString() throws IOException {
    try {
      proxy.toString();
    } catch (AvroRuntimeException e) {
      fail(e.getMessage(), e);
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import static org.junit.jupiter.api.Assertions.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.TreeSet;
import org.apache.avro.ipc.stats.Histogram.Entry;
import org.apache.avro.ipc.stats.Histogram.Segmenter;
import org.junit.jupiter.api.Test;
/** Tests {@link Histogram} bucketing, labels, iteration, and float statistics. */
public class TestHistogram {

  /** Exercises counts, labels, entry iteration, and recent-additions tracking. */
  @Test
  void basicOperation() {
    Segmenter<String, Integer> s = new Histogram.TreeMapSegmenter<>(new TreeSet<>(Arrays.asList(0, 1, 2, 4, 8, 16)));
    Histogram<String, Integer> h = new Histogram<>(s);
    for (int i = 0; i < 20; ++i) {
      h.add(i);
    }
    assertEquals(20, h.getCount());
    assertArrayEquals(new int[] { 1, 1, 2, 4, 8, 4 }, h.getHistogram());
    assertEquals("[0,1)=1;[1,2)=1;[2,4)=2;[4,8)=4;[8,16)=8;[16,infinity)=4", h.toString());
    String[] correctBucketLabels = { "[0,1)", "[1,2)", "[2,4)", "[4,8)", "[8,16)", "[16,infinity)" };
    // test bucket iterator
    int pos = 0;
    Iterator<String> it = h.getSegmenter().getBuckets();
    while (it.hasNext()) {
      assertEquals(correctBucketLabels[pos], it.next());
      pos = pos + 1;
    }
    assertEquals(correctBucketLabels.length, pos);
    List<String> labels = h.getSegmenter().getBucketLabels();
    assertEquals(correctBucketLabels.length, labels.size());
    if (labels.size() == correctBucketLabels.length) {
      for (int i = 0; i < labels.size(); i++) {
        assertEquals(correctBucketLabels[i], labels.get(i));
      }
    }
    String[] correctBoundryLabels = { "0", "1", "2", "4", "8", "16" };
    List<String> boundryLabels = h.getSegmenter().getBoundaryLabels();
    assertEquals(correctBoundryLabels.length, boundryLabels.size());
    if (boundryLabels.size() == correctBoundryLabels.length) {
      for (int i = 0; i < boundryLabels.size(); i++) {
        assertEquals(correctBoundryLabels[i], boundryLabels.get(i));
      }
    }
    List<Entry<String>> entries = new ArrayList<>();
    for (Entry<String> entry : h.entries()) {
      entries.add(entry);
    }
    assertEquals("[0,1)", entries.get(0).bucket);
    assertEquals(4, entries.get(5).count);
    assertEquals(6, entries.size());
    h.add(1010);
    h.add(9191);
    List<Integer> recent = h.getRecentAdditions();
    assertTrue(recent.contains(1010));
    assertTrue(recent.contains(9191));
  }

  /** Values below the lowest boundary must raise a SegmenterException. */
  @Test
  void badValue() {
    assertThrows(Histogram.SegmenterException.class, () -> {
      Segmenter<String, Long> s = new Histogram.TreeMapSegmenter<>(
          new TreeSet<>(Arrays.asList(0L, 1L, 2L, 4L, 8L, 16L)));
      Histogram<String, Long> h = new Histogram<>(s);
      h.add(-1L);
    });
  }

  /** Only has one bucket */
  static class SingleBucketSegmenter implements Segmenter<String, Float> {
    @Override
    public Iterator<String> getBuckets() {
      return Collections.singletonList("X").iterator();
    }

    @Override
    public List<String> getBoundaryLabels() {
      return Collections.singletonList("X");
    }

    @Override
    public List<String> getBucketLabels() {
      return Collections.singletonList("X");
    }

    @Override
    public int segment(Float value) {
      return 0;
    }

    @Override
    public int size() {
      return 1;
    }
  }

  /** Verifies count, mean, and unbiased standard deviation of FloatHistogram. */
  @Test
  void floatHistogram() {
    FloatHistogram<String> h = new FloatHistogram<>(new SingleBucketSegmenter());
    h.add(12.0f);
    h.add(10.0f);
    h.add(20.0f);
    assertEquals(3, h.getCount());
    assertEquals(14.0f, h.getMean(), 0.0001);
    assertEquals(5.291f, h.getUnbiasedStdDev(), 0.001);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
/** Tests {@link Stopwatch} state transitions using a manually-advanced fake clock. */
public class TestStopwatch {

  /** Elapsed time accumulates only while the stopwatch is running. */
  @Test
  void normal() {
    FakeTicks ticks = new FakeTicks();
    Stopwatch watch = new Stopwatch(ticks);
    ticks.passTime(10);
    watch.start();
    ticks.passTime(20);
    assertEquals(20, watch.elapsedNanos());
    ticks.passTime(40);
    watch.stop();
    ticks.passTime(80); // time after stop() must not count
    assertEquals(60, watch.elapsedNanos());
  }

  /** Reading elapsed time before the stopwatch was ever started is illegal. */
  @Test
  void notStarted1() {
    Stopwatch watch = new Stopwatch(new FakeTicks());
    assertThrows(IllegalStateException.class, watch::elapsedNanos);
  }

  /** Stopping a stopwatch that was never started is illegal. */
  @Test
  void notStarted2() {
    Stopwatch watch = new Stopwatch(new FakeTicks());
    assertThrows(IllegalStateException.class, watch::stop);
  }

  /** Starting twice without an intervening stop is illegal. */
  @Test
  void twiceStarted() {
    Stopwatch watch = new Stopwatch(new FakeTicks());
    watch.start();
    assertThrows(IllegalStateException.class, watch::start);
  }

  /** Stopping twice without an intervening start is illegal. */
  @Test
  void twiceStopped() {
    Stopwatch watch = new Stopwatch(new FakeTicks());
    watch.start();
    watch.stop();
    assertThrows(IllegalStateException.class, watch::stop);
  }

  /** Smoke test against the real system clock: elapsed time is non-negative. */
  @Test
  void systemStopwatch() {
    Stopwatch watch = new Stopwatch(Stopwatch.SYSTEM_TICKS);
    watch.start();
    watch.stop();
    assertTrue(watch.elapsedNanos() >= 0);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc.stats;
import org.apache.avro.ipc.stats.Stopwatch.Ticks;
/** A {@link Ticks} implementation whose clock advances only on explicit request. */
public class FakeTicks implements Ticks {
  // Current fake time in nanoseconds; package-visible so tests can wind it.
  long time = 0;

  /** Advances the fake clock by {@code nanos} nanoseconds. */
  public void passTime(long nanos) {
    time = time + nanos;
  }

  @Override
  public long ticks() {
    return time;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.compiler.specific;
import static org.junit.jupiter.api.Assertions.*;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import javax.tools.JavaCompiler;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import javax.tools.JavaCompiler.CompilationTask;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.TestProtocolParsing;
import org.apache.avro.TestSchema;
import org.apache.avro.TestAnnotation;
import org.apache.avro.generic.GenericData.StringType;
import org.apache.avro.test.Simple;
import org.apache.avro.test.TestRecord;
import org.apache.avro.test.MD5;
import org.apache.avro.test.Kind;
import org.apache.avro.compiler.specific.SpecificCompiler.OutputFile;
import org.junit.Rule;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.io.TempDir;
import org.junit.rules.TestName;
public class TestSpecificCompiler {
  // Scratch directories created fresh for each test by JUnit.
  @TempDir
  public File INPUT_DIR;
  @TempDir
  public File OUTPUT_DIR;
  // Protocol whose protocol/error/field/message names ("default", "finally",
  // "catch", "goto", "break") are all Java reserved words, to exercise mangling.
  static final String PROTOCOL = "" + "{ \"protocol\": \"default\",\n" + " \"types\":\n" + " [\n" + " {\n"
      + " \"name\": \"finally\",\n" + " \"type\": \"error\",\n"
      + " \"fields\": [{\"name\": \"catch\", \"type\": \"boolean\"}]\n" + " }\n" + " ],\n"
      + " \"messages\": { \"goto\":\n" + " { \"request\": [{\"name\": \"break\", \"type\": \"string\"}],\n"
      + " \"response\": \"string\",\n" + " \"errors\": [\"finally\"]\n" + " }" + " }\n" + "}\n";
  /** javaEscape must backslash-escape an embedded double quote. */
  @Test
  void esc() {
    assertEquals("\\\"", SpecificCompiler.javaEscape("\""));
  }
  /** makePath joins namespace and name into a platform-specific .java path. */
  @Test
  void makePath() {
    SpecificCompiler compiler = new SpecificCompiler();
    assertEquals("foo/bar/Baz.java".replace("/", File.separator), compiler.makePath("Baz", "foo.bar"));
    // An empty namespace yields a bare file name with no directory prefix.
    assertEquals("baz.java", compiler.makePath("baz", ""));
  }
  /** A bare primitive schema produces no generated classes. */
  @Test
  void primitiveSchemaGeneratesNothing() {
    assertEquals(0, new SpecificCompiler(new Schema.Parser().parse("\"double\"")).compile().size());
  }
  /** An enum schema compiles to a single Java enum that also passes javac. */
  @Test
  void simpleEnumSchema(TestInfo testInfo) throws IOException {
    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(TestSchema.BASIC_ENUM_SCHEMA))
        .compile();
    assertEquals(1, outputs.size());
    OutputFile o = outputs.iterator().next();
    assertEquals(o.path, "Test.java");
    assertTrue(o.contents.contains("public enum Test"));
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
  }
  /** mangle leaves ordinary names alone and appends '$' to reserved words. */
  @Test
  void mangleIfReserved() {
    assertEquals("foo", SpecificCompiler.mangle("foo"));
    assertEquals("goto$", SpecificCompiler.mangle("goto"));
  }
  /** Reserved words in protocol, error, and message names are mangled with '$'. */
  @Test
  void manglingForProtocols(TestInfo testInfo) throws IOException {
    Collection<OutputFile> outputs = new SpecificCompiler(Protocol.parse(PROTOCOL)).compile();
    Iterator<OutputFile> i = outputs.iterator();
    // Relies on output order: the error type is emitted before the interface.
    String errType = i.next().contents;
    String protocol = i.next().contents;
    assertTrue(errType.contains("public class finally$ extends org.apache.avro.specific.SpecificExceptionBase"));
    assertTrue(errType.contains("private boolean catch$;"));
    assertTrue(protocol.contains("java.lang.CharSequence goto$(java.lang.CharSequence break$)"));
    assertTrue(protocol.contains("public interface default$"));
    assertTrue(protocol.contains(" finally$"));
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
  }
  // A self-referential record schema whose record and field names are Java
  // reserved words ("volatile", "package", "short"), to exercise record mangling.
  private static String SCHEMA = "{ \"name\": \"volatile\", \"type\": \"record\", "
      + " \"fields\": [ {\"name\": \"package\", \"type\": \"string\" },"
      + " {\"name\": \"data\", \"type\": \"int\" },"
      + " {\"name\": \"value\", \"type\": \"int\" },"
      + " {\"name\": \"defaultValue\", \"type\": \"int\" },"
      + " {\"name\": \"other\", \"type\": \"int\" },"
      + " {\"name\": \"short\", \"type\": \"volatile\" } ] }";
  /** Reserved words in record and field names are mangled with '$'. */
  @Test
  void manglingForRecords(TestInfo testInfo) throws IOException {
    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(SCHEMA)).compile();
    assertEquals(1, outputs.size());
    String contents = outputs.iterator().next().contents;
    assertTrue(contents.contains("private java.lang.CharSequence package$;"));
    assertTrue(contents.contains("class volatile$ extends"));
    assertTrue(contents.contains("volatile$ short$;"));
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
  }
  /** Reserved words used as enum symbols are mangled with '$'. */
  @Test
  void manglingForEnums(TestInfo testInfo) throws IOException {
    String enumSchema = "" + "{ \"name\": \"instanceof\", \"type\": \"enum\","
        + " \"symbols\": [\"new\", \"super\", \"switch\"] }";
    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(enumSchema)).compile();
    assertEquals(1, outputs.size());
    String contents = outputs.iterator().next().contents;
    assertTrue(contents.contains("new$"));
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), outputs);
  }
  /** Generated record code must still compile when long string constants are split. */
  @Test
  void schemaSplit(TestInfo testInfo) throws IOException {
    SpecificCompiler compiler = new SpecificCompiler(new Schema.Parser().parse(SCHEMA));
    compiler.maxStringChars = 10; // force splitting of the schema string constant
    Collection<OutputFile> files = compiler.compile();
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), files);
  }
  /** Generated protocol code must still compile when long string constants are split. */
  @Test
  void protocolSplit(TestInfo testInfo) throws IOException {
    SpecificCompiler compiler = new SpecificCompiler(Protocol.parse(PROTOCOL));
    compiler.maxStringChars = 10; // force splitting of the protocol string constant
    Collection<OutputFile> files = compiler.compile();
    assertCompilesWithJavaCompiler(new File(INPUT_DIR, testInfo.getTestMethod().get().getName()), files);
  }
  /** Doc attributes from the schema surface as Javadoc in every generated file. */
  @Test
  void schemaWithDocs() {
    Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(TestSchema.SCHEMA_WITH_DOC_TAGS))
        .compile();
    assertEquals(3, outputs.size());
    int count = 0;
    for (OutputFile o : outputs) {
      if (o.path.endsWith("outer_record.java")) {
        count++;
        assertTrue(o.contents.contains("/** This is not a world record. */"));
        assertTrue(o.contents.contains("/** Inner Fixed */"));
        assertTrue(o.contents.contains("/** Inner Enum */"));
        assertTrue(o.contents.contains("/** Inner String */"));
      }
      if (o.path.endsWith("very_inner_fixed.java")) {
        count++;
        assertTrue(o.contents.contains("/** Very Inner Fixed */"));
        assertTrue(o.contents.contains("@org.apache.avro.specific.FixedSize(1)"));
      }
      if (o.path.endsWith("very_inner_enum.java")) {
        count++;
        assertTrue(o.contents.contains("/** Very Inner Enum */"));
      }
    }
    assertEquals(3, count); // each expected file was seen exactly once
  }
  /** Protocol and message docs surface as Javadoc in the generated interface. */
  @Test
  void protocolWithDocs() throws IOException {
    Protocol protocol = TestProtocolParsing.getSimpleProtocol();
    Collection<OutputFile> out = new SpecificCompiler(protocol).compile();
    assertEquals(6, out.size());
    int count = 0;
    for (OutputFile o : out) {
      if (o.path.endsWith("Simple.java")) {
        count++;
        assertTrue(o.contents.contains("/** Protocol used for testing. */"));
        assertTrue(o.contents.contains("* Send a greeting"));
      }
    }
    assertEquals(1, count, "Missed generated protocol!");
  }
  /**
   * compileSchema must regenerate output only when the input is newer: a second
   * compile with an untouched input leaves the output's mtime unchanged, while
   * rewriting the input file (even with identical content) triggers regeneration.
   */
  @Test
  void needCompile() throws IOException, InterruptedException {
    String schema = "" + "{ \"name\": \"Foo\", \"type\": \"record\", "
        + " \"fields\": [ {\"name\": \"package\", \"type\": \"string\" },"
        + " {\"name\": \"short\", \"type\": \"Foo\" } ] }";
    File inputFile = new File(INPUT_DIR.getPath(), "input.avsc");
    try (FileWriter fw = new FileWriter(inputFile)) {
      fw.write(schema);
    }
    // Start from a clean slate: neither the output file nor its directory exists.
    File outputDir = OUTPUT_DIR;
    File outputFile = new File(outputDir, "Foo.java");
    outputFile.delete();
    assertFalse(outputFile.exists());
    outputDir.delete();
    assertFalse(outputDir.exists());
    SpecificCompiler.compileSchema(inputFile, outputDir);
    assertTrue(outputDir.exists());
    assertTrue(outputFile.exists());
    long lastModified = outputFile.lastModified();
    Thread.sleep(1000); // granularity of JVM doesn't seem to go below 1 sec
    // Input unchanged: recompiling must be a no-op.
    SpecificCompiler.compileSchema(inputFile, outputDir);
    assertEquals(lastModified, outputFile.lastModified());
    // Rewriting the input bumps its mtime, so recompiling must regenerate.
    try (FileWriter fw = new FileWriter(inputFile)) {
      fw.write(schema);
    }
    SpecificCompiler.compileSchema(inputFile, outputDir);
    assertTrue(lastModified != outputFile.lastModified());
  }
/**
* Creates a record with the given name, error status, and fields.
*
* @param name the name of the schema.
* @param isError true if the schema represents an error; false otherwise.
* @param fields the field(s) to add to the schema.
* @return the schema.
*/
private Schema createRecord(String name, boolean isError, Field... fields) {
Schema record = Schema.createRecord(name, null, null, isError);
record.setFields(Arrays.asList(fields));
return record;
}
  /**
   * Getter-name generation: camel-casing, '$'-suffixing of names that collide
   * with Object/Throwable/SpecificRecord members (message, cause, class, schema,
   * doubly so for error types), and numeric suffixes for case-insensitive field
   * collisions (height vs Height). Fields are re-created before each reuse,
   * presumably because adding a Field to a record fixes its position — TODO
   * confirm against the Schema.Field contract.
   */
  @Test
  void generateGetMethod() {
    Field height = new Field("height", Schema.create(Type.INT), null, null);
    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
    Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
    Field message = new Field("message", Schema.create(Type.STRING), null, null);
    Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
    Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
    Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
    Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, height), height));
    assertEquals("getHeightAndWidth",
        SpecificCompiler.generateGetMethod(createRecord("test", false, height_and_width), height_and_width));
    assertEquals("getMessage", SpecificCompiler.generateGetMethod(createRecord("test", false, message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    assertEquals("getMessage$", SpecificCompiler.generateGetMethod(createRecord("test", true, message), message));
    assertEquals("getCause", SpecificCompiler.generateGetMethod(createRecord("test", false, cause), cause));
    cause = new Field("cause", Schema.create(Type.STRING), null, null);
    assertEquals("getCause$", SpecificCompiler.generateGetMethod(createRecord("test", true, cause), cause));
    assertEquals("getClass$", SpecificCompiler.generateGetMethod(createRecord("test", false, clasz), clasz));
    clasz = new Field("class", Schema.create(Type.STRING), null, null);
    assertEquals("getClass$", SpecificCompiler.generateGetMethod(createRecord("test", true, clasz), clasz));
    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", false, schema), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", true, schema), schema));
    // Case-insensitive collisions: the second field gets a numeric suffix.
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, Height), Height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("getHeight", SpecificCompiler.generateGetMethod(createRecord("test", false, height, Height), height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("getHeight$0",
        SpecificCompiler.generateGetMethod(createRecord("test", false, height, Height), Height));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("getMessage$", SpecificCompiler.generateGetMethod(createRecord("test", true, Message), Message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("getMessage$",
        SpecificCompiler.generateGetMethod(createRecord("test", true, message, Message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("getMessage$0",
        SpecificCompiler.generateGetMethod(createRecord("test", true, message, Message), Message));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("getSchema$", SpecificCompiler.generateGetMethod(createRecord("test", false, Schema$), Schema$));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("getSchema$",
        SpecificCompiler.generateGetMethod(createRecord("test", false, schema, Schema$), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("getSchema$0",
        SpecificCompiler.generateGetMethod(createRecord("test", false, schema, Schema$), Schema$));
  }
  /**
   * Setter-name generation: mirrors the getter-name cases — camel-casing,
   * '$'-suffixing of colliding names (message, cause, class, schema), and
   * numeric suffixes for case-insensitive field collisions. Fields are
   * re-created before each reuse, presumably because adding a Field to a record
   * fixes its position — TODO confirm against the Schema.Field contract.
   */
  @Test
  void generateSetMethod() {
    Field height = new Field("height", Schema.create(Type.INT), null, null);
    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
    Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
    Field message = new Field("message", Schema.create(Type.STRING), null, null);
    Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
    Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
    Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
    Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, height), height));
    assertEquals("setHeightAndWidth",
        SpecificCompiler.generateSetMethod(createRecord("test", false, height_and_width), height_and_width));
    assertEquals("setMessage", SpecificCompiler.generateSetMethod(createRecord("test", false, message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    assertEquals("setMessage$", SpecificCompiler.generateSetMethod(createRecord("test", true, message), message));
    assertEquals("setCause", SpecificCompiler.generateSetMethod(createRecord("test", false, cause), cause));
    cause = new Field("cause", Schema.create(Type.STRING), null, null);
    assertEquals("setCause$", SpecificCompiler.generateSetMethod(createRecord("test", true, cause), cause));
    assertEquals("setClass$", SpecificCompiler.generateSetMethod(createRecord("test", false, clasz), clasz));
    clasz = new Field("class", Schema.create(Type.STRING), null, null);
    assertEquals("setClass$", SpecificCompiler.generateSetMethod(createRecord("test", true, clasz), clasz));
    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", false, schema), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", true, schema), schema));
    // Case-insensitive collisions: the second field gets a numeric suffix.
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, Height), Height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("setHeight", SpecificCompiler.generateSetMethod(createRecord("test", false, height, Height), height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("setHeight$0",
        SpecificCompiler.generateSetMethod(createRecord("test", false, height, Height), Height));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("setMessage$", SpecificCompiler.generateSetMethod(createRecord("test", true, Message), Message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("setMessage$",
        SpecificCompiler.generateSetMethod(createRecord("test", true, message, Message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("setMessage$0",
        SpecificCompiler.generateSetMethod(createRecord("test", true, message, Message), Message));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("setSchema$", SpecificCompiler.generateSetMethod(createRecord("test", false, Schema$), Schema$));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("setSchema$",
        SpecificCompiler.generateSetMethod(createRecord("test", false, schema, Schema$), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("setSchema$0",
        SpecificCompiler.generateSetMethod(createRecord("test", false, schema, Schema$), Schema$));
  }
  @Test
  void generateHasMethod() {
    // NOTE(review): a Field instance may only be attached to a single Schema,
    // which is why fields are deliberately re-created before each
    // createRecord(...) call below; do not "simplify" by reusing instances.
    Field height = new Field("height", Schema.create(Type.INT), null, null);
    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
    Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
    Field message = new Field("message", Schema.create(Type.STRING), null, null);
    Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
    Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
    Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
    Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    // Plain names map directly to hasXyz accessors (snake_case is camel-cased).
    assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, height), height));
    assertEquals("hasHeightAndWidth",
        SpecificCompiler.generateHasMethod(createRecord("test", false, height_and_width), height_and_width));
    assertEquals("hasMessage", SpecificCompiler.generateHasMethod(createRecord("test", false, message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    // "message" and "cause" are mangled with '$' only in error records (second
    // createRecord argument true); "class" and "schema" are mangled always.
    assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(createRecord("test", true, message), message));
    assertEquals("hasCause", SpecificCompiler.generateHasMethod(createRecord("test", false, cause), cause));
    cause = new Field("cause", Schema.create(Type.STRING), null, null);
    assertEquals("hasCause$", SpecificCompiler.generateHasMethod(createRecord("test", true, cause), cause));
    assertEquals("hasClass$", SpecificCompiler.generateHasMethod(createRecord("test", false, clasz), clasz));
    clasz = new Field("class", Schema.create(Type.STRING), null, null);
    assertEquals("hasClass$", SpecificCompiler.generateHasMethod(createRecord("test", true, clasz), clasz));
    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", false, schema), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", true, schema), schema));
    // Fields differing only in case: the first occurrence keeps the plain
    // accessor name; later case-insensitive duplicates get a "$0" suffix.
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, Height), Height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("hasHeight", SpecificCompiler.generateHasMethod(createRecord("test", false, height, Height), height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("hasHeight$0",
        SpecificCompiler.generateHasMethod(createRecord("test", false, height, Height), Height));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("hasMessage$", SpecificCompiler.generateHasMethod(createRecord("test", true, Message), Message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("hasMessage$",
        SpecificCompiler.generateHasMethod(createRecord("test", true, message, Message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    // '$'-mangled names can still collide case-insensitively and get "$0" too.
    assertEquals("hasMessage$0",
        SpecificCompiler.generateHasMethod(createRecord("test", true, message, Message), Message));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("hasSchema$", SpecificCompiler.generateHasMethod(createRecord("test", false, Schema$), Schema$));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("hasSchema$",
        SpecificCompiler.generateHasMethod(createRecord("test", false, schema, Schema$), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("hasSchema$0",
        SpecificCompiler.generateHasMethod(createRecord("test", false, schema, Schema$), Schema$));
  }
  @Test
  void generateClearMethod() {
    // NOTE(review): a Field instance may only be attached to a single Schema,
    // which is why fields are deliberately re-created before each
    // createRecord(...) call below; do not "simplify" by reusing instances.
    Field height = new Field("height", Schema.create(Type.INT), null, null);
    Field Height = new Field("Height", Schema.create(Type.INT), null, null);
    Field height_and_width = new Field("height_and_width", Schema.create(Type.STRING), null, null);
    Field message = new Field("message", Schema.create(Type.STRING), null, null);
    Field Message = new Field("Message", Schema.create(Type.STRING), null, null);
    Field cause = new Field("cause", Schema.create(Type.STRING), null, null);
    Field clasz = new Field("class", Schema.create(Type.STRING), null, null);
    Field schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Field Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    // Plain names map directly to clearXyz accessors (snake_case is camel-cased).
    assertEquals("clearHeight", SpecificCompiler.generateClearMethod(createRecord("test", false, height), height));
    assertEquals("clearHeightAndWidth",
        SpecificCompiler.generateClearMethod(createRecord("test", false, height_and_width), height_and_width));
    assertEquals("clearMessage", SpecificCompiler.generateClearMethod(createRecord("test", false, message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    // "message" and "cause" are mangled with '$' only in error records (second
    // createRecord argument true); "class" and "schema" are mangled always.
    assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(createRecord("test", true, message), message));
    assertEquals("clearCause", SpecificCompiler.generateClearMethod(createRecord("test", false, cause), cause));
    cause = new Field("cause", Schema.create(Type.STRING), null, null);
    assertEquals("clearCause$", SpecificCompiler.generateClearMethod(createRecord("test", true, cause), cause));
    assertEquals("clearClass$", SpecificCompiler.generateClearMethod(createRecord("test", false, clasz), clasz));
    clasz = new Field("class", Schema.create(Type.STRING), null, null);
    assertEquals("clearClass$", SpecificCompiler.generateClearMethod(createRecord("test", true, clasz), clasz));
    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", false, schema), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", true, schema), schema));
    // Fields differing only in case: the first occurrence keeps the plain
    // accessor name; later case-insensitive duplicates get a "$0" suffix.
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("clearHeight", SpecificCompiler.generateClearMethod(createRecord("test", false, Height), Height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("clearHeight",
        SpecificCompiler.generateClearMethod(createRecord("test", false, height, Height), height));
    height = new Field("height", Schema.create(Type.INT), null, null);
    Height = new Field("Height", Schema.create(Type.INT), null, null);
    assertEquals("clearHeight$0",
        SpecificCompiler.generateClearMethod(createRecord("test", false, height, Height), Height));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("clearMessage$", SpecificCompiler.generateClearMethod(createRecord("test", true, Message), Message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    assertEquals("clearMessage$",
        SpecificCompiler.generateClearMethod(createRecord("test", true, message, Message), message));
    message = new Field("message", Schema.create(Type.STRING), null, null);
    Message = new Field("Message", Schema.create(Type.STRING), null, null);
    // '$'-mangled names can still collide case-insensitively and get "$0" too.
    assertEquals("clearMessage$0",
        SpecificCompiler.generateClearMethod(createRecord("test", true, message, Message), Message));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("clearSchema$", SpecificCompiler.generateClearMethod(createRecord("test", false, Schema$), Schema$));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("clearSchema$",
        SpecificCompiler.generateClearMethod(createRecord("test", false, schema, Schema$), schema));
    schema = new Field("schema", Schema.create(Type.STRING), null, null);
    Schema$ = new Field("Schema", Schema.create(Type.STRING), null, null);
    assertEquals("clearSchema$0",
        SpecificCompiler.generateClearMethod(createRecord("test", false, schema, Schema$), Schema$));
  }
@Test
void annotations() throws Exception {
// an interface generated for protocol
assertNotNull(Simple.class.getAnnotation(TestAnnotation.class));
// a class generated for a record
assertNotNull(TestRecord.class.getAnnotation(TestAnnotation.class));
// a class generated for a fixed
assertNotNull(MD5.class.getAnnotation(TestAnnotation.class));
// a class generated for an enum
assertNotNull(Kind.class.getAnnotation(TestAnnotation.class));
// a field
assertNotNull(TestRecord.class.getDeclaredField("name").getAnnotation(TestAnnotation.class));
// a method
assertNotNull(Simple.class.getMethod("ack").getAnnotation(TestAnnotation.class));
}
@Test
void aliases() throws IOException {
Schema s = new Schema.Parser().parse("{\"name\":\"X\",\"type\":\"record\",\"aliases\":[\"Y\"],\"fields\":["
+ "{\"name\":\"f\",\"type\":\"int\",\"aliases\":[\"g\"]}]}");
SpecificCompiler compiler = new SpecificCompiler(s);
compiler.setStringType(StringType.valueOf("String"));
Collection<OutputFile> outputs = compiler.compile();
assertEquals(1, outputs.size());
OutputFile o = outputs.iterator().next();
assertEquals(o.path, "X.java");
assertTrue(o.contents.contains("[\\\"Y\\\"]"));
assertTrue(o.contents.contains("[\\\"g\\\"]"));
}
/**
* Checks that a schema passes through the SpecificCompiler, and, optionally,
* uses the system's Java compiler to check that the generated code is valid.
*/
public static void assertCompiles(File dstDir, Schema schema, boolean useJavaCompiler) throws IOException {
Collection<OutputFile> outputs = new SpecificCompiler(schema).compile();
assertNotNull(outputs);
if (useJavaCompiler) {
assertCompilesWithJavaCompiler(dstDir, outputs);
}
}
/**
* Checks that a protocol passes through the SpecificCompiler, and, optionally,
* uses the system's Java compiler to check that the generated code is valid.
*/
public static void assertCompiles(File dstDir, Protocol protocol, boolean useJavaCompiler) throws IOException {
Collection<OutputFile> outputs = new SpecificCompiler(protocol).compile();
assertNotNull(outputs);
if (useJavaCompiler) {
assertCompilesWithJavaCompiler(dstDir, outputs);
}
}
/** Uses the system's java compiler to actually compile the generated code. */
static void assertCompilesWithJavaCompiler(File dstDir, Collection<OutputFile> outputs) throws IOException {
if (outputs.isEmpty()) {
return; // Nothing to compile!
}
List<File> javaFiles = new ArrayList<>();
for (OutputFile o : outputs) {
javaFiles.add(o.writeToDestination(null, dstDir));
}
JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
CompilationTask cTask = compiler.getTask(null, fileManager, null, null, null,
fileManager.getJavaFileObjects(javaFiles.toArray(new File[0])));
assertTrue(cTask.call());
}
private static String SCHEMA1 = "{ \"name\": \"volatile\", \"type\": \"record\", "
+ " \"fields\": [{\"name\": \"ownerAddress\", \"type\": [\"null\",{ \"type\": \"string\",\"java-class\": \"java.net.URI\"}], \"default\": null},"
+ " {\"name\": \"ownerURL\", \"type\": [\"null\",{ \"type\": \"string\",\"java-class\": \"java.net.URL\"}], \"default\": null}]}";
@Test
void generateExceptionCodeBlock() throws IOException {
Collection<OutputFile> outputs = new SpecificCompiler(new Schema.Parser().parse(SCHEMA1)).compile();
assertEquals(1, outputs.size());
String contents = outputs.iterator().next().contents;
assertTrue(contents.contains("private java.net.URI"));
assertTrue(contents.contains("catch (java.net.URISyntaxException e)"));
assertTrue(contents.contains("private java.net.URL"));
assertTrue(contents.contains("catch (java.net.MalformedURLException e)"));
}
}
| 7,485 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.avro.Protocol.Message;
/**
 * This class represents the context of an RPC call or RPC handshake. Designed
 * to provide information to RPC plugin writers, this class encapsulates
 * information about the rpc exchange, including handshake and call metadata.
 * Note: this data includes full copies of the RPC payload, so plugins which
 * store RPCContexts beyond the life of each call should be conscious of memory
 * use.
 *
 */
public class RPCContext {
  private HandshakeRequest handshakeRequest;
  private HandshakeResponse handshakeResponse;
  // Per-call metadata in each direction; lazily created by the accessors below.
  protected Map<String, ByteBuffer> requestCallMeta, responseCallMeta;
  // Exactly one of response/error is non-null once the call completes.
  protected Object response;
  protected Exception error;
  private Message message;
  // Serialized call payloads, excluding handshake and metadata.
  List<ByteBuffer> requestPayload;
  List<ByteBuffer> responsePayload;

  /** Set the handshake request of this RPC. */
  public void setHandshakeRequest(HandshakeRequest handshakeRequest) {
    this.handshakeRequest = handshakeRequest;
  }

  /** Get the handshake request of this RPC. */
  public HandshakeRequest getHandshakeRequest() {
    return this.handshakeRequest;
  }

  /** Set the handshake response of this RPC. */
  public void setHandshakeResponse(HandshakeResponse handshakeResponse) {
    this.handshakeResponse = handshakeResponse;
  }

  /** Get the handshake response of this RPC. */
  public HandshakeResponse getHandshakeResponse() {
    return this.handshakeResponse;
  }

  /**
   * This is an access method for the handshake state provided by the client to
   * the server. Lazily initializes the map; never returns null.
   *
   * @return a map representing handshake state from the client to the server
   */
  public Map<String, ByteBuffer> requestHandshakeMeta() {
    if (handshakeRequest.getMeta() == null)
      handshakeRequest.setMeta(new HashMap<>());
    return handshakeRequest.getMeta();
  }

  void setRequestHandshakeMeta(Map<String, ByteBuffer> newmeta) {
    handshakeRequest.setMeta(newmeta);
  }

  /**
   * This is an access method for the handshake state provided by the server back
   * to the client. Lazily initializes the map; never returns null.
   *
   * @return a map representing handshake state from the server to the client
   */
  public Map<String, ByteBuffer> responseHandshakeMeta() {
    if (handshakeResponse.getMeta() == null)
      handshakeResponse.setMeta(new HashMap<>());
    return handshakeResponse.getMeta();
  }

  void setResponseHandshakeMeta(Map<String, ByteBuffer> newmeta) {
    handshakeResponse.setMeta(newmeta);
  }

  /**
   * This is an access method for the per-call state provided by the client to the
   * server. Lazily initializes the map; never returns null.
   *
   * @return a map representing per-call state from the client to the server
   */
  public Map<String, ByteBuffer> requestCallMeta() {
    if (requestCallMeta == null) {
      requestCallMeta = new HashMap<>();
    }
    return requestCallMeta;
  }

  void setRequestCallMeta(Map<String, ByteBuffer> newmeta) {
    requestCallMeta = newmeta;
  }

  /**
   * This is an access method for the per-call state provided by the server back
   * to the client. Lazily initializes the map; never returns null.
   *
   * @return a map representing per-call state from the server to the client
   */
  public Map<String, ByteBuffer> responseCallMeta() {
    if (responseCallMeta == null) {
      responseCallMeta = new HashMap<>();
    }
    return responseCallMeta;
  }

  void setResponseCallMeta(Map<String, ByteBuffer> newmeta) {
    responseCallMeta = newmeta;
  }

  // Recording a response clears any previously recorded error, and vice versa.
  void setResponse(Object response) {
    this.response = response;
    this.error = null;
  }

  /**
   * The response object generated at the server, if it exists. If an exception
   * was generated, this will be null.
   *
   * @return the response created by this RPC, or null if an exception was
   *         generated
   */
  public Object response() {
    return response;
  }

  void setError(Exception error) {
    this.response = null;
    this.error = error;
  }

  /**
   * The exception generated at the server, or null if no such exception has
   * occurred
   *
   * @return the exception generated at the server, or null if no such exception
   */
  public Exception error() {
    return error;
  }

  /**
   * Indicates whether an exception was generated at the server
   *
   * @return true if an exception was generated at the server, or false if not
   */
  public boolean isError() {
    return error != null;
  }

  /** Sets the {@link Message} corresponding to this RPC */
  public void setMessage(Message message) {
    this.message = message;
  }

  /**
   * Returns the {@link Message} corresponding to this RPC
   *
   * @return this RPC's {@link Message}
   */
  public Message getMessage() {
    return message;
  }

  /**
   * Sets the serialized payload of the request in this RPC. Will not include
   * handshake or meta-data.
   */
  public void setRequestPayload(List<ByteBuffer> payload) {
    this.requestPayload = payload;
  }

  /**
   * Returns the serialized payload of the request in this RPC. Will only be
   * generated from a Requestor and will not include handshake or meta-data. If
   * the request payload has not been set yet, returns null.
   *
   * @return this RPC's request payload.
   */
  public List<ByteBuffer> getRequestPayload() {
    return this.requestPayload;
  }

  /**
   * Returns the serialized payload of the response in this RPC. Will only be
   * generated from a Responder and will not include handshake or meta-data. If
   * the response payload has not been set yet, returns null.
   *
   * @return this RPC's response payload.
   */
  public List<ByteBuffer> getResponsePayload() {
    return this.responsePayload;
  }

  /**
   * Sets the serialized payload of the response in this RPC. Will not include
   * handshake or meta-data.
   */
  public void setResponsePayload(List<ByteBuffer> payload) {
    this.responsePayload = payload;
  }
}
| 7,486 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.DatagramChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A datagram-based server implementation. This uses a simple, non-standard wire
 * protocol and is not intended for production services.
 */
public class DatagramServer extends Thread implements Server {
  private static final Logger LOG = LoggerFactory.getLogger(DatagramServer.class);

  private final Responder responder;
  private final DatagramChannel channel;
  private final Transceiver transceiver;

  /**
   * Binds a datagram socket to {@code addr} and prepares a daemon serving
   * thread; call {@link #start()} to begin handling requests.
   */
  public DatagramServer(Responder responder, SocketAddress addr) throws IOException {
    String name = "DatagramServer on " + addr;
    this.responder = responder;
    this.channel = DatagramChannel.open();
    channel.socket().bind(addr);
    this.transceiver = new DatagramTransceiver(channel);
    setName(name);
    setDaemon(true);
  }

  @Override
  public int getPort() {
    return channel.socket().getLocalPort();
  }

  @Override
  public void run() {
    while (true) {
      try {
        // One request/response exchange per iteration.
        transceiver.writeBuffers(responder.respond(transceiver.readBuffers()));
      } catch (ClosedChannelException e) {
        return; // normal shutdown path, triggered by close()
      } catch (IOException e) {
        LOG.warn("unexpected error", e);
        throw new RuntimeException(e);
      }
    }
  }

  @Override
  public void close() {
    // Fix: interrupting alone only stops the thread while it is blocked in an
    // interruptible channel operation. Closing the channel guarantees the
    // run() loop observes a ClosedChannelException and terminates.
    try {
      channel.close();
    } catch (IOException e) {
      LOG.warn("failed to close channel", e);
    }
    this.interrupt();
  }

  public static void main(String[] arg) throws Exception {
    DatagramServer server = new DatagramServer(null, new InetSocketAddress(0));
    server.start();
    System.out.println("started");
    server.join();
  }
}
| 7,487 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/HttpTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.EOFException;
import java.net.Proxy;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.net.URL;
import java.net.HttpURLConnection;
/** An HTTP-based {@link Transceiver} implementation. */
public class HttpTransceiver extends Transceiver {
  static final String CONTENT_TYPE = "avro/binary";

  private URL url;
  private Proxy proxy;
  private HttpURLConnection connection;
  private int timeout; // connect/read timeout in ms; 0 means no timeout

  public HttpTransceiver(URL url) {
    this.url = url;
  }

  public HttpTransceiver(URL url, Proxy proxy) {
    this(url);
    this.proxy = proxy;
  }

  /** Set the connect and read timeouts, in milliseconds. */
  public void setTimeout(int timeout) {
    this.timeout = timeout;
  }

  @Override
  public String getRemoteName() {
    return this.url.toString();
  }

  /** Reads the length-prefixed buffers of the response to the last POST. */
  @Override
  public synchronized List<ByteBuffer> readBuffers() throws IOException {
    try (InputStream in = connection.getInputStream()) {
      return readBuffers(in);
    }
  }

  /** POSTs the given buffers as a length-prefixed request body. */
  @Override
  public synchronized void writeBuffers(List<ByteBuffer> buffers) throws IOException {
    if (proxy == null)
      connection = (HttpURLConnection) url.openConnection();
    else
      connection = (HttpURLConnection) url.openConnection(proxy);
    connection.setRequestMethod("POST");
    connection.setRequestProperty("Content-Type", CONTENT_TYPE);
    connection.setRequestProperty("Content-Length", Integer.toString(getLength(buffers)));
    connection.setDoOutput(true);
    connection.setReadTimeout(timeout);
    connection.setConnectTimeout(timeout);
    try (OutputStream out = connection.getOutputStream()) {
      writeBuffers(buffers, out);
    }
  }

  /**
   * Total wire size of the buffers: each is preceded by a 4-byte length prefix,
   * and the sequence ends with a 4-byte zero terminator.
   */
  static int getLength(List<ByteBuffer> buffers) {
    int length = 0;
    for (ByteBuffer buffer : buffers) {
      length += 4;
      length += buffer.remaining();
    }
    length += 4; // zero-length terminator
    return length;
  }

  /** Reads length-prefixed buffers from {@code in} until the zero terminator. */
  static List<ByteBuffer> readBuffers(InputStream in) throws IOException {
    List<ByteBuffer> buffers = new ArrayList<>();
    while (true) {
      int length = readLength(in);
      if (length == 0) { // end of buffers
        return buffers;
      }
      ByteBuffer buffer = ByteBuffer.allocate(length);
      while (buffer.hasRemaining()) {
        int p = buffer.position();
        int i = in.read(buffer.array(), p, buffer.remaining());
        if (i < 0)
          throw new EOFException("Unexpected EOF");
        ((Buffer) buffer).position(p + i);
      }
      ((Buffer) buffer).flip();
      buffers.add(buffer);
    }
  }

  /**
   * Reads a big-endian 4-byte length prefix.
   *
   * Fix: the previous inline expression {@code (in.read() << 24) + ...} mixed
   * the -1 EOF sentinel of {@link InputStream#read()} into the shifted sum,
   * producing a garbage length on a truncated stream instead of a clear error.
   */
  private static int readLength(InputStream in) throws IOException {
    int b1 = in.read();
    int b2 = in.read();
    int b3 = in.read();
    int b4 = in.read();
    if ((b1 | b2 | b3 | b4) < 0)
      throw new EOFException("Unexpected EOF reading length prefix");
    return (b1 << 24) + (b2 << 16) + (b3 << 8) + b4;
  }

  /** Writes the buffers to {@code out}, each length-prefixed, zero-terminated. */
  static void writeBuffers(List<ByteBuffer> buffers, OutputStream out) throws IOException {
    for (ByteBuffer buffer : buffers) {
      // NOTE(review): the prefix uses limit() while only remaining() bytes are
      // written; these agree only when position() == 0, which appears to be
      // the invariant for buffers passed here — confirm against callers.
      writeLength(buffer.limit(), out); // length-prefix
      out.write(buffer.array(), buffer.position(), buffer.remaining());
      ((Buffer) buffer).position(buffer.limit());
    }
    writeLength(0, out); // null-terminate
  }

  private static void writeLength(int length, OutputStream out) throws IOException {
    out.write(0xff & (length >>> 24));
    out.write(0xff & (length >>> 16));
    out.write(0xff & (length >>> 8));
    out.write(0xff & length);
  }
}
| 7,488 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.util.Map;
import java.net.SocketAddress;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslException;
import javax.security.auth.callback.CallbackHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link Server} that uses {@link javax.security.sasl} for authentication and
 * encryption.
 */
public class SaslSocketServer extends SocketServer {
  // Fix: the logger was created for javax.security.sasl.SaslServer.class,
  // so log output appeared under the wrong category.
  private static final Logger LOG = LoggerFactory.getLogger(SaslSocketServer.class);

  /** Produces the per-connection {@link SaslServer} securing each transceiver. */
  private static abstract class SaslServerFactory {
    protected abstract SaslServer getServer() throws SaslException;
  }

  private SaslServerFactory factory;

  /**
   * Create using SASL's anonymous
   * (<a href="https://www.ietf.org/rfc/rfc2245.txt">RFC 2245</a>) mechanism.
   */
  public SaslSocketServer(Responder responder, SocketAddress addr) throws IOException {
    this(responder, addr, new SaslServerFactory() {
      @Override
      public SaslServer getServer() {
        return new AnonymousServer();
      }
    });
  }

  /** Create using the specified {@link SaslServer} parameters. */
  public SaslSocketServer(Responder responder, SocketAddress addr, final String mechanism, final String protocol,
      final String serverName, final Map<String, ?> props, final CallbackHandler cbh) throws IOException {
    this(responder, addr, new SaslServerFactory() {
      @Override
      public SaslServer getServer() throws SaslException {
        return Sasl.createSaslServer(mechanism, protocol, serverName, props, cbh);
      }
    });
  }

  private SaslSocketServer(Responder responder, SocketAddress addr, SaslServerFactory factory) throws IOException {
    super(responder, addr);
    this.factory = factory;
  }

  /** Wraps each accepted connection in a SASL-secured transceiver. */
  @Override
  protected Transceiver getTransceiver(SocketChannel channel) throws IOException {
    return new SaslSocketTransceiver(channel, factory.getServer());
  }

  /**
   * Minimal ANONYMOUS-mechanism server: the single client response is the
   * (UTF-8) trace string, taken as the authorization id. No wrapping/security
   * layer is negotiated, hence wrap/unwrap are unsupported.
   */
  private static class AnonymousServer implements SaslServer {
    private String user;

    @Override
    public String getMechanismName() {
      return "ANONYMOUS";
    }

    @Override
    public byte[] evaluateResponse(byte[] response) throws SaslException {
      this.user = new String(response, StandardCharsets.UTF_8);
      return null; // single-round-trip mechanism: no challenge to send back
    }

    @Override
    public boolean isComplete() {
      return user != null;
    }

    @Override
    public String getAuthorizationID() {
      return user;
    }

    @Override
    public byte[] unwrap(byte[] incoming, int offset, int len) {
      throw new UnsupportedOperationException();
    }

    @Override
    public byte[] wrap(byte[] outgoing, int offset, int len) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Object getNegotiatedProperty(String propName) {
      return null;
    }

    @Override
    public void dispose() {
    }
  }
}
| 7,489 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.util.List;
import java.util.ArrayList;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A datagram-based {@link Transceiver} implementation. This uses a simple,
 * non-standard wire protocol and is not intended for production services.
 */
public class DatagramTransceiver extends Transceiver {
  private static final Logger LOG = LoggerFactory.getLogger(DatagramTransceiver.class);

  // Single-datagram capacity; a message larger than this will overflow the
  // shared buffer (BufferOverflowException in writeBuffers).
  private static final int MAX_SIZE = 16 * 1024;

  private DatagramChannel channel;
  private SocketAddress remote;
  private ByteBuffer buffer = ByteBuffer.allocate(MAX_SIZE);

  @Override
  public String getRemoteName() {
    return remote.toString();
  }

  public DatagramTransceiver(SocketAddress remote) throws IOException {
    this(DatagramChannel.open());
    this.remote = remote;
  }

  public DatagramTransceiver(DatagramChannel channel) {
    this.channel = channel;
  }

  /** Receives one datagram and splits it into its length-prefixed buffers. */
  @Override
  public synchronized List<ByteBuffer> readBuffers() throws IOException {
    ((Buffer) buffer).clear();
    remote = channel.receive(buffer);
    // Fix: parameterized SLF4J logging instead of eager string concatenation.
    LOG.info("received from {}", remote);
    ((Buffer) buffer).flip();
    List<ByteBuffer> buffers = new ArrayList<>();
    while (true) {
      int length = buffer.getInt();
      if (length == 0) { // end of buffers
        return buffers;
      }
      ByteBuffer chunk = buffer.slice(); // use data without copying
      ((Buffer) chunk).limit(length);
      ((Buffer) buffer).position(buffer.position() + length);
      buffers.add(chunk);
    }
  }

  /** Packs the buffers, length-prefixed and zero-terminated, into one datagram. */
  @Override
  public synchronized void writeBuffers(List<ByteBuffer> buffers) throws IOException {
    ((Buffer) buffer).clear();
    for (ByteBuffer b : buffers) {
      buffer.putInt(b.remaining());
      buffer.put(b); // copy data. sigh.
    }
    buffer.putInt(0); // zero-length terminator
    ((Buffer) buffer).flip();
    channel.send(buffer, remote);
    LOG.info("sent to {}", remote);
  }
}
| 7,490 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Callback.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
/**
* Interface for receiving asynchronous callbacks. For each request with an
* asynchronous callback, either {@link #handleResult(Object)} or
* {@link #handleError(Throwable)} will be invoked.
*/
public interface Callback<T> {
  /**
   * Receives a callback result. Invoked at most once per request, and never
   * together with {@link #handleError(Throwable)} for the same request.
   *
   * @param result the result returned in the callback.
   */
  void handleResult(T result);

  /**
   * Receives an error. Invoked at most once per request, and never together
   * with {@link #handleResult(Object)} for the same request.
   *
   * @param error the error returned in the callback.
   */
  void handleError(Throwable error);
}
| 7,491 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/LocalTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
/** Implementation of IPC that remains in process. */
public class LocalTransceiver extends Transceiver {
  private final Responder responder;

  public LocalTransceiver(Responder responder) {
    this.responder = responder;
  }

  @Override
  public String getRemoteName() {
    return "local";
  }

  /**
   * Dispatches the request directly to the wrapped {@link Responder} without
   * any serialization over a wire.
   */
  @Override
  public List<ByteBuffer> transceive(List<ByteBuffer> request) throws IOException {
    return responder.respond(request);
  }

  /** Unsupported: this transceiver never reads from a channel. */
  @Override
  public List<ByteBuffer> readBuffers() throws IOException {
    throw new UnsupportedOperationException();
  }

  /** Unsupported: this transceiver never writes to a channel. */
  @Override
  public void writeBuffers(List<ByteBuffer> buffers) throws IOException {
    throw new UnsupportedOperationException();
  }
}
| 7,492 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/SaslSocketTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.io.EOFException;
import java.net.SocketAddress;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslServer;
import org.apache.avro.Protocol;
import org.apache.avro.util.ByteBufferOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link Transceiver} that uses {@link javax.security.sasl} for
* authentication and encryption.
*/
public class SaslSocketTransceiver extends Transceiver {
  private static final Logger LOG = LoggerFactory.getLogger(SaslSocketTransceiver.class);
  private static final ByteBuffer EMPTY = ByteBuffer.allocate(0);

  /** Wire status codes for the SASL negotiation; sent as a single byte (the ordinal). */
  private static enum Status {
    START, CONTINUE, FAIL, COMPLETE
  }

  private SaslParticipant sasl;
  private SocketChannel channel;
  private boolean dataIsWrapped;
  private boolean saslResponsePiggybacked;
  private Protocol remote;

  private ByteBuffer readHeader = ByteBuffer.allocate(4);
  private ByteBuffer writeHeader = ByteBuffer.allocate(4);
  private ByteBuffer zeroHeader = ByteBuffer.allocate(4).putInt(0);

  /**
   * Create using SASL's anonymous
   * (<a href="https://www.ietf.org/rfc/rfc2245.txt">RFC 2245</a>) mechanism.
   */
  public SaslSocketTransceiver(SocketAddress address) throws IOException {
    this(address, new AnonymousClient());
  }

  /** Create using the specified {@link SaslClient}. */
  public SaslSocketTransceiver(SocketAddress address, SaslClient saslClient) throws IOException {
    this.sasl = new SaslParticipant(saslClient);
    this.channel = SocketChannel.open(address);
    this.channel.socket().setTcpNoDelay(true);
    LOG.debug("open to {}", getRemoteName());
    open(true);
  }

  /** Create using the specified {@link SaslServer}. */
  public SaslSocketTransceiver(SocketChannel channel, SaslServer saslServer) throws IOException {
    this.sasl = new SaslParticipant(saslServer);
    this.channel = channel;
    LOG.debug("open from {}", getRemoteName());
    open(false);
  }

  @Override
  public boolean isConnected() {
    return remote != null;
  }

  @Override
  public void setRemote(Protocol remote) {
    this.remote = remote;
  }

  @Override
  public Protocol getRemote() {
    return remote;
  }

  @Override
  public String getRemoteName() {
    return channel.socket().getRemoteSocketAddress().toString();
  }

  @Override
  public synchronized List<ByteBuffer> transceive(List<ByteBuffer> request) throws IOException {
    if (saslResponsePiggybacked) { // still need to read response
      saslResponsePiggybacked = false;
      Status status = readStatus();
      ByteBuffer frame = readFrame();
      switch (status) {
      case COMPLETE:
        break;
      case FAIL:
        throw new SaslException("Fail: " + toString(frame));
      default:
        throw new IOException("Unexpected SASL status: " + status);
      }
    }
    return super.transceive(request);
  }

  /**
   * Runs the SASL challenge/response loop until both sides are complete, then
   * records whether subsequent data must be wrapped (integrity/privacy QOP).
   */
  private void open(boolean isClient) throws IOException {
    LOG.debug("beginning SASL negotiation");

    if (isClient) {
      ByteBuffer response = EMPTY;
      if (sasl.client.hasInitialResponse())
        response = ByteBuffer.wrap(sasl.evaluate(response.array()));
      write(Status.START, sasl.getMechanismName(), response);
      if (sasl.isComplete())
        saslResponsePiggybacked = true;
    }

    while (!sasl.isComplete()) {
      Status status = readStatus();
      ByteBuffer frame = readFrame();
      switch (status) {
      case START:
        String mechanism = toString(frame);
        frame = readFrame();
        if (!mechanism.equalsIgnoreCase(sasl.getMechanismName())) {
          write(Status.FAIL, "Wrong mechanism: " + mechanism);
          throw new SaslException("Wrong mechanism: " + mechanism);
        }
        // fall through: evaluate the initial response frame like any CONTINUE
      case CONTINUE:
        byte[] response;
        try {
          response = sasl.evaluate(frame.array());
          status = sasl.isComplete() ? Status.COMPLETE : Status.CONTINUE;
        } catch (SaslException e) {
          response = e.toString().getBytes(StandardCharsets.UTF_8);
          status = Status.FAIL;
        }
        write(status, response != null ? ByteBuffer.wrap(response) : EMPTY);
        break;
      case COMPLETE:
        sasl.evaluate(frame.array());
        if (!sasl.isComplete())
          throw new SaslException("Expected completion!");
        break;
      case FAIL:
        throw new SaslException("Fail: " + toString(frame));
      default:
        throw new IOException("Unexpected SASL status: " + status);
      }
    }
    LOG.debug("SASL opened");

    String qop = (String) sasl.getNegotiatedProperty(Sasl.QOP);
    LOG.debug("QOP = {}", qop);
    dataIsWrapped = (qop != null && !qop.equalsIgnoreCase("auth"));
  }

  private String toString(ByteBuffer buffer) {
    return new String(buffer.array(), StandardCharsets.UTF_8);
  }

  @Override
  public synchronized List<ByteBuffer> readBuffers() throws IOException {
    List<ByteBuffer> buffers = new ArrayList<>();
    while (true) {
      ByteBuffer buffer = readFrameAndUnwrap();
      if (((Buffer) buffer).remaining() == 0) // empty frame terminates the list
        return buffers;
      buffers.add(buffer);
    }
  }

  /** Reads the single status byte preceding each negotiation frame. */
  private Status readStatus() throws IOException {
    ByteBuffer buffer = ByteBuffer.allocate(1);
    read(buffer);
    int status = buffer.get();
    // Reject both out-of-range ends. The previous check used '>', which let
    // status == Status.values().length through and caused an
    // ArrayIndexOutOfBoundsException below instead of this IOException; a
    // negative byte was likewise unguarded.
    if (status < 0 || status >= Status.values().length)
      throw new IOException("Unexpected SASL status byte: " + status);
    return Status.values()[status];
  }

  private ByteBuffer readFrameAndUnwrap() throws IOException {
    ByteBuffer frame = readFrame();
    if (!dataIsWrapped)
      return frame;
    ByteBuffer unwrapped = ByteBuffer.wrap(sasl.unwrap(frame.array()));
    LOG.debug("unwrapped data of length: {}", unwrapped.remaining());
    return unwrapped;
  }

  /** Reads one length-prefixed frame: a 4-byte big-endian length, then the payload. */
  private ByteBuffer readFrame() throws IOException {
    read(readHeader);
    ByteBuffer buffer = ByteBuffer.allocate(readHeader.getInt());
    LOG.debug("about to read: {} bytes", buffer.capacity());
    read(buffer);
    return buffer;
  }

  /** Fills {@code buffer} completely from the channel, then flips it for reading. */
  private void read(ByteBuffer buffer) throws IOException {
    ((Buffer) buffer).clear();
    while (buffer.hasRemaining())
      if (channel.read(buffer) == -1)
        throw new EOFException();
    ((Buffer) buffer).flip();
  }

  @Override
  public synchronized void writeBuffers(List<ByteBuffer> buffers) throws IOException {
    if (buffers == null)
      return; // no data to write
    List<ByteBuffer> writes = new ArrayList<>(buffers.size() * 2 + 1);
    int currentLength = 0;
    ByteBuffer currentHeader = writeHeader;
    for (ByteBuffer buffer : buffers) { // gather writes
      if (buffer.remaining() == 0)
        continue; // ignore empties
      if (dataIsWrapped) {
        LOG.debug("wrapping data of length: {}", buffer.remaining());
        buffer = ByteBuffer.wrap(sasl.wrap(buffer.array(), buffer.position(), buffer.remaining()));
      }
      int length = buffer.remaining();
      if (!dataIsWrapped // can append buffers on wire
          && (currentLength + length) <= ByteBufferOutputStream.BUFFER_SIZE) {
        if (currentLength == 0)
          writes.add(currentHeader);
        currentLength += length;
        ((Buffer) currentHeader).clear();
        currentHeader.putInt(currentLength);
        LOG.debug("adding {} to write, total now {}", length, currentLength);
      } else {
        currentLength = length;
        currentHeader = ByteBuffer.allocate(4).putInt(length);
        writes.add(currentHeader);
        LOG.debug("planning write of {}", length);
      }
      ((Buffer) currentHeader).flip();
      writes.add(buffer);
    }
    ((Buffer) zeroHeader).flip(); // zero-terminate
    writes.add(zeroHeader);

    writeFully(writes.toArray(new ByteBuffer[0]));
  }

  private void write(Status status, String prefix, ByteBuffer response) throws IOException {
    LOG.debug("write status: {} {}", status, prefix);
    write(status, prefix);
    write(response);
  }

  private void write(Status status, String response) throws IOException {
    write(status, ByteBuffer.wrap(response.getBytes(StandardCharsets.UTF_8)));
  }

  private void write(Status status, ByteBuffer response) throws IOException {
    LOG.debug("write status: {}", status);
    ByteBuffer statusBuffer = ByteBuffer.allocate(1);
    ((Buffer) statusBuffer).clear();
    ((Buffer) statusBuffer.put((byte) (status.ordinal()))).flip();
    writeFully(statusBuffer);
    write(response);
  }

  private void write(ByteBuffer response) throws IOException {
    LOG.debug("writing: {}", response.remaining());
    ((Buffer) writeHeader).clear();
    ((Buffer) writeHeader.putInt(response.remaining())).flip();
    writeFully(writeHeader, response);
  }

  /** Gathering write that loops until every buffer is fully drained. */
  private void writeFully(ByteBuffer... buffers) throws IOException {
    int length = buffers.length;
    int start = 0;
    do {
      channel.write(buffers, start, length - start);
      while (buffers[start].remaining() == 0) {
        start++;
        if (start == length)
          return;
      }
    } while (true);
  }

  @Override
  public void close() throws IOException {
    if (channel.isOpen()) {
      LOG.info("closing to " + getRemoteName());
      channel.close();
    }
    sasl.dispose();
  }

  /**
   * Used to abstract over the <code>SaslServer</code> and <code>SaslClient</code>
   * classes, which share a lot of their interface, but unfortunately don't share
   * a common superclass.
   */
  private static class SaslParticipant {
    // One of these will always be null.
    public SaslServer server;
    public SaslClient client;

    public SaslParticipant(SaslServer server) {
      this.server = server;
    }

    public SaslParticipant(SaslClient client) {
      this.client = client;
    }

    public String getMechanismName() {
      if (client != null)
        return client.getMechanismName();
      else
        return server.getMechanismName();
    }

    public boolean isComplete() {
      if (client != null)
        return client.isComplete();
      else
        return server.isComplete();
    }

    public void dispose() throws SaslException {
      if (client != null)
        client.dispose();
      else
        server.dispose();
    }

    public byte[] unwrap(byte[] buf) throws SaslException {
      if (client != null)
        return client.unwrap(buf, 0, buf.length);
      else
        return server.unwrap(buf, 0, buf.length);
    }

    public byte[] wrap(byte[] buf, int start, int len) throws SaslException {
      if (client != null)
        return client.wrap(buf, start, len);
      else
        return server.wrap(buf, start, len);
    }

    public Object getNegotiatedProperty(String propName) {
      if (client != null)
        return client.getNegotiatedProperty(propName);
      else
        return server.getNegotiatedProperty(propName);
    }

    public byte[] evaluate(byte[] buf) throws SaslException {
      if (client != null)
        return client.evaluateChallenge(buf);
      else
        return server.evaluateResponse(buf);
    }
  }

  /** Trivial SASL ANONYMOUS client: sends the local user name, never wraps data. */
  private static class AnonymousClient implements SaslClient {
    @Override
    public String getMechanismName() {
      return "ANONYMOUS";
    }

    @Override
    public boolean hasInitialResponse() {
      return true;
    }

    @Override
    public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
      return System.getProperty("user.name").getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public boolean isComplete() {
      return true;
    }

    @Override
    public byte[] unwrap(byte[] incoming, int offset, int len) {
      throw new UnsupportedOperationException();
    }

    @Override
    public byte[] wrap(byte[] outgoing, int offset, int len) {
      throw new UnsupportedOperationException();
    }

    @Override
    public Object getNegotiatedProperty(String propName) {
      return null;
    }

    @Override
    public void dispose() {
    }
  }
}
| 7,493 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Transceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.avro.Protocol;
/** Base transport class used by {@link Requestor}. */
public abstract class Transceiver implements Closeable {

  // Guards the underlying channel so request/response pairs are not interleaved
  // by concurrent callers.
  private final ReentrantLock channelLock = new ReentrantLock();

  /** Returns a name identifying the remote end of this connection. */
  public abstract String getRemoteName() throws IOException;

  /**
   * Acquires an exclusive lock on the transceiver's channel.
   */
  public void lockChannel() {
    channelLock.lock();
  }

  /**
   * Releases the lock on the transceiver's channel if held by the calling thread.
   */
  public void unlockChannel() {
    // isHeldByCurrentThread guard makes this safe to call even when the caller
    // never acquired the lock (unlock() would otherwise throw).
    if (channelLock.isHeldByCurrentThread()) {
      channelLock.unlock();
    }
  }

  /**
   * Called by {@link Requestor#request(String,Object)} for two-way messages. By
   * default calls {@link #writeBuffers(List)} followed by {@link #readBuffers()}.
   */
  public List<ByteBuffer> transceive(List<ByteBuffer> request) throws IOException {
    lockChannel();
    try {
      writeBuffers(request);
      return readBuffers();
    } finally {
      unlockChannel();
    }
  }

  /**
   * Called by {@link Requestor#request(String,Object,Callback)} for two-way
   * messages using callbacks.
   */
  public void transceive(List<ByteBuffer> request, Callback<List<ByteBuffer>> callback) throws IOException {
    // The default implementation works synchronously
    try {
      List<ByteBuffer> response = transceive(request);
      callback.handleResult(response);
    } catch (IOException e) {
      callback.handleError(e);
    }
  }

  /** Called by the default definition of {@link #transceive(List)}. */
  public abstract List<ByteBuffer> readBuffers() throws IOException;

  /** Called by {@link Requestor#request(String,Object)} for one-way messages. */
  public abstract void writeBuffers(List<ByteBuffer> buffers) throws IOException;

  /**
   * True if a handshake has been completed for this connection. Used to determine
   * whether a handshake need be completed prior to a one-way message. Requests
   * and responses are always prefixed by handshakes, but one-way messages are
   * not. If the first request sent over a connection is one-way, then a
   * handshake-only response is returned. Subsequent one-way messages over the
   * connection will have no response data sent. Returns false by default.
   */
  public boolean isConnected() {
    return false;
  }

  /**
   * Called with the remote protocol when a handshake has been completed. After
   * this has been called and while a connection is maintained,
   * {@link #isConnected()} should return true and #getRemote() should return this
   * protocol. Does nothing by default.
   */
  public void setRemote(Protocol protocol) {
  }

  /**
   * Returns the protocol passed to {@link #setRemote(Protocol)}. Throws
   * IllegalStateException by default.
   */
  public Protocol getRemote() {
    throw new IllegalStateException("Not connected.");
  }

  /** Closes the connection. Does nothing by default. */
  @Override
  public void close() throws IOException {
  }
}
| 7,494 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/RPCPlugin.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
/**
* An instrumentation API for RPC metadata. Each of these methods is invoked at
* key points during the RPC exchange. Additionally, path-based
* <em>metadata</em> that is passed along with the RPC call and can be set or
* queried by subsequent instrumentation points.
*/
public class RPCPlugin {

  // All hooks are intentional no-ops; subclasses override only the points
  // they care about.

  /**
   * Called on the client before the initial RPC handshake to setup any handshake
   * metadata for this plugin
   *
   * @param context the handshake rpc context
   */
  public void clientStartConnect(RPCContext context) {
  }

  /**
   * Called on the server during the RPC handshake
   *
   * @param context the handshake rpc context
   */
  public void serverConnecting(RPCContext context) {
  }

  /**
   * Called on the client after the initial RPC handshake
   *
   * @param context the handshake rpc context
   */
  public void clientFinishConnect(RPCContext context) {
  }

  /**
   * This method is invoked at the client before it issues the RPC call.
   *
   * @param context the per-call rpc context (in/out parameter)
   */
  public void clientSendRequest(RPCContext context) {
  }

  /**
   * This method is invoked at the RPC server when the request is received, but
   * before the call itself is executed
   *
   * @param context the per-call rpc context (in/out parameter)
   */
  public void serverReceiveRequest(RPCContext context) {
  }

  /**
   * This method is invoked at the server before the response is executed, but
   * before the response has been formulated
   *
   * @param context the per-call rpc context (in/out parameter)
   */
  public void serverSendResponse(RPCContext context) {
  }

  /**
   * This method is invoked at the client after the call is executed, and after
   * the client receives the response
   *
   * @param context the per-call rpc context
   */
  public void clientReceiveResponse(RPCContext context) {
  }
}
| 7,495 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/SocketTransceiver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.net.SocketAddress;
import java.nio.channels.SocketChannel;
import java.nio.channels.ClosedChannelException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.avro.Protocol;
/**
* A socket-based {@link Transceiver} implementation. This uses a simple,
* non-standard wire protocol and is not intended for production services.
*
* @deprecated use {@link SaslSocketTransceiver} instead.
*/
@Deprecated
public class SocketTransceiver extends Transceiver {
  private static final Logger LOG = LoggerFactory.getLogger(SocketTransceiver.class);

  private final SocketChannel channel;
  private final ByteBuffer header = ByteBuffer.allocate(4);

  private Protocol remote;

  public SocketTransceiver(SocketAddress address) throws IOException {
    this(SocketChannel.open(address));
  }

  public SocketTransceiver(SocketChannel channel) throws IOException {
    this.channel = channel;
    this.channel.socket().setTcpNoDelay(true);
    LOG.info("open to {}", getRemoteName());
  }

  @Override
  public String getRemoteName() {
    return channel.socket().getRemoteSocketAddress().toString();
  }

  @Override
  public synchronized List<ByteBuffer> readBuffers() throws IOException {
    List<ByteBuffer> buffers = new ArrayList<>();
    while (true) {
      ((Buffer) header).clear();
      while (header.hasRemaining()) {
        if (channel.read(header) < 0)
          throw new ClosedChannelException();
      }
      ((Buffer) header).flip();
      int length = header.getInt();
      if (length == 0) { // end of buffers
        return buffers;
      }
      ByteBuffer buffer = ByteBuffer.allocate(length);
      while (buffer.hasRemaining()) {
        if (channel.read(buffer) < 0)
          throw new ClosedChannelException();
      }
      ((Buffer) buffer).flip();
      buffers.add(buffer);
    }
  }

  @Override
  public synchronized void writeBuffers(List<ByteBuffer> buffers) throws IOException {
    if (buffers == null)
      return; // no data to write
    for (ByteBuffer buffer : buffers) {
      if (buffer.remaining() == 0)
        continue; // skip empties
      // Use remaining(), not limit(): for a buffer whose position is non-zero
      // only remaining() bytes go on the wire, so limit() would announce the
      // wrong frame length.
      writeLength(buffer.remaining()); // length-prefix
      // A single write() is not guaranteed to drain the buffer; loop until done.
      while (buffer.hasRemaining())
        channel.write(buffer);
    }
    writeLength(0); // null-terminate
  }

  /** Writes a 4-byte big-endian length prefix, looping over partial writes. */
  private void writeLength(int length) throws IOException {
    ((Buffer) header).clear();
    header.putInt(length);
    ((Buffer) header).flip();
    while (header.hasRemaining())
      channel.write(header);
  }

  @Override
  public boolean isConnected() {
    return remote != null;
  }

  @Override
  public void setRemote(Protocol remote) {
    this.remote = remote;
  }

  @Override
  public Protocol getRemote() {
    return remote;
  }

  @Override
  public void close() throws IOException {
    if (channel.isOpen()) {
      LOG.info("closing to " + getRemoteName());
      channel.close();
    }
  }
}
| 7,496 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/ResponderServlet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.avro.AvroRuntimeException;
/** An {@link HttpServlet} that responds to Avro RPC requests. */
public class ResponderServlet extends HttpServlet {
  private final Responder responder;

  public ResponderServlet(Responder responder) throws IOException {
    this.responder = responder;
  }

  /**
   * Handles one Avro RPC exchange: reads the framed request buffers from the
   * POST body, hands them to the {@link Responder}, and writes the framed
   * response back with the Avro content type.
   */
  @Override
  protected void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
    response.setContentType(HttpTransceiver.CONTENT_TYPE);
    List<ByteBuffer> inBuffers = HttpTransceiver.readBuffers(request.getInputStream());
    try {
      List<ByteBuffer> outBuffers = responder.respond(inBuffers);
      response.setContentLength(HttpTransceiver.getLength(outBuffers));
      HttpTransceiver.writeBuffers(outBuffers, response.getOutputStream());
    } catch (AvroRuntimeException e) {
      // Surface responder failures through the servlet container.
      throw new ServletException(e);
    }
  }
}
| 7,497 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Server.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
/** A server listening on a port. */
public interface Server {
  /** The port this server runs on. Only meaningful after {@link #start()}. */
  int getPort();

  /** Start this server. */
  void start();

  /** Stop this server and release its resources. */
  void close();

  /** Wait for this server to exit. */
  void join() throws InterruptedException;
}
| 7,498 |
0 | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro | Create_ds/avro/lang/java/ipc/src/main/java/org/apache/avro/ipc/Requestor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.ipc;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.ReentrantLock;
import java.util.List;
import java.util.Map;
import org.apache.avro.AvroRemoteException;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.Protocol.Message;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.avro.util.ByteBufferOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for the client side of a protocol interaction.
 * <p>
 * A {@code Requestor} serializes RPC requests for a local {@link Protocol},
 * sends them over a {@link Transceiver}, and deserializes responses or errors.
 * It also performs the Avro connection handshake, negotiating/caching the
 * remote protocol so subsequent calls can omit the protocol text.
 * <p>
 * Thread-safety: handshakes are serialized via an internal
 * {@link ReentrantLock}; remote-protocol caches are static concurrent maps
 * shared across all instances in the JVM.
 */
public abstract class Requestor {
  private static final Logger LOG = LoggerFactory.getLogger(Requestor.class);
  // Schema for per-call/handshake metadata: a map of string -> bytes.
  private static final Schema META = Schema.createMap(Schema.create(Schema.Type.BYTES));
  private static final GenericDatumReader<Map<String, ByteBuffer>> META_READER = new GenericDatumReader<>(META);
  private static final GenericDatumWriter<Map<String, ByteBuffer>> META_WRITER = new GenericDatumWriter<>(META);
  private final Protocol local;
  // Remote protocol learned via handshake; volatile because it is read/written
  // by whichever thread happens to perform or observe the handshake.
  private volatile Protocol remote;
  // When true, the next handshake includes the full local protocol text
  // (set after the server reports it does not know our protocol hash).
  private volatile boolean sendLocalText;
  private final Transceiver transceiver;
  // Ensures only one thread performs the connection handshake at a time.
  private final ReentrantLock handshakeLock = new ReentrantLock();
  // Plugins that may attach/inspect per-call and handshake metadata,
  // executed in insertion order.
  protected final List<RPCPlugin> rpcMetaPlugins;
  /** Returns the local protocol this requestor speaks. */
  public Protocol getLocal() {
    return local;
  }
  /** Returns the transceiver used to exchange request/response bytes. */
  public Transceiver getTransceiver() {
    return transceiver;
  }
  /**
   * Creates a Requestor.
   *
   * @param local       the local protocol describing the messages to send.
   * @param transceiver the transport used to reach the server.
   * @throws IOException declared for subclasses; not thrown here.
   */
  protected Requestor(Protocol local, Transceiver transceiver) throws IOException {
    this.local = local;
    this.transceiver = transceiver;
    this.rpcMetaPlugins = new CopyOnWriteArrayList<>();
  }
  /**
   * Adds a new plugin to manipulate RPC metadata. Plugins are executed in the
   * order that they are added.
   *
   * @param plugin a plugin that will manipulate RPC metadata
   */
  public void addRPCPlugin(RPCPlugin plugin) {
    rpcMetaPlugins.add(plugin);
  }
  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
  /**
   * Writes a request message and reads a response or error message.
   * Blocks until the response arrives for two-way messages; returns
   * immediately with {@code null} for one-way messages.
   *
   * @param messageName the protocol message to invoke.
   * @param request     the request datum to serialize.
   * @return the response datum, or null for a one-way message.
   * @throws Exception the server-thrown error, or a transport failure.
   */
  public Object request(String messageName, Object request) throws Exception {
    // Initialize request
    Request rpcRequest = new Request(messageName, request, new RPCContext());
    CallFuture<Object> future = /* only need a Future for two-way messages */
        rpcRequest.getMessage().isOneWay() ? null : new CallFuture<>();
    // Send request
    request(rpcRequest, future);
    if (future == null) // the message is one-way, so return immediately
      return null;
    try { // the message is two-way, wait for the result
      return future.get();
    } catch (ExecutionException e) {
      // Unwrap so callers see the original failure; wrap non-Exception
      // Throwables (e.g. Errors) as unchecked.
      Throwable error = e.getCause();
      if (error instanceof Exception) {
        throw (Exception) error;
      } else {
        throw new AvroRuntimeException(error);
      }
    }
  }
  /**
   * Writes a request message and returns the result through a Callback. Clients
   * can also use a Future interface by creating a new CallFuture<T>, passing it
   * in as the Callback parameter, and then waiting on that Future.
   *
   * @param <T> the return type of the message.
   * @param messageName the name of the message to invoke.
   * @param request the request data to send.
   * @param callback the callback which will be invoked when the response is
   *                 returned or an error occurs.
   * @throws AvroRemoteException if an exception is thrown to client by server.
   * @throws IOException if an I/O error occurs while sending the
   *                     message.
   * @throws AvroRuntimeException for another undeclared error while sending the
   *                              message.
   */
  public <T> void request(String messageName, Object request, Callback<T> callback)
      throws AvroRemoteException, IOException {
    request(new Request(messageName, request, new RPCContext()), callback);
  }
  /**
   * Writes a request message and returns the result through a Callback.
   * If the transceiver is not yet connected, this first performs the
   * handshake under {@link #handshakeLock} so only one thread negotiates.
   */
  <T> void request(Request request, Callback<T> callback) throws AvroRemoteException, IOException {
    Transceiver t = getTransceiver();
    if (!t.isConnected()) {
      // Acquire handshake lock so that only one thread is performing the
      // handshake and other threads block until the handshake is completed
      handshakeLock.lock();
      try {
        if (t.isConnected()) {
          // Another thread already completed the handshake; no need to hold
          // the write lock
          handshakeLock.unlock();
        } else {
          CallFuture<T> callFuture = new CallFuture<>(callback);
          t.transceive(request.getBytes(), new TransceiverCallback<>(request, callFuture));
          try {
            // Block until handshake complete
            callFuture.await();
          } catch (InterruptedException e) {
            // Restore the interrupted status
            Thread.currentThread().interrupt();
          }
          if (request.getMessage().isOneWay()) {
            // One-way callers get no response; surface any handshake-time
            // failure synchronously, preserving the concrete exception type.
            Throwable error = callFuture.getError();
            if (error != null) {
              if (error instanceof AvroRemoteException) {
                throw (AvroRemoteException) error;
              } else if (error instanceof AvroRuntimeException) {
                throw (AvroRuntimeException) error;
              } else if (error instanceof IOException) {
                throw (IOException) error;
              } else {
                throw new AvroRuntimeException(error);
              }
            }
          }
          return;
        }
      } finally {
        // The lock may have been released early in the already-connected
        // branch above; only unlock if this thread still holds it.
        if (handshakeLock.isHeldByCurrentThread()) {
          handshakeLock.unlock();
        }
      }
    }
    if (request.getMessage().isOneWay()) {
      // One-way on an established connection: fire and forget, but lock the
      // channel so concurrent writers don't interleave buffers.
      t.lockChannel();
      try {
        t.writeBuffers(request.getBytes());
        if (callback != null) {
          callback.handleResult(null);
        }
      } finally {
        t.unlockChannel();
      }
    } else {
      t.transceive(request.getBytes(), new TransceiverCallback<>(request, callback));
    }
  }
  // JVM-wide caches: remote transport name -> protocol MD5, and MD5 -> parsed
  // Protocol. Lets reconnections skip exchanging full protocol text.
  private static final ConcurrentMap<String, MD5> REMOTE_HASHES = new ConcurrentHashMap<>();
  private static final ConcurrentMap<MD5, Protocol> REMOTE_PROTOCOLS = new ConcurrentHashMap<>();
  private static final SpecificDatumWriter<HandshakeRequest> HANDSHAKE_WRITER = new SpecificDatumWriter<>(
      HandshakeRequest.class);
  private static final SpecificDatumReader<HandshakeResponse> HANDSHAKE_READER = new SpecificDatumReader<>(
      HandshakeResponse.class);
  /**
   * Writes a HandshakeRequest to {@code out} if the connection is not yet
   * established; a no-op otherwise. Includes local protocol text only when a
   * previous handshake indicated the server does not know our hash.
   */
  private void writeHandshake(Encoder out) throws IOException {
    if (getTransceiver().isConnected())
      return;
    MD5 localHash = new MD5();
    localHash.bytes(local.getMD5());
    String remoteName = transceiver.getRemoteName();
    MD5 remoteHash = REMOTE_HASHES.get(remoteName);
    if (remoteHash == null) { // guess remote is local
      remoteHash = localHash;
      remote = local;
    } else {
      remote = REMOTE_PROTOCOLS.get(remoteHash);
    }
    HandshakeRequest handshake = new HandshakeRequest();
    handshake.setClientHash(localHash);
    handshake.setServerHash(remoteHash);
    if (sendLocalText)
      handshake.setClientProtocol(local.toString());
    // Let plugins contribute handshake metadata before serializing.
    RPCContext context = new RPCContext();
    context.setHandshakeRequest(handshake);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.clientStartConnect(context);
    }
    handshake.setMeta(context.requestHandshakeMeta());
    HANDSHAKE_WRITER.write(handshake, out);
  }
  /**
   * Reads a HandshakeResponse from {@code in} (no-op when already connected).
   *
   * @return true if the handshake is complete and the connection established;
   *         false if the request must be resent (match == NONE, in which case
   *         the next attempt includes the local protocol text).
   */
  private boolean readHandshake(Decoder in) throws IOException {
    if (getTransceiver().isConnected())
      return true;
    boolean established = false;
    HandshakeResponse handshake = HANDSHAKE_READER.read(null, in);
    switch (handshake.getMatch()) {
    case BOTH: // server knew both hashes: nothing more to exchange
      established = true;
      sendLocalText = false;
      break;
    case CLIENT: // server sent its protocol; cache it, connection established
      LOG.debug("Handshake match = CLIENT");
      setRemote(handshake);
      established = true;
      sendLocalText = false;
      break;
    case NONE: // server needs our protocol text: resend with it included
      LOG.debug("Handshake match = NONE");
      setRemote(handshake);
      sendLocalText = true;
      break;
    default:
      throw new AvroRuntimeException("Unexpected match: " + handshake.getMatch());
    }
    RPCContext context = new RPCContext();
    context.setHandshakeResponse(handshake);
    for (RPCPlugin plugin : rpcMetaPlugins) {
      plugin.clientFinishConnect(context);
    }
    if (established)
      getTransceiver().setRemote(remote);
    return established;
  }
  /** Parses and caches the server protocol carried in a handshake response. */
  private void setRemote(HandshakeResponse handshake) throws IOException {
    remote = Protocol.parse(handshake.getServerProtocol().toString());
    MD5 remoteHash = handshake.getServerHash();
    REMOTE_HASHES.put(transceiver.getRemoteName(), remoteHash);
    REMOTE_PROTOCOLS.putIfAbsent(remoteHash, remote);
  }
  /** Return the remote protocol. Force a handshake if required. */
  public Protocol getRemote() throws IOException {
    if (remote != null)
      return remote; // already have it
    MD5 remoteHash = REMOTE_HASHES.get(transceiver.getRemoteName());
    if (remoteHash != null) {
      remote = REMOTE_PROTOCOLS.get(remoteHash);
      if (remote != null)
        return remote; // already cached
    }
    handshakeLock.lock();
    try {
      // force handshake
      ByteBufferOutputStream bbo = new ByteBufferOutputStream();
      // direct because the payload is tiny.
      Encoder out = ENCODER_FACTORY.directBinaryEncoder(bbo, null);
      writeHandshake(out);
      out.writeInt(0); // empty metadata
      out.writeString(""); // bogus message name
      List<ByteBuffer> response = getTransceiver().transceive(bbo.getBufferList());
      ByteBufferInputStream bbi = new ByteBufferInputStream(response);
      BinaryDecoder in = DecoderFactory.get().binaryDecoder(bbi, null);
      readHandshake(in);
      return this.remote;
    } finally {
      handshakeLock.unlock();
    }
  }
  /** Writes a request message. */
  public abstract void writeRequest(Schema schema, Object request, Encoder out) throws IOException;
  @Deprecated // for compatibility in 1.5
  public Object readResponse(Schema schema, Decoder in) throws IOException {
    return readResponse(schema, schema, in);
  }
  /** Reads a response message. */
  public abstract Object readResponse(Schema writer, Schema reader, Decoder in) throws IOException;
  @Deprecated // for compatibility in 1.5
  public Object readError(Schema schema, Decoder in) throws IOException {
    return readError(schema, schema, in);
  }
  /** Reads an error message. */
  public abstract Exception readError(Schema writer, Schema reader, Decoder in) throws IOException;
  /**
   * Handles callbacks from transceiver invocations.
   */
  protected class TransceiverCallback<T> implements Callback<List<ByteBuffer>> {
    private final Request request;
    private final Callback<T> callback;
    /**
     * Creates a TransceiverCallback.
     *
     * @param request the request to set.
     * @param callback the callback to set.
     */
    public TransceiverCallback(Request request, Callback<T> callback) {
      this.request = request;
      this.callback = callback;
    }
    @Override
    @SuppressWarnings("unchecked")
    public void handleResult(List<ByteBuffer> responseBytes) {
      ByteBufferInputStream bbi = new ByteBufferInputStream(responseBytes);
      BinaryDecoder in = DecoderFactory.get().binaryDecoder(bbi, null);
      try {
        if (!readHandshake(in)) {
          // Resend the handshake and return
          Request handshake = new Request(request);
          getTransceiver().transceive(handshake.getBytes(), new TransceiverCallback<>(handshake, callback));
          return;
        }
      } catch (Exception e) {
        // NOTE(review): handshake-read failures are logged and processing
        // falls through to the response read below — deliberate best-effort.
        LOG.error("Error handling transceiver callback: " + e, e);
      }
      // Read response; invoke callback
      Response response = new Response(request, in);
      Object responseObject;
      try {
        try {
          responseObject = response.getResponse();
        } catch (Exception e) {
          if (callback != null) {
            callback.handleError(e);
          }
          return;
        }
        if (callback != null) {
          callback.handleResult((T) responseObject);
        }
      } catch (Throwable t) {
        // Guard against exceptions thrown by user-supplied callback code.
        LOG.error("Error in callback handler: " + t, t);
      }
    }
    @Override
    public void handleError(Throwable error) {
      callback.handleError(error);
    }
  }
  /**
   * Encapsulates/generates a request.
   */
  class Request {
    private final String messageName;
    private final Object request;
    private final RPCContext context;
    private final BinaryEncoder encoder;
    private Message message;
    // Lazily-built wire form of the request; cached after first getBytes().
    private List<ByteBuffer> requestBytes;
    /**
     * Creates a Request.
     *
     * @param messageName the name of the message to invoke.
     * @param request the request data to send.
     * @param context the RPC context to use.
     */
    public Request(String messageName, Object request, RPCContext context) {
      this(messageName, request, context, null);
    }
    /**
     * Creates a Request.
     *
     * @param messageName the name of the message to invoke.
     * @param request the request data to send.
     * @param context the RPC context to use.
     * @param encoder the BinaryEncoder to use to serialize the request.
     */
    public Request(String messageName, Object request, RPCContext context, BinaryEncoder encoder) {
      this.messageName = messageName;
      this.request = request;
      this.context = context;
      this.encoder = ENCODER_FACTORY.binaryEncoder(new ByteBufferOutputStream(), encoder);
    }
    /**
     * Copy constructor.
     *
     * @param other Request from which to copy fields.
     */
    public Request(Request other) {
      this.messageName = other.messageName;
      this.request = other.request;
      this.context = other.context;
      this.encoder = other.encoder;
    }
    /**
     * Gets the message name.
     *
     * @return the message name.
     */
    public String getMessageName() {
      return messageName;
    }
    /**
     * Gets the RPC context.
     *
     * @return the RPC context.
     */
    public RPCContext getContext() {
      return context;
    }
    /**
     * Gets the Message associated with this request, resolving it lazily
     * against the local protocol.
     *
     * @return this request's message.
     */
    public Message getMessage() {
      if (message == null) {
        message = getLocal().getMessages().get(messageName);
        if (message == null) {
          throw new AvroRuntimeException("Not a local message: " + messageName);
        }
      }
      return message;
    }
    /**
     * Gets the request data, generating it first if necessary. Wire layout:
     * optional handshake, then call metadata, then message name, then the
     * request payload (serialized first but appended last).
     *
     * @return the request data.
     * @throws IOException if an error occurs generating the request data.
     */
    public List<ByteBuffer> getBytes() throws IOException {
      if (requestBytes == null) {
        ByteBufferOutputStream bbo = new ByteBufferOutputStream();
        BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bbo, encoder);
        // use local protocol to write request
        Message m = getMessage();
        context.setMessage(m);
        writeRequest(m.getRequest(), request, out); // write request payload
        out.flush();
        List<ByteBuffer> payload = bbo.getBufferList();
        writeHandshake(out); // prepend handshake if needed
        context.setRequestPayload(payload);
        for (RPCPlugin plugin : rpcMetaPlugins) {
          plugin.clientSendRequest(context); // get meta-data from plugins
        }
        META_WRITER.write(context.requestCallMeta(), out);
        out.writeString(m.getName()); // write message name
        out.flush();
        bbo.append(payload);
        requestBytes = bbo.getBufferList();
      }
      return requestBytes;
    }
  }
  /**
   * Encapsulates/parses a response.
   */
  class Response {
    private final Request request;
    private final BinaryDecoder in;
    /**
     * Creates a Response.
     *
     * @param request the Request associated with this response.
     */
    public Response(Request request) {
      this(request, null);
    }
    /**
     * Creates a Response.
     *
     * @param request the Request associated with this response.
     * @param in the BinaryDecoder to use to deserialize the response.
     */
    public Response(Request request, BinaryDecoder in) {
      this.request = request;
      this.in = in;
    }
    /**
     * Gets the RPC response, reading/deserializing it first if necessary.
     * Reads call metadata, then an error flag, then either the response datum
     * or the error (which is thrown).
     *
     * @return the RPC response.
     * @throws Exception if an error occurs reading/deserializing the response.
     */
    public Object getResponse() throws Exception {
      Message lm = request.getMessage();
      Message rm = remote.getMessages().get(request.getMessageName());
      if (rm == null)
        throw new AvroRuntimeException("Not a remote message: " + request.getMessageName());
      Transceiver t = getTransceiver();
      if ((lm.isOneWay() != rm.isOneWay()) && t.isConnected())
        throw new AvroRuntimeException("Not both one-way messages: " + request.getMessageName());
      if (lm.isOneWay() && t.isConnected())
        return null; // one-way w/ handshake
      RPCContext context = request.getContext();
      context.setResponseCallMeta(META_READER.read(null, in));
      if (!in.readBoolean()) { // no error
        // Resolve with remote schema as writer, local schema as reader.
        Object response = readResponse(rm.getResponse(), lm.getResponse(), in);
        context.setResponse(response);
        for (RPCPlugin plugin : rpcMetaPlugins) {
          plugin.clientReceiveResponse(context);
        }
        return response;
      } else {
        Exception error = readError(rm.getErrors(), lm.getErrors(), in);
        context.setError(error);
        for (RPCPlugin plugin : rpcMetaPlugins) {
          plugin.clientReceiveResponse(context);
        }
        throw error;
      }
    }
  }
}
| 7,499 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.