index
int64
0
0
repo_id
stringlengths
9
205
file_path
stringlengths
31
246
content
stringlengths
1
12.2M
__index_level_0__
int64
0
10k
0
Create_ds/avro/lang/java/tools/src/test/compiler
Create_ds/avro/lang/java/tools/src/test/compiler/output/NoSettersTest.java
/* NOTE(review): generated golden fixture — the expected SpecificCompiler output when setter generation is disabled (see "Test that setters are omitted" doc in the schema). Regenerate via the compiler rather than hand-editing; only review comments were added here. */
/** * Autogenerated by Avro * * DO NOT EDIT DIRECTLY */ package avro.examples.baseball; import org.apache.avro.generic.GenericArray; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; import org.apache.avro.message.BinaryMessageEncoder; import org.apache.avro.message.BinaryMessageDecoder; import org.apache.avro.message.SchemaStore; /** Test that setters are omitted */ @org.apache.avro.specific.AvroGenerated public class NoSettersTest extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { private static final long serialVersionUID = 8604146783520861700L; public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"NoSettersTest\",\"namespace\":\"avro.examples.baseball\",\"doc\":\"Test that setters are omitted\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"favorite_number\",\"type\":[\"int\",\"null\"]}]}"); public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } private static final SpecificData MODEL$ = new SpecificData(); private static final BinaryMessageEncoder<NoSettersTest> ENCODER = new BinaryMessageEncoder<>(MODEL$, SCHEMA$); private static final BinaryMessageDecoder<NoSettersTest> DECODER = new BinaryMessageDecoder<>(MODEL$, SCHEMA$); /** * Return the BinaryMessageEncoder instance used by this class. * @return the message encoder used by this class */ public static BinaryMessageEncoder<NoSettersTest> getEncoder() { return ENCODER; } /** * Return the BinaryMessageDecoder instance used by this class. * @return the message decoder used by this class */ public static BinaryMessageDecoder<NoSettersTest> getDecoder() { return DECODER; } /** * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. 
 * @param resolver a {@link SchemaStore} used to find schemas by fingerprint * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore */ public static BinaryMessageDecoder<NoSettersTest> createDecoder(SchemaStore resolver) { return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); } /** * Serializes this NoSettersTest to a ByteBuffer. * @return a buffer holding the serialized data for this instance * @throws java.io.IOException if this instance could not be serialized */ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { return ENCODER.encode(this); } /** * Deserializes a NoSettersTest from a ByteBuffer. * @param b a byte buffer holding serialized data for an instance of this class * @return a NoSettersTest instance decoded from the given buffer * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class */ public static NoSettersTest fromByteBuffer( java.nio.ByteBuffer b) throws java.io.IOException { return DECODER.decode(b); } private java.lang.CharSequence name; private java.lang.Integer favorite_number; /** * Default constructor. Note that this does not initialize fields * to their default values from the schema. If that is desired then * one should use <code>newBuilder()</code>. */ public NoSettersTest() {} /** * All-args constructor. * @param name The new value for name * @param favorite_number The new value for favorite_number */ public NoSettersTest(java.lang.CharSequence name, java.lang.Integer favorite_number) { this.name = name; this.favorite_number = favorite_number; } @Override public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } @Override public org.apache.avro.Schema getSchema() { return SCHEMA$; } // Used by DatumWriter. Applications should not call. 
@Override public java.lang.Object get(int field$) { switch (field$) { case 0: return name; case 1: return favorite_number; default: throw new IndexOutOfBoundsException("Invalid index: " + field$); } } // Used by DatumReader. Applications should not call. 
@Override @SuppressWarnings(value="unchecked") public void put(int field$, java.lang.Object value$) { switch (field$) { case 0: name = (java.lang.CharSequence)value$; break; case 1: favorite_number = (java.lang.Integer)value$; break; default: throw new IndexOutOfBoundsException("Invalid index: " + field$); } } /** * Gets the value of the 'name' field. * @return The value of the 'name' field. */ public java.lang.CharSequence getName() { return name; } /** * Gets the value of the 'favorite_number' field. * @return The value of the 'favorite_number' field. */ public java.lang.Integer getFavoriteNumber() { return favorite_number; } /** * Creates a new NoSettersTest RecordBuilder. * @return A new NoSettersTest RecordBuilder */ public static avro.examples.baseball.NoSettersTest.Builder newBuilder() { return new avro.examples.baseball.NoSettersTest.Builder(); } /** * Creates a new NoSettersTest RecordBuilder by copying an existing Builder. * @param other The existing builder to copy. * @return A new NoSettersTest RecordBuilder */ public static avro.examples.baseball.NoSettersTest.Builder newBuilder(avro.examples.baseball.NoSettersTest.Builder other) { if (other == null) { return new avro.examples.baseball.NoSettersTest.Builder(); } else { return new avro.examples.baseball.NoSettersTest.Builder(other); } } /** * Creates a new NoSettersTest RecordBuilder by copying an existing NoSettersTest instance. * @param other The existing instance to copy. 
 * @return A new NoSettersTest RecordBuilder */ public static avro.examples.baseball.NoSettersTest.Builder newBuilder(avro.examples.baseball.NoSettersTest other) { if (other == null) { return new avro.examples.baseball.NoSettersTest.Builder(); } else { return new avro.examples.baseball.NoSettersTest.Builder(other); } } /** * RecordBuilder for NoSettersTest instances. */ @org.apache.avro.specific.AvroGenerated public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase<NoSettersTest> implements org.apache.avro.data.RecordBuilder<NoSettersTest> { private java.lang.CharSequence name; private java.lang.Integer favorite_number; /** Creates a new Builder */ private Builder() { super(SCHEMA$, MODEL$); } /** * Creates a Builder by copying an existing Builder. * @param other The existing Builder to copy. */ private Builder(avro.examples.baseball.NoSettersTest.Builder other) { super(other); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); fieldSetFlags()[0] = other.fieldSetFlags()[0]; } if (isValidValue(fields()[1], other.favorite_number)) { this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number); fieldSetFlags()[1] = other.fieldSetFlags()[1]; } } /** * Creates a Builder by copying an existing NoSettersTest instance * @param other The existing instance to copy. */ private Builder(avro.examples.baseball.NoSettersTest other) { super(SCHEMA$, MODEL$); if (isValidValue(fields()[0], other.name)) { this.name = data().deepCopy(fields()[0].schema(), other.name); fieldSetFlags()[0] = true; } if (isValidValue(fields()[1], other.favorite_number)) { this.favorite_number = data().deepCopy(fields()[1].schema(), other.favorite_number); fieldSetFlags()[1] = true; } } /** * Gets the value of the 'name' field. * @return The value. */ public java.lang.CharSequence getName() { return name; } /** * Sets the value of the 'name' field. * @param value The value of 'name'. 
 * @return This builder. */ public avro.examples.baseball.NoSettersTest.Builder setName(java.lang.CharSequence value) { validate(fields()[0], value); this.name = value; fieldSetFlags()[0] = true; return this; } /** * Checks whether the 'name' field has been set. * @return True if the 'name' field has been set, false otherwise. */ public boolean hasName() { return fieldSetFlags()[0]; } /** * Clears the value of the 'name' field. * @return This builder. */ public avro.examples.baseball.NoSettersTest.Builder clearName() { name = null; fieldSetFlags()[0] = false; return this; } /** * Gets the value of the 'favorite_number' field. * @return The value. */ public java.lang.Integer getFavoriteNumber() { return favorite_number; } /** * Sets the value of the 'favorite_number' field. * @param value The value of 'favorite_number'. * @return This builder. */ public avro.examples.baseball.NoSettersTest.Builder setFavoriteNumber(java.lang.Integer value) { validate(fields()[1], value); this.favorite_number = value; fieldSetFlags()[1] = true; return this; } /** * Checks whether the 'favorite_number' field has been set. * @return True if the 'favorite_number' field has been set, false otherwise. */ public boolean hasFavoriteNumber() { return fieldSetFlags()[1]; } /** * Clears the value of the 'favorite_number' field. * @return This builder. */ public avro.examples.baseball.NoSettersTest.Builder clearFavoriteNumber() { favorite_number = null; fieldSetFlags()[1] = false; return this; } @Override @SuppressWarnings("unchecked") public NoSettersTest build() { try { NoSettersTest record = new NoSettersTest(); record.name = fieldSetFlags()[0] ? this.name : (java.lang.CharSequence) defaultValue(fields()[0]); record.favorite_number = fieldSetFlags()[1] ? 
this.favorite_number : (java.lang.Integer) defaultValue(fields()[1]); return record; } catch (org.apache.avro.AvroMissingFieldException e) { throw e; } catch (java.lang.Exception e) { throw new org.apache.avro.AvroRuntimeException(e); } } } @SuppressWarnings("unchecked") private static final org.apache.avro.io.DatumWriter<NoSettersTest> WRITER$ = (org.apache.avro.io.DatumWriter<NoSettersTest>)MODEL$.createDatumWriter(SCHEMA$); @Override public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException { WRITER$.write(this, SpecificData.getEncoder(out)); } @SuppressWarnings("unchecked") private static final org.apache.avro.io.DatumReader<NoSettersTest> READER$ = (org.apache.avro.io.DatumReader<NoSettersTest>)MODEL$.createDatumReader(SCHEMA$); @Override public void readExternal(java.io.ObjectInput in) throws java.io.IOException { READER$.read(this, SpecificData.getDecoder(in)); } /* Custom coders below read/write the two fields directly instead of going through generic datum resolution (enabled by hasCustomCoders returning true). */ @Override protected boolean hasCustomCoders() { return true; } @Override public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException { out.writeString(this.name); if (this.favorite_number == null) { out.writeIndex(1); out.writeNull(); } else { out.writeIndex(0); out.writeInt(this.favorite_number); } } @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException { org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); if (fieldOrder == null) { this.name = in.readString(this.name instanceof Utf8 ? (Utf8)this.name : null); if (in.readIndex() != 0) { in.readNull(); this.favorite_number = null; } else { this.favorite_number = in.readInt(); } } else { for (int i = 0; i < 2; i++) { switch (fieldOrder[i].pos()) { case 0: this.name = in.readString(this.name instanceof Utf8 ? 
(Utf8)this.name : null); break; case 1: if (in.readIndex() != 0) { in.readNull(); this.favorite_number = null; } else { this.favorite_number = in.readInt(); } break; default: throw new java.io.IOException("Corrupt ResolvingDecoder."); } } } } }
6,900
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaFingerprintTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.SchemaNormalization;

/**
 * Utility to generate fingerprint(s) from a schema.
 */
public class SchemaFingerprintTool implements Tool {

  /**
   * Prints "&lt;hex-fingerprint&gt; &lt;file&gt;" for each schema file given (or
   * stdin for "-"), using the algorithm named by --fingerprint.
   *
   * @return 0 on success (including when help is printed for empty arguments)
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    final OptionParser optParser = new OptionParser();
    final OptionSpec<String> fingerprintOpt = optParser
        .accepts("fingerprint",
            // Fix: "dictiates" -> "dictates" in the user-visible help text.
            "Fingerprint algorithm to use. Recommended Avro practice dictates "
                + "that \"CRC-64-AVRO\" is used for 64-bit fingerprints, \"MD5\" is "
                + "used for 128-bit fingerprints, and \"SHA-256\" is used for 256-bit " + "fingerprints.")
        .withRequiredArg().ofType(String.class).defaultsTo("CRC-64-AVRO");
    final OptionSet opts = optParser.parse(args.toArray(new String[0]));

    final Schema.Parser parser = new Schema.Parser();
    @SuppressWarnings("unchecked")
    final List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.isEmpty()) {
      printHelp(out, optParser);
      return 0;
    }

    // Reuse nargs instead of re-querying (and re-casting) the option set.
    for (final String fileOrStdin : nargs) {
      final InputStream input = Util.fileOrStdin(fileOrStdin, in);
      try {
        final Schema schema = parser.parse(input);
        final byte[] fingerprint = SchemaNormalization.parsingFingerprint(opts.valueOf(fingerprintOpt), schema);
        out.format("%s %s%n", Util.encodeHex(fingerprint), fileOrStdin);
      } finally {
        Util.close(input);
      }
    }
    return 0;
  }

  @Override
  public String getName() {
    return "fingerprint";
  }

  @Override
  public String getShortDescription() {
    return "Returns the fingerprint for the schemas.";
  }

  /** Prints usage for this tool, including the generated option help. */
  private void printHelp(PrintStream out, OptionParser optParser) throws IOException {
    out.println("fingerprint [--fingerprint <fingerprint>] input-file [inputfile [inputfile...]]");
    out.println();
    out.println("generates fingerprints based on Avro specification.");
    optParser.printHelpOn(out);
    out.println("A dash ('-') can be given to read a schema from stdin");
  }
}
6,901
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/ConcatTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.Path;

/**
 * Tool to concatenate avro files with the same schema and non-reserved
 * metadata.
 */
public class ConcatTool implements Tool {
  /**
   * Appends the blocks of each input file to a single output file without
   * re-encoding them; all inputs must share schema, non-reserved metadata
   * and codec.
   *
   * @return 0 for success, 1 if the schemas of the input files differ, 2 if the
   *         non-reserved input metadata differs, 3 if the input files are encoded
   *         with more than one codec.
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    if (args.isEmpty()) {
      printHelp(out);
      return 0;
    }

    OutputStream output = out;
    if (args.size() > 1) {
      // Last argument is the output file; the rest are inputs.
      output = Util.fileOrStdout(args.get(args.size() - 1), out);
      args = args.subList(0, args.size() - 1);
    }

    // try-with-resources: previously the writer (and its underlying output
    // stream) leaked on the early error returns below and on any exception.
    // DataFileWriter.close() is a no-op if create() was never called.
    try (DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      Schema schema = null;
      Map<String, byte[]> metadata = new TreeMap<>();
      String inputCodec = null;

      for (String inFile : expandsInputFiles(args)) {
        InputStream input = Util.fileOrStdin(inFile, in);
        // try-with-resources also closes the reader if appendAllFrom throws,
        // which the previous manual reader.close() calls did not cover.
        try (DataFileStream<GenericRecord> reader = new DataFileStream<>(input, new GenericDatumReader<>())) {
          if (schema == null) {
            // this is the first file - set up the writer, and store the
            // Schema & metadata we'll use.
            schema = reader.getSchema();
            for (String key : reader.getMetaKeys()) {
              if (!DataFileWriter.isReservedMeta(key)) {
                byte[] metadatum = reader.getMeta(key);
                metadata.put(key, metadatum);
                writer.setMeta(key, metadatum);
              }
            }
            inputCodec = reader.getMetaString(DataFileConstants.CODEC);
            if (inputCodec == null) {
              inputCodec = DataFileConstants.NULL_CODEC;
            }
            writer.setCodec(CodecFactory.fromString(inputCodec));
            writer.create(schema, output);
          } else {
            // check that we're appending to the same schema & metadata.
            if (!schema.equals(reader.getSchema())) {
              err.println("input files have different schemas");
              return 1;
            }
            for (String key : reader.getMetaKeys()) {
              if (!DataFileWriter.isReservedMeta(key)) {
                byte[] metadatum = reader.getMeta(key);
                byte[] writersMetadatum = metadata.get(key);
                if (!Arrays.equals(metadatum, writersMetadatum)) {
                  err.println("input files have different non-reserved metadata");
                  return 2;
                }
              }
            }
            String thisCodec = reader.getMetaString(DataFileConstants.CODEC);
            if (thisCodec == null) {
              thisCodec = DataFileConstants.NULL_CODEC;
            }
            if (!inputCodec.equals(thisCodec)) {
              err.println("input files have different codecs");
              return 3;
            }
          }
          writer.appendAllFrom(reader, /* recompress */ false);
        }
      }
    }
    return 0;
  }

  /** Processes a list of input files to expand directories if needed. */
  private static List<String> expandsInputFiles(List<String> args) throws IOException {
    List<String> files = new ArrayList<>();
    for (String arg : args) {
      if (arg.equals("-")) {
        files.add(arg); // stdin marker, pass through untouched
      } else {
        List<Path> paths = Util.getFiles(arg);
        for (Path path : paths) {
          files.add(path.toString());
        }
      }
    }
    return files;
  }

  /** Prints usage and the meaning of the non-zero exit codes. */
  private void printHelp(PrintStream out) {
    out.println("concat [input-file...] output-file");
    out.println();
    out.println("Concatenates one or more input files into a new output file");
    out.println("by appending the input blocks without decoding them. The input");
    out.println("files must have the same schema, metadata and codec. If they");
    out.println("do not the tool will return the following error codes:");
    out.println(" 1 if the schemas don't match");
    out.println(" 2 if the metadata doesn't match");
    out.println(" 3 if the codecs don't match");
    // Fix: the original sentence was missing "returns".
    out.println("If no input files are given stdin will be used. The tool returns");
    out.println("0 on success. A dash ('-') can be given as an input file");
    out.println("to use stdin, and as an output file to use stdout. If a directory");
    out.println("is given as an input-file all the files within this directory");
    out.println("are used.");
  }

  @Override
  public String getName() {
    return "concat";
  }

  @Override
  public String getShortDescription() {
    return "Concatenates avro files without re-compressing.";
  }
}
6,902
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/JsonToBinaryFragmentTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.EOFException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.JsonDecoder;

/** Tool to convert JSON data into the binary form. */
public class JsonToBinaryFragmentTool implements Tool {
  /**
   * Reads JSON-encoded datums from the input and writes their Avro binary
   * encoding to stdout. The schema comes either from --schema-file or from
   * an inline positional argument.
   *
   * @return 0 on success, 1 on a usage error
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser cliParser = new OptionParser();
    OptionSpec<String> schemaFileOpt = cliParser
        .accepts("schema-file", "File containing schema, must not occur with inline schema.").withOptionalArg()
        .ofType(String.class);
    OptionSet parsed = cliParser.parse(args.toArray(new String[0]));
    List<String> positional = (List<String>) parsed.nonOptionArguments();
    String schemaFile = schemaFileOpt.value(parsed);

    // With --schema-file only the input file is positional; otherwise the
    // inline schema comes first, then the input file.
    int expectedArgCount = (schemaFile == null) ? 2 : 1;
    if (positional.size() != expectedArgCount) {
      err.println("jsontofrag --schema-file <file> [inline-schema] input-file");
      err.println(" converts JSON to Avro fragments.");
      cliParser.printHelpOn(err);
      err.println(" A dash '-' for input-file means stdin.");
      return 1;
    }

    Schema schema;
    String inputFile;
    if (schemaFile != null) {
      schema = Util.parseSchemaFromFS(schemaFile);
      inputFile = positional.get(0);
    } else {
      schema = new Schema.Parser().parse(positional.get(0));
      inputFile = positional.get(1);
    }

    InputStream input = Util.fileOrStdin(inputFile, stdin);
    try {
      GenericDatumReader<Object> datumReader = new GenericDatumReader<>(schema);
      JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, input);
      GenericDatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
      Encoder encoder = EncoderFactory.get().binaryEncoder(out, null);
      Object record = null;
      for (;;) {
        try {
          record = datumReader.read(record, decoder);
        } catch (EOFException endOfInput) {
          // End of the JSON stream: stop converting.
          break;
        }
        datumWriter.write(record, encoder);
        encoder.flush();
      }
    } finally {
      // Util.close leaves System.in open, so a plain try-with-resources
      // would not be equivalent here.
      Util.close(input);
    }
    return 0;
  }

  @Override
  public String getName() {
    return "jsontofrag";
  }

  @Override
  public String getShortDescription() {
    return "Renders a JSON-encoded Avro datum as binary.";
  }
}
6,903
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/Util.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * The section demarcated by 'copied from Apache commons-codec' is
 * from Apache Commons Codec v1.9.
 */
package org.apache.avro.tool;

import static org.apache.avro.file.DataFileConstants.DEFLATE_CODEC;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.Deflater;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.mapred.FsInput;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import joptsimple.OptionSet;
import joptsimple.OptionParser;
import joptsimple.OptionSpec;

/** Static utility methods for tools. */
class Util {
  /**
   * Returns stdin if filename is "-", else opens the File in the owning
   * filesystem and returns an InputStream for it. Relative paths will be opened
   * in the default filesystem.
   *
   * @param filename The filename to be opened
   * @param stdin the stream to return for the "-" marker
   * @return a buffered stream over stdin or the opened file
   * @throws IOException
   */
  static BufferedInputStream fileOrStdin(String filename, InputStream stdin) throws IOException {
    return new BufferedInputStream(filename.equals("-") ? stdin : openFromFS(filename));
  }

  /**
   * Returns stdout if filename is "-", else opens the file from the owning
   * filesystem and returns an OutputStream for it. Relative paths will be opened
   * in the default filesystem.
   *
   * @param filename The filename to be opened
   * @param stdout the stream to return for the "-" marker
   * @return a buffered stream over stdout or the created file
   * @throws IOException
   */
  static BufferedOutputStream fileOrStdout(String filename, OutputStream stdout) throws IOException {
    // NOTE(review): createFromFS already returns a BufferedOutputStream, so file
    // output ends up double-buffered here — harmless, but confirm before simplifying.
    return new BufferedOutputStream(filename.equals("-") ? stdout : createFromFS(filename));
  }

  /**
   * Returns an InputStream for the file using the owning filesystem, or the
   * default if none is given.
   *
   * @param filename The filename to be opened
   * @throws IOException
   */
  static InputStream openFromFS(String filename) throws IOException {
    Path p = new Path(filename);
    return p.getFileSystem(new Configuration()).open(p);
  }

  /**
   * Returns an InputStream for the file using the owning filesystem, or the
   * default if none is given.
   *
   * @param filename The filename to be opened
   * @throws IOException
   */
  static InputStream openFromFS(Path filename) throws IOException {
    return filename.getFileSystem(new Configuration()).open(filename);
  }

  /**
   * Returns a seekable FsInput using the owning filesystem, or the default if
   * none is given.
   *
   * @param filename The filename to be opened
   * @throws IOException
   */
  static FsInput openSeekableFromFS(String filename) throws IOException {
    return new FsInput(new Path(filename), new Configuration());
  }

  /**
   * Opens the file for writing in the owning filesystem, or the default if none
   * is given.
   *
   * @param filename The filename to be opened.
   * @return An OutputStream to the specified file.
   * @throws IOException
   */
  static OutputStream createFromFS(String filename) throws IOException {
    Path p = new Path(filename);
    return new BufferedOutputStream(p.getFileSystem(new Configuration()).create(p));
  }

  /**
   * Closes the inputstream created from {@link Util#fileOrStdin} unless it is
   * System.in.
   *
   * @param in The inputstream to be closed.
   */
  static void close(InputStream in) {
    if (!System.in.equals(in)) {
      try {
        in.close();
      } catch (IOException e) {
        // Best-effort close: report but do not propagate.
        System.err.println("could not close InputStream " + in.toString());
      }
    }
  }

  /**
   * Closes the outputstream created from {@link Util#fileOrStdout} unless it is
   * System.out.
   *
   * @param out The outputStream to be closed.
   */
  static void close(OutputStream out) {
    if (!System.out.equals(out)) {
      try {
        out.close();
      } catch (IOException e) {
        // Best-effort close: report but do not propagate.
        System.err.println("could not close OutputStream " + out.toString());
      }
    }
  }

  /**
   * Parses a schema from the specified file.
   *
   * @param filename The file name to parse
   * @return The parsed schema
   * @throws IOException
   */
  static Schema parseSchemaFromFS(String filename) throws IOException {
    InputStream stream = openFromFS(filename);
    try {
      return new Schema.Parser().parse(stream);
    } finally {
      close(stream);
    }
  }

  /**
   * If pathname is a file, this method returns a list with a single absolute Path
   * to that file. If pathname is a directory, this method returns a list of
   * Paths to all the files within this directory. Only files inside that
   * directory are included, no subdirectories or files in subdirectories will be
   * added. If pathname is a glob pattern, all files matching the pattern are
   * included.
   *
   * The List is sorted alphabetically.
   *
   * @param fileOrDirName filename, directoryname or a glob pattern
   * @return A Path List
   * @throws IOException
   */
  static List<Path> getFiles(String fileOrDirName) throws IOException {
    List<Path> pathList = new ArrayList<>();
    Path path = new Path(fileOrDirName);
    FileSystem fs = path.getFileSystem(new Configuration());
    if (fs.isFile(path)) {
      pathList.add(path);
    } else if (fs.isDirectory(path)) {
      for (FileStatus status : fs.listStatus(path)) {
        if (!status.isDirectory()) {
          pathList.add(status.getPath());
        }
      }
    } else {
      // Neither a plain file nor a directory: treat the name as a glob.
      FileStatus[] fileStatuses = fs.globStatus(path);
      if (fileStatuses != null) {
        for (FileStatus status : fileStatuses) {
          pathList.add(status.getPath());
        }
      } else {
        throw new FileNotFoundException(fileOrDirName);
      }
    }
    Collections.sort(pathList);
    return pathList;
  }

  /**
   * Concatenate the result of {@link #getFiles(String)} applied to all file or
   * directory names. The list is sorted alphabetically and contains no
   * subdirectories or files within those.
   *
   * The list is sorted alphabetically.
   *
   * @param fileOrDirNames A list of filenames, directorynames or glob patterns
   * @return A list of Paths, one for each file
   * @throws IOException
   */
  static List<Path> getFiles(List<String> fileOrDirNames) throws IOException {
    ArrayList<Path> pathList = new ArrayList<>(fileOrDirNames.size());
    for (String name : fileOrDirNames) {
      pathList.addAll(getFiles(name));
    }
    Collections.sort(pathList);
    return pathList;
  }

  /**
   * Converts a String JSON object into a generic datum.
   *
   * This is inefficient (creates extra objects), so should be used sparingly.
   */
  static Object jsonToGenericDatum(Schema schema, String jsonData) throws IOException {
    GenericDatumReader<Object> reader = new GenericDatumReader<>(schema);
    Object datum = reader.read(null, DecoderFactory.get().jsonDecoder(schema, jsonData));
    return datum;
  }

  /** Reads and returns the first datum in a data file. */
  static Object datumFromFile(Schema schema, String file) throws IOException {
    try (DataFileReader<Object> in = new DataFileReader<>(new File(file), new GenericDatumReader<>(schema))) {
      return in.next();
    }
  }

  /** Declares the --codec option, defaulting to deflate. */
  static OptionSpec<String> compressionCodecOption(OptionParser optParser) {
    return optParser.accepts("codec", "Compression codec").withRequiredArg().ofType(String.class)
        .defaultsTo(DEFLATE_CODEC);
  }

  /** Declares the --codec option with a caller-supplied default codec name. */
  static OptionSpec<String> compressionCodecOptionWithDefault(OptionParser optParser, String s) {
    return optParser.accepts("codec", "Compression codec").withRequiredArg().ofType(String.class).defaultsTo(s);
  }

  /** Declares the --level option; only deflate, xz and zstandard honor it. */
  static OptionSpec<Integer> compressionLevelOption(OptionParser optParser) {
    return optParser.accepts("level", "Compression level (only applies to deflate, xz, and zstandard)")
        .withRequiredArg().ofType(Integer.class).defaultsTo(Deflater.DEFAULT_COMPRESSION);
  }

  /** Builds a CodecFactory from the parsed options, defaulting to deflate. */
  static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level) {
    return codecFactory(opts, codec, level, DEFLATE_CODEC);
  }

  /**
   * Builds a CodecFactory from the parsed options; level is applied only to
   * the codecs that support it, others fall through to fromString.
   */
  static CodecFactory codecFactory(OptionSet opts, OptionSpec<String> codec, OptionSpec<Integer> level,
      String defaultCodec) {
    String codecName = opts.hasArgument(codec) ? codec.value(opts) : defaultCodec;
    if (codecName.equals(DEFLATE_CODEC)) {
      return CodecFactory.deflateCodec(level.value(opts));
    } else if (codecName.equals(DataFileConstants.XZ_CODEC)) {
      return CodecFactory.xzCodec(level.value(opts));
    } else if (codecName.equals(DataFileConstants.ZSTANDARD_CODEC)) {
      return CodecFactory.zstandardCodec(level.value(opts));
    } else {
      return CodecFactory.fromString(codec.value(opts));
    }
  }

  // Below copied from Apache commons-codec version 1.9
  // org.apache.commons.codec.binary.Hex, see NOTICE.

  /**
   * Used to build output as Hex
   */
  private static final char[] DIGITS_LOWER = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd',
      'e', 'f' };

  /**
   * Converts an array of bytes into a String of lower-case hexadecimal
   * characters representing the value of each byte in order. The returned
   * String will be double the length of the passed array, as it takes two
   * characters to represent any given byte.
   *
   * (The original javadoc described a {@code toDigits} parameter and a char[]
   * return, copied from the two-argument commons-codec variant; this method
   * takes one argument and returns a String.)
   *
   * @param data a byte[] to convert to Hex characters
   * @return A String containing the hexadecimal characters
   */
  static String encodeHex(final byte[] data) {
    final int l = data.length;
    final char[] out = new char[l << 1];
    // two characters form the hex value.
    for (int i = 0, j = 0; i < l; i++) {
      out[j++] = DIGITS_LOWER[(0xF0 & data[i]) >>> 4];
      out[j++] = DIGITS_LOWER[0x0F & data[i]];
    }
    return new String(out);
  }
  // end copied from Apache commons-codec
}
6,904
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/IdlTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.compiler.idl.Idl;
import org.apache.avro.idl.IdlFile;
import org.apache.avro.idl.IdlReader;

import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

/**
 * Tool implementation for generating Avro JSON schemata from idl format files.
 */
public class IdlTool implements Tool {

  /**
   * Parses an IDL file (or stdin) and prints the resulting protocol or main
   * schema as JSON to a file (or stdout).
   *
   * Args: {@code [--useJavaCC] [in [out]]}, where "-" means stdin/stdout.
   *
   * @return 0 on success, 1 if the IDL contains neither a schema nor a
   *         protocol, -1 on usage errors
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    // --useJavaCC selects the legacy JavaCC-based parser; it shifts the
    // positional in/out arguments by one.
    boolean useJavaCC = "--useJavaCC".equals(getArg(args, 0, null));

    if (args.size() > (useJavaCC ? 3 : 2)
        || (args.size() == 1 && (args.get(0).equals("--help") || args.get(0).equals("-help")))) {
      err.println("Usage: idl [--useJavaCC] [in [out]]");
      err.println();
      err.println("If an output path is not specified, outputs to stdout.");
      err.println("If no input or output is specified, takes input from");
      err.println("stdin and outputs to stdout.");
      err.println("The special path \"-\" may also be specified to refer to");
      err.println("stdin and stdout.");
      return -1;
    }

    // A null File means "use the corresponding standard stream".
    String inputName = getArg(args, useJavaCC ? 1 : 0, "-");
    File inputFile = "-".equals(inputName) ? null : new File(inputName);
    String outputName = getArg(args, useJavaCC ? 2 : 1, "-");
    File outputFile = "-".equals(outputName) ? null : new File(outputName);

    // m: a standalone main schema (new parser only); p: a protocol.
    Schema m = null;
    Protocol p = null;
    if (useJavaCC) {
      // NOTE(review): the JavaCC path appears to require a real input file;
      // inputFile may be null here when reading from stdin — confirm Idl(null)
      // is supported.
      try (Idl parser = new Idl(inputFile)) {
        p = parser.CompilationUnit();
        for (String warning : parser.getWarningsAfterParsing()) {
          err.println("Warning: " + warning);
        }
      }
    } else {
      IdlReader parser = new IdlReader();
      IdlFile idlFile = inputFile == null ? parser.parse(in) : parser.parse(inputFile.toPath());
      for (String warning : idlFile.getWarnings()) {
        err.println("Warning: " + warning);
      }
      p = idlFile.getProtocol();
      m = idlFile.getMainSchema();
    }

    PrintStream parseOut = out;
    if (outputFile != null) {
      parseOut = new PrintStream(new FileOutputStream(outputFile));
    }

    if (m == null && p == null) {
      err.println("Error: the IDL file does not contain a schema nor a protocol.");
      return 1;
    }

    try {
      // A main schema takes precedence over the protocol.
      parseOut.print(m == null ? p.toString(true) : m.toString(true));
    } finally {
      if (parseOut != out) // Close only the newly created FileOutputStream
        parseOut.close();
    }
    return 0;
  }

  /** Returns the positional argument at {@code index}, or the default if absent. */
  private String getArg(List<String> args, int index, String defaultValue) {
    if (index < args.size()) {
      return args.get(index);
    } else {
      return defaultValue;
    }
  }

  @Override
  public String getName() {
    return "idl";
  }

  @Override
  public String getShortDescription() {
    return "Generates a JSON schema or protocol from an Avro IDL file";
  }
}
6,905
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/FromTextTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;

/**
 * Reads a text file into an Avro data file.
 *
 * Can accept a file name, and HDFS file URI, or stdin. Can write to a file
 * name, an HDFS URI, or stdout.
 */
public class FromTextTool implements Tool {
  // Each input line becomes one "bytes" datum in the output file.
  private static final String TEXT_FILE_SCHEMA = "\"bytes\"";

  @Override
  public String getName() {
    return "fromtext";
  }

  @Override
  public String getShortDescription() {
    return "Imports a text file into an avro data file.";
  }

  /**
   * Splits the input into lines (handling \n, \r and \r\n terminators) and
   * appends each line, without its terminator, as a "bytes" datum.
   *
   * @return 0 on success, 1 on usage errors
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {

    OptionParser p = new OptionParser();
    OptionSpec<Integer> level = Util.compressionLevelOption(p);
    OptionSpec<String> codec = Util.compressionCodecOption(p);

    OptionSet opts = p.parse(args.toArray(new String[0]));
    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() != 2) {
      err.println("Expected 2 args: from_file to_file (local filenames," + " Hadoop URI's, or '-' for stdin/stdout");
      p.printHelpOn(err);
      return 1;
    }

    CodecFactory codecFactory = Util.codecFactory(opts, codec, level);
    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);

    // try-with-resources: previously the writer leaked if an exception was
    // thrown mid-copy. Closing the writer also flushes the Avro container.
    try (DataFileWriter<ByteBuffer> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      writer.setCodec(codecFactory);
      writer.create(new Schema.Parser().parse(TEXT_FILE_SCHEMA), outStream);

      // Accumulates the current line; grown on demand.
      ByteBuffer line = ByteBuffer.allocate(128);
      // True when the previous byte was '\r', so a following '\n' (i.e. a
      // "\r\n" terminator) does not emit an extra empty line.
      boolean returnSeen = false;
      byte[] buf = new byte[8192];
      for (int end = inStream.read(buf); end != -1; end = inStream.read(buf)) {
        for (int i = 0; i < end; i++) {
          int b = buf[i] & 0xFF;
          if (b == '\n') { // newline
            if (!returnSeen) {
              // NOTE: a leftover debug println to System.out was removed here;
              // it corrupted the output when writing to stdout ('-').
              ((Buffer) line).flip();
              writer.append(line);
              ((Buffer) line).clear();
            } else {
              // '\n' of a "\r\n" pair: the line was already emitted at '\r'.
              returnSeen = false;
            }
          } else if (b == '\r') { // return
            ((Buffer) line).flip();
            writer.append(line);
            ((Buffer) line).clear();
            returnSeen = true;
          } else {
            if (line.position() == line.limit()) {
              // reallocate longer line
              ByteBuffer tempLine = ByteBuffer.allocate(line.limit() * 2);
              ((Buffer) line).flip();
              tempLine.put(line);
              line = tempLine;
            }
            line.put((byte) b);
            returnSeen = false;
          }
        }
      }
    }
    inStream.close();
    return 0;
  }
}
6,906
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniToJsonTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import org.apache.trevni.Input;
import org.apache.trevni.ColumnFileReader;
import org.apache.trevni.ColumnMetaData;
import org.apache.trevni.ColumnValues;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;

/**
 * Tool to read Trevni files and print them as JSON. This can read any Trevni
 * file. Nested structure is reconstructed from the columns rather than any
 * schema information.
 */
public class TrevniToJsonTool implements Tool {
  static final JsonFactory FACTORY = new JsonFactory();

  // Per-invocation state populated by toJson(); indexed by column number.
  private JsonGenerator generator;
  private ColumnValues[] values;
  private String[] shortNames;

  @Override
  public String getName() {
    return "trevni_tojson";
  }

  @Override
  public String getShortDescription() {
    return "Dumps a Trevni file as JSON.";
  }

  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    String filename;
    boolean pretty = false;
    if (args.size() == 2 && "-pretty".equals(args.get(0))) {
      pretty = true;
      filename = args.get(1);
    } else if (args.size() == 1) {
      filename = args.get(0);
    } else {
      err.println("Usage: [-pretty] input");
      return 1;
    }

    toJson(TrevniUtil.input(filename), out, pretty);

    return 0;
  }

  /**
   * Read a Trevni file and print each row as a JSON object.
   *
   * @param input  the Trevni file to read
   * @param out    stream the JSON objects are written to
   * @param pretty when true, pretty-print; otherwise one object per line
   */
  public void toJson(Input input, PrintStream out, boolean pretty) throws IOException {
    this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8);
    if (pretty) {
      generator.useDefaultPrettyPrinter();
    } else { // ensure newline separation
      MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
      pp.setRootValueSeparator(System.getProperty("line.separator"));
      generator.setPrettyPrinter(pp);
    }

    ColumnFileReader reader = new ColumnFileReader(input);
    // try/finally so the reader is released even if a row fails to decode;
    // previously an exception here leaked the reader.
    try {
      int columnCount = (int) reader.getColumnCount();
      this.values = new ColumnValues[columnCount];
      this.shortNames = new String[columnCount];
      for (int i = 0; i < columnCount; i++) {
        values[i] = reader.getValues(i);
        shortNames[i] = shortName(reader.getColumnMetaData(i));
      }

      List<ColumnMetaData> roots = reader.getRoots();
      for (long row = 0; row < reader.getRowCount(); row++) {
        // Advance every column to this row, then emit the top-level columns.
        for (ColumnValues v : values)
          v.startRow();
        generator.writeStartObject();
        for (ColumnMetaData root : roots)
          valueToJson(root);
        generator.writeEndObject();
      }
      generator.flush();
      out.println();
    } finally {
      reader.close();
    }
  }

  /**
   * Emits one field for the given column: a scalar for non-array columns,
   * otherwise a JSON array whose elements recurse into child columns.
   */
  private void valueToJson(ColumnMetaData column) throws IOException {
    generator.writeFieldName(shortNames[column.getNumber()]);
    ColumnValues in = values[column.getNumber()];
    if (!column.isArray()) {
      primitiveToJson(column, in.nextValue());
    } else {
      generator.writeStartArray();
      int length = in.nextLength();
      for (int i = 0; i < length; i++) {
        Object value = in.nextValue();
        List<ColumnMetaData> children = column.getChildren();
        if (children.isEmpty()) {
          primitiveToJson(column, value);
        } else {
          // A nested record: its own value (if any) is emitted under "value$".
          generator.writeStartObject();
          if (value != null) {
            generator.writeFieldName("value$");
            primitiveToJson(column, value);
          }
          for (ColumnMetaData child : children)
            valueToJson(child);
          generator.writeEndObject();
        }
      }
      generator.writeEndArray();
    }
  }

  /** Writes a single scalar value using the JSON type matching the column type. */
  private void primitiveToJson(ColumnMetaData column, Object value) throws IOException {
    switch (column.getType()) {
    case NULL:
      generator.writeNull();
      break;
    case BOOLEAN:
      generator.writeBoolean((Boolean) value);
      break;
    case INT:
      generator.writeNumber((Integer) value);
      break;
    case LONG:
      generator.writeNumber((Long) value);
      break;
    case FIXED32:
      generator.writeNumber((Integer) value);
      break;
    case FIXED64:
      generator.writeNumber((Long) value);
      break;
    case FLOAT:
      generator.writeNumber((Float) value);
      break;
    case DOUBLE:
      generator.writeNumber((Double) value);
      break;
    case STRING:
      generator.writeString((String) value);
      break;
    case BYTES:
      generator.writeBinary((byte[]) value);
      break;
    default:
      throw new RuntimeException("Unknown value type: " + column.getType());
    }
  }

  // trim off portion of name shared with parent
  private String shortName(ColumnMetaData column) {
    String name = column.getName();
    ColumnMetaData parent = column.getParent();
    if (parent != null && name.startsWith(parent.getName()))
      name = name.substring(parent.getName().length());
    // Drop a leading separator character left over after trimming the prefix.
    if (!Character.isLetterOrDigit(name.charAt(0)))
      name = name.substring(1);
    return name;
  }
}
6,907
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniCreateRandomTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.File;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.util.RandomData;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.trevni.avro.AvroColumnWriter;

/** Tool to create randomly populated Trevni file based on an Avro schema */
public class TrevniCreateRandomTool implements Tool {

  @Override
  public String getName() {
    return "trevni_random";
  }

  @Override
  public String getShortDescription() {
    return "Create a Trevni file filled with random instances of a schema.";
  }

  /**
   * Generates {@code count} random instances of the schema read from the first
   * argument and writes them as a Trevni file to the third argument.
   *
   * @return 0 on success, 1 on usage errors
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    if (args.size() != 3) {
      err.println("Usage: schemaFile count outputFile");
      return 1;
    }

    // Positional arguments: schema file, instance count, output file.
    File schemaSource = new File(args.get(0));
    int instanceCount = Integer.parseInt(args.get(1));
    File target = new File(args.get(2));

    Schema schema = new Schema.Parser().parse(schemaSource);
    AvroColumnWriter<Object> columnWriter = new AvroColumnWriter<>(schema, new ColumnFileMetaData());

    // RandomData is an Iterable producing `instanceCount` random records.
    for (Object record : new RandomData(schema, instanceCount)) {
      columnWriter.write(record);
    }
    columnWriter.writeTo(target);

    return 0;
  }
}
6,908
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetMetaTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.mapred.FsInput;

/** Reads a data file to get its metadata. */
public class DataFileGetMetaTool implements Tool {

  @Override
  public String getName() {
    return "getmeta";
  }

  @Override
  public String getShortDescription() {
    return "Prints out the metadata of an Avro data file.";
  }

  /**
   * Prints either the value of a single metadata key (--key) or all keys as
   * TAB-separated {@code key\tvalue} lines.
   *
   * @return 0 on success, 1 on usage errors
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSpec<String> keyOption = p.accepts("key", "Metadata key").withOptionalArg().ofType(String.class);
    OptionSet opts = p.parse(args.toArray(new String[0]));
    String keyName = keyOption.value(opts);

    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() != 1) {
      err.println("Expected 1 arg: input_file");
      p.printHelpOn(err);
      return 1;
    }

    // Use the non-option argument, not args.get(0): when --key precedes the
    // filename, args.get(0) is the option itself and opening it would fail.
    FsInput in = Util.openSeekableFromFS(nargs.get(0));

    // try-with-resources: previously the reader leaked if reading threw.
    try (DataFileReader<Void> reader = new DataFileReader<>(in, new GenericDatumReader<>())) {
      if (keyName != null) {
        // Single key: print the raw value (nothing if the key is absent).
        byte[] value = reader.getMeta(keyName);
        if (value != null) {
          out.write(value, 0, value.length);
          out.println();
        }
      } else {
        // All keys: "escapedKey<TAB>rawValue" per line.
        List<String> keys = reader.getMetaKeys();
        for (String key : keys) {
          out.print(escapeKey(key));
          out.print('\t');
          byte[] value = reader.getMeta(key);
          out.write(value, 0, value.length);
          out.println();
        }
      }
    }
    return 0;
  }

  // escape TAB, NL and CR in keys, so that output can be reliably parsed
  static String escapeKey(String key) {
    key = key.replace("\\", "\\\\"); // escape backslashes first
    key = key.replace("\t", "\\t"); // TAB
    key = key.replace("\n", "\\n"); // NL
    key = key.replace("\r", "\\r"); // CR
    return key;
  }
}
6,909
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/RpcSendTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.File;
import java.net.URI;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.Protocol.Message;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.ipc.Ipc;
import org.apache.avro.ipc.generic.GenericRequestor;

/**
 * Sends a single RPC message.
 */
public class RpcSendTool implements Tool {

  @Override
  public String getName() {
    return "rpcsend";
  }

  @Override
  public String getShortDescription() {
    return "Sends a single RPC message.";
  }

  /**
   * Sends one RPC request to the given URI and prints the JSON-encoded
   * response. The request payload comes from either --data (inline JSON) or
   * --file (an Avro data file).
   *
   * @return 0 on success, 1 on usage or lookup errors
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser optionParser = new OptionParser();
    OptionSpec<String> fileOption = optionParser.accepts("file", "Data file containing request parameters.")
        .withRequiredArg().ofType(String.class);
    OptionSpec<String> dataOption = optionParser.accepts("data", "JSON-encoded request parameters.").withRequiredArg()
        .ofType(String.class);
    OptionSet parsed = optionParser.parse(args.toArray(new String[0]));
    args = (List<String>) parsed.nonOptionArguments();

    if (args.size() != 3) {
      err.println("Usage: uri protocol_file message_name (-data d | -file f)");
      optionParser.printHelpOn(err);
      return 1;
    }

    // Positional arguments: target URI, protocol file, message name.
    URI targetUri = new URI(args.get(0));
    Protocol protocol = Protocol.parse(new File(args.get(1)));
    String messageName = args.get(2);

    Message message = protocol.getMessages().get(messageName);
    if (message == null) {
      err.println(String.format("No message named '%s' found in protocol '%s'.", messageName, protocol));
      return 1;
    }

    // Build the request datum from whichever source option was supplied.
    Object requestDatum;
    if (dataOption.value(parsed) != null) {
      requestDatum = Util.jsonToGenericDatum(message.getRequest(), dataOption.value(parsed));
    } else if (fileOption.value(parsed) != null) {
      requestDatum = Util.datumFromFile(message.getRequest(), fileOption.value(parsed));
    } else {
      err.println("One of -data or -file must be specified.");
      return 1;
    }

    GenericRequestor requestor = new GenericRequestor(protocol, Ipc.createTransceiver(targetUri));
    Object response = requestor.request(message.getName(), requestDatum);
    dumpJson(out, message.getResponse(), response);
    return 0;
  }

  /** Pretty-prints {@code datum} as JSON (per {@code schema}) followed by a newline. */
  private void dumpJson(PrintStream out, Schema schema, Object datum) throws IOException {
    DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
    JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, out, true);
    writer.write(datum, jsonEncoder);
    jsonEncoder.flush();
    out.println();
    out.flush();
  }
}
6,910
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniMetadataTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

import org.apache.trevni.Input;
import org.apache.trevni.ColumnFileReader;
import org.apache.trevni.MetaData;
import org.apache.trevni.ColumnMetaData;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.util.MinimalPrettyPrinter;

/** Tool to print Trevni file metadata as JSON. */
public class TrevniMetadataTool implements Tool {
  static final JsonFactory FACTORY = new JsonFactory();

  // Per-invocation state set up by dump().
  private JsonGenerator generator;

  @Override
  public String getName() {
    return "trevni_meta";
  }

  @Override
  public String getShortDescription() {
    return "Dumps a Trevni file's metadata as JSON.";
  }

  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    String filename;
    boolean pretty = false;
    if (args.size() == 2 && "-pretty".equals(args.get(0))) {
      pretty = true;
      filename = args.get(1);
    } else if (args.size() == 1) {
      filename = args.get(0);
    } else {
      err.println("Usage: [-pretty] input");
      return 1;
    }

    dump(TrevniUtil.input(filename), out, pretty);

    return 0;
  }

  /**
   * Dumps a Trevni file's row/column counts, file metadata, and per-column
   * metadata as a single JSON object.
   *
   * @param input  the Trevni file to read
   * @param out    stream the JSON is written to
   * @param pretty when true, pretty-print; otherwise one object per line
   */
  public void dump(Input input, PrintStream out, boolean pretty) throws IOException {
    this.generator = FACTORY.createGenerator(out, JsonEncoding.UTF8);
    if (pretty) {
      generator.useDefaultPrettyPrinter();
    } else { // ensure newline separation
      MinimalPrettyPrinter pp = new MinimalPrettyPrinter();
      pp.setRootValueSeparator(System.getProperty("line.separator"));
      generator.setPrettyPrinter(pp);
    }

    ColumnFileReader reader = new ColumnFileReader(input);
    // try/finally so the reader is released even if serialization throws;
    // previously an exception here leaked the reader.
    try {
      generator.writeStartObject();
      generator.writeNumberField("rowCount", reader.getRowCount());
      generator.writeNumberField("columnCount", reader.getColumnCount());

      generator.writeFieldName("metadata");
      dump(reader.getMetaData());

      generator.writeFieldName("columns");
      generator.writeStartArray();
      for (ColumnMetaData c : reader.getColumnMetaData())
        dump(c);
      generator.writeEndArray();

      generator.writeEndObject();
      generator.flush();
      out.println();
    } finally {
      reader.close();
    }
  }

  /**
   * Writes one metadata map as a JSON object. Values are raw bytes decoded as
   * ISO-8859-1 so every byte round-trips to a character.
   */
  private void dump(MetaData<?> meta) throws IOException {
    generator.writeStartObject();
    for (Map.Entry<String, byte[]> e : meta.entrySet())
      generator.writeStringField(e.getKey(), new String(e.getValue(), StandardCharsets.ISO_8859_1));
    generator.writeEndObject();
  }
}
6,911
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;

/** Reads an avro data file into a plain text file. */
public class ToTextTool implements Tool {
  private static final String TEXT_FILE_SCHEMA = "\"bytes\"";
  private static final byte[] LINE_SEPARATOR = System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8);

  @Override
  public String getName() {
    return "totext";
  }

  @Override
  public String getShortDescription() {
    return "Converts an Avro data file to a text file.";
  }

  /**
   * Writes each "bytes" datum of the input file as one line of the output,
   * joined with the platform line separator. The input must have the generic
   * text schema ("bytes").
   *
   * @return 0 on success, 1 on usage or schema errors
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSet opts = p.parse(args.toArray(new String[0]));
    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() != 2) {
      err.println("Expected 2 args: from_file to_file (filenames or '-' for stdin/stdout");
      p.printHelpOn(err);
      return 1;
    }

    // Use the non-option arguments consistently (the size check above already
    // did); mixing args/nargs would break if options were ever added.
    BufferedInputStream inStream = Util.fileOrStdin(nargs.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(nargs.get(1), out);

    GenericDatumReader<Object> reader = new GenericDatumReader<>();
    DataFileStream<Object> fileReader = new DataFileStream<>(inStream, reader);
    if (!fileReader.getSchema().equals(new Schema.Parser().parse(TEXT_FILE_SCHEMA))) {
      err.println("Avro file is not generic text schema");
      p.printHelpOn(err);
      fileReader.close();
      return 1;
    }

    while (fileReader.hasNext()) {
      ByteBuffer outBuff = (ByteBuffer) fileReader.next();
      // Write only the buffer's valid region. The previous code wrote the
      // whole backing array, which emits garbage whenever the buffer's limit
      // is smaller than its capacity or the buffer is an offset view.
      outStream.write(outBuff.array(), outBuff.arrayOffset() + outBuff.position(), outBuff.remaining());
      outStream.write(LINE_SEPARATOR);
    }

    fileReader.close();
    Util.close(inStream);
    Util.close(outStream);
    return 0;
  }
}
6,912
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/ToTrevniTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.List;

import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.trevni.ColumnFileMetaData;
import org.apache.trevni.avro.AvroColumnWriter;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

/** Reads an Avro data file and writes a Trevni file. */
public class ToTrevniTool implements Tool {

  @Override
  public String getName() {
    return "totrevni";
  }

  @Override
  public String getShortDescription() {
    return "Converts an Avro data file to a Trevni file.";
  }

  /**
   * Copies every datum of the Avro input to a Trevni file, preserving the
   * schema and applying the codec given with --codec (default "null").
   *
   * @return 0 on success, 1 on usage errors
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSpec<String> codec = p.accepts("codec", "Compression codec").withRequiredArg().defaultsTo("null")
        .ofType(String.class);
    OptionSet opts = p.parse(args.toArray(new String[0]));
    if (opts.nonOptionArguments().size() != 2) {
      err.println("Usage: inFile outFile (filenames or '-' for stdin/stdout)");
      p.printHelpOn(err);
      return 1;
    }
    args = (List<String>) opts.nonOptionArguments();

    // try-with-resources: previously the reader used a raw type and both the
    // reader and the output stream leaked if an exception was thrown mid-copy.
    try (
        DataFileStream<Object> reader = new DataFileStream<>(Util.fileOrStdin(args.get(0), stdin),
            new GenericDatumReader<>());
        OutputStream outs = Util.fileOrStdout(args.get(1), out)) {
      AvroColumnWriter<Object> writer = new AvroColumnWriter<>(reader.getSchema(),
          new ColumnFileMetaData().setCodec(codec.value(opts)));
      for (Object datum : reader)
        writer.write(datum);
      writer.writeTo(outs);
    }

    return 0;
  }
}
6,913
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/IdlToSchemataTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.compiler.idl.Idl;
import org.apache.avro.idl.IdlFile;
import org.apache.avro.idl.IdlReader;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

/**
 * Extract the Avro JSON schemata of the types of a protocol defined through an
 * idl format file.
 */
public class IdlToSchemataTool implements Tool {

  /**
   * Parses an IDL file (or stdin) and writes one {@code <Name>.avsc} file per
   * named type into the output directory.
   *
   * Args: {@code [--useJavaCC] [idl [outdir]]}, where "-" means stdin.
   *
   * @return 0 on success, -1 on usage errors
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    // --useJavaCC selects the legacy JavaCC-based parser; it shifts the
    // positional idl/outdir arguments by one.
    boolean useJavaCC = "--useJavaCC".equals(getArg(args, 0, null));

    if (args.isEmpty() || args.size() > (useJavaCC ? 3 : 2) || isRequestingHelp(args)) {
      err.println("Usage: idl2schemata [--useJavaCC] [idl [outdir]]");
      err.println();
      err.println("If an output directory is not specified, " + "outputs to current directory.");
      return -1;
    }

    // A null inputFile means "read from stdin".
    String inputName = getArg(args, useJavaCC ? 1 : 0, "-");
    File inputFile = "-".equals(inputName) ? null : new File(inputName);
    File outputDirectory = getOutputDirectory(getArg(args, useJavaCC ? 2 : 1, ""));

    if (useJavaCC) {
      try (Idl parser = new Idl(inputFile)) {
        final Protocol protocol = parser.CompilationUnit();
        final List<String> warnings = parser.getWarningsAfterParsing();
        for (String warning : warnings) {
          err.println("Warning: " + warning);
        }
        for (Schema schema : protocol.getTypes()) {
          print(schema, outputDirectory);
        }
      }
    } else {
      IdlReader parser = new IdlReader();
      IdlFile idlFile = inputFile == null ? parser.parse(in) : parser.parse(inputFile.toPath());
      for (String warning : idlFile.getWarnings()) {
        err.println("Warning: " + warning);
      }
      for (Schema schema : idlFile.getNamedSchemas().values()) {
        print(schema, outputDirectory);
      }
    }
    return 0;
  }

  /** True when the single argument is --help or -help. */
  private boolean isRequestingHelp(List<String> args) {
    return args.size() == 1 && (args.get(0).equals("--help") || args.get(0).equals("-help"));
  }

  /** Returns the positional argument at {@code index}, or the default if absent. */
  private String getArg(List<String> args, int index, String defaultValue) {
    if (index < args.size()) {
      return args.get(index);
    } else {
      return defaultValue;
    }
  }

  /** Creates (if needed) and returns the output directory. */
  private File getOutputDirectory(String dirname) {
    File outputDirectory = new File(dirname);
    outputDirectory.mkdirs();
    return outputDirectory;
  }

  /** Writes the schema, pretty-printed, to {@code <outdir>/<Name>.avsc}. */
  private void print(Schema schema, File outputDirectory) throws FileNotFoundException {
    String dirpath = outputDirectory.getAbsolutePath();
    String filename = dirpath + "/" + schema.getName() + ".avsc";
    // try-with-resources: previously the stream leaked if println threw.
    try (PrintStream printStream = new PrintStream(new FileOutputStream(filename))) {
      printStream.println(schema.toString(true));
    }
  }

  @Override
  public String getName() {
    return "idl2schemata";
  }

  @Override
  public String getShortDescription() {
    return "Extract JSON schemata of the types from an Avro IDL file";
  }
}
6,914
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/TetherTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.File;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.mapred.AvroJob;
import org.apache.avro.mapred.tether.TetherJob;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;

import org.apache.commons.cli.Options;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;

/** Submits and runs a tethered (externally executed) mapreduce job. */
@SuppressWarnings("deprecation")
public class TetherTool implements Tool {
  public TetherJob job;

  @Override
  public String getName() {
    return "tether";
  }

  @Override
  public String getShortDescription() {
    return "Run a tethered mapreduce job.";
  }

  /**
   * Parses the command line, configures a {@link JobConf} and runs the tethered
   * job. Returns 0 on success, -1 on a parse/configuration error.
   */
  @Override
  public int run(InputStream ins, PrintStream outs, PrintStream err, List<String> args) throws Exception {
    String[] argarry = args.toArray(new String[0]);
    Options opts = new Options();

    Option helpopt = OptionBuilder.hasArg(false).withDescription("print this message").create("help");

    Option inopt = OptionBuilder.hasArg().isRequired().withDescription("comma-separated input paths").create("in");

    Option outopt = OptionBuilder.hasArg().isRequired().withDescription("The output path.").create("out");

    Option pargs = OptionBuilder.hasArg().withDescription(
        "A string containing the command line arguments to pass to the tethered process. String should be enclosed in quotes")
        .create("exec_args");

    Option popt = OptionBuilder.hasArg().isRequired().withDescription("executable program, usually in HDFS")
        .create("program");

    Option outscopt = OptionBuilder.withType(File.class).hasArg().isRequired()
        .withDescription("schema file for output of reducer").create("outschema");

    Option outscmapopt = OptionBuilder.withType(File.class).hasArg()
        .withDescription("(optional) map output schema file, if different from outschema").create("outschemamap");

    Option redopt = OptionBuilder.withType(Integer.class).hasArg().withDescription("(optional) number of reducers")
        .create("reduces");

    Option cacheopt = OptionBuilder.withType(Boolean.class).hasArg()
        .withDescription(
            "(optional) boolean indicating whether or not the executable should be distributed via distributed cache")
        .create("exec_cached");

    Option protoopt = OptionBuilder.hasArg()
        .withDescription("(optional) specifies the transport protocol 'http' or 'sasl'").create("protocol");

    opts.addOption(redopt);
    opts.addOption(outscopt);
    opts.addOption(popt);
    opts.addOption(pargs);
    opts.addOption(inopt);
    opts.addOption(outopt);
    opts.addOption(helpopt);
    opts.addOption(outscmapopt);
    opts.addOption(cacheopt);
    opts.addOption(protoopt);

    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    HelpFormatter formatter = new HelpFormatter();
    JobConf job = new JobConf();

    try {
      line = parser.parse(opts, argarry);

      if (line.hasOption("help")) {
        formatter.printHelp("tether", opts);
        return 0;
      }

      FileInputFormat.addInputPaths(job, line.getOptionValue("in"));
      FileOutputFormat.setOutputPath(job, new Path(line.getOptionValue("out")));

      List<String> exargs = null;
      Boolean cached = false;

      if (line.hasOption("exec_args")) {
        String[] splitargs = line.getOptionValue("exec_args").split(" ");
        exargs = new ArrayList<>(Arrays.asList(splitargs));
      }
      if (line.hasOption("exec_cached")) {
        cached = Boolean.parseBoolean(line.getOptionValue("exec_cached"));
      }
      TetherJob.setExecutable(job, new File(line.getOptionValue("program")), exargs, cached);

      File outschema = (File) line.getParsedOptionValue("outschema");
      // Use Schema.Parser, consistent with the outschemamap branch below
      // (Schema.parse(File) is deprecated).
      job.set(AvroJob.OUTPUT_SCHEMA, new Schema.Parser().parse(outschema).toString());
      if (line.hasOption("outschemamap")) {
        job.set(AvroJob.MAP_OUTPUT_SCHEMA,
            new Schema.Parser().parse((File) line.getParsedOptionValue("outschemamap")).toString());
      }
      if (line.hasOption("reduces")) {
        job.setNumReduceTasks((Integer) line.getParsedOptionValue("reduces"));
      }
      if (line.hasOption("protocol")) {
        TetherJob.setProtocol(job, line.getOptionValue("protocol"));
      }
    } catch (Exception exp) {
      // Report on the tool's error stream (was System.out) so errors are not
      // mixed into normal output when the tool is scripted.
      err.println("Unexpected exception: " + exp.getMessage());
      formatter.printHelp("tether", opts);
      return -1;
    }

    TetherJob.runJob(job);
    return 0;
  }
}
6,915
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/TrevniUtil.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;

import org.apache.trevni.Input;
import org.apache.trevni.avro.HadoopInput;
import org.apache.trevni.InputFile;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Static utility methods for tools. */
class TrevniUtil {

  /**
   * Opens {@code name} as a Trevni {@link Input}, reading from HDFS when the
   * name carries an {@code hdfs://} scheme and from the local filesystem
   * otherwise.
   */
  static Input input(String name) throws IOException {
    if (name.startsWith("hdfs://")) {
      return new HadoopInput(new Path(name), new Configuration());
    }
    return new InputFile(new File(name));
  }

  /**
   * Returns stdin if filename is "-", else opens the local or HDFS file and
   * returns an InputStream for it.
   *
   * @throws IOException
   */
  static InputStream input(String name, InputStream stdin) throws IOException {
    if (name.equals("-")) {
      return new BufferedInputStream(stdin);
    }
    if (name.startsWith("hdfs://")) {
      FileSystem hdfs = FileSystem.get(URI.create(name), new Configuration());
      return new BufferedInputStream(hdfs.open(new Path(name)));
    }
    return new BufferedInputStream(new FileInputStream(new File(name)));
  }

  /**
   * Returns stdout if filename is "-", else opens the local or HDFS file and
   * returns an OutputStream for it.
   *
   * @throws IOException
   */
  static OutputStream output(String name, OutputStream stdout) throws IOException {
    if (name.equals("-")) {
      return new BufferedOutputStream(stdout);
    }
    if (name.startsWith("hdfs://")) {
      FileSystem hdfs = FileSystem.get(URI.create(name), new Configuration());
      return new BufferedOutputStream(hdfs.create(new Path(name)));
    }
    return new BufferedOutputStream(new FileOutputStream(new File(name)));
  }
}
6,916
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileGetSchemaTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.InputStream; import java.io.PrintStream; import java.util.List; import org.apache.avro.file.DataFileReader; import org.apache.avro.generic.GenericDatumReader; /** Reads a data file to get its schema. */ public class DataFileGetSchemaTool implements Tool { @Override public String getName() { return "getschema"; } @Override public String getShortDescription() { return "Prints out schema of an Avro data file."; } @Override public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception { if (args.size() != 1) { err.println("Expected 1 argument: input_file"); return 1; } DataFileReader<Void> reader = new DataFileReader<>(Util.openSeekableFromFS(args.get(0)), new GenericDatumReader<>()); out.println(reader.getSchema().toString(true)); reader.close(); return 0; } }
6,917
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/Tool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.InputStream; import java.io.PrintStream; import java.util.List; /** * Command-line "avro-tools" utilities should implement this interface for * delegation by {@link Main}. */ public interface Tool { /** * Runs the tool with supplied arguments. Input and output streams are * customizable for easier testing. * * @param in Input stream to read data (typically System.in). * @param out Output of tool (typically System.out). * @param err Error stream (typically System.err). * @param args Non-null list of arguments. * @return result code (0 for success) * @throws Exception Just like main(), tools may throw Exception. */ int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception; /** * Name of tool, to be used in listings. */ String getName(); /** * 1-line description to be used in command listings. */ String getShortDescription(); }
6,918
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileReadTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** Reads a data file and dumps to JSON */
public class DataFileReadTool implements Tool {
  private static final Logger LOG = LoggerFactory.getLogger(DataFileReadTool.class);

  private static final long DEFAULT_HEAD_COUNT = 10;

  @Override
  public String getName() {
    return "tojson";
  }

  @Override
  public String getShortDescription() {
    return "Dumps an Avro data file as JSON, record per line or pretty.";
  }

  /**
   * Dumps the records of the single input file (or stdin for "-") as JSON,
   * optionally pretty-printed and/or limited to the first N records, and
   * optionally decoded with an explicit reader schema.
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser parser = new OptionParser();
    OptionSpec<Void> prettyOption = parser.accepts("pretty", "Turns on pretty printing.");
    String headDesc = String.format("Converts the first X records (default is %d).", DEFAULT_HEAD_COUNT);
    OptionSpec<String> headOption = parser.accepts("head", headDesc).withOptionalArg();
    OptionSpec<String> schemaFileOption = parser.accepts("reader-schema-file", "Reader schema file").withOptionalArg()
        .ofType(String.class);
    OptionSpec<String> schemaStrOption = parser.accepts("reader-schema", "Reader schema").withOptionalArg()
        .ofType(String.class);

    OptionSet opts = parser.parse(args.toArray(new String[0]));
    boolean pretty = opts.has(prettyOption);
    List<String> positional = new ArrayList<>((List<String>) opts.nonOptionArguments());

    Schema readerSchema = getSchema(schemaStrOption.value(opts), schemaFileOption.value(opts));

    // May push non-numeric --head values back onto the positional list, so it
    // must run before the argument-count check below.
    long headCount = getHeadCount(opts, headOption, positional);

    if (positional.size() != 1) {
      printHelp(err);
      err.println();
      parser.printHelpOn(err);
      return 1;
    }

    BufferedInputStream input = Util.fileOrStdin(positional.get(0), stdin);
    GenericDatumReader<Object> datumReader = new GenericDatumReader<>();
    if (readerSchema != null) {
      datumReader.setExpected(readerSchema);
    }
    try (DataFileStream<Object> fileStream = new DataFileStream<>(input, datumReader)) {
      Schema schema = readerSchema != null ? readerSchema : fileStream.getSchema();
      DatumWriter<Object> datumWriter = new GenericDatumWriter<>(schema);
      JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, out, pretty);

      long emitted = 0;
      while (fileStream.hasNext() && emitted < headCount) {
        datumWriter.write(fileStream.next(), encoder);
        emitted++;
      }
      encoder.flush();
      out.println();
      out.flush();
    }
    return 0;
  }

  /**
   * Resolves the reader schema: a schema file wins over an inline schema
   * string; returns null when neither was supplied.
   */
  static Schema getSchema(String schemaStr, String schemaFile) throws IOException {
    if (schemaFile != null) {
      LOG.info("Reading schema from file '{}'", schemaFile);
      return Util.parseSchemaFromFS(schemaFile);
    }
    if (schemaStr != null) {
      LOG.info("Reading schema from string '{}'", schemaStr);
      return new Schema.Parser().parse(schemaStr);
    }
    return null;
  }

  /**
   * Returns the record limit implied by --head. A non-numeric --head value is
   * assumed to be a misplaced positional argument and is appended back to
   * {@code positional}.
   */
  private static long getHeadCount(OptionSet opts, OptionSpec<String> headOption, List<String> positional) {
    if (!opts.has(headOption)) {
      return Long.MAX_VALUE;
    }
    long limit = DEFAULT_HEAD_COUNT;
    List<String> headValues = opts.valuesOf(headOption);
    if (!headValues.isEmpty()) {
      // If the value parses to a long, assume it's meant to go with --head;
      // otherwise assume it was a non-option argument and add it back.
      // TODO: support input filenames whose whole path+name is int parsable?
      try {
        limit = Long.parseLong(headValues.get(0));
        if (limit < 0)
          throw new AvroRuntimeException("--head count must not be negative");
      } catch (NumberFormatException ex) {
        positional.addAll(headValues);
      }
    }
    return limit;
  }

  /** Prints a one-screen usage summary to {@code ps}. */
  private void printHelp(PrintStream ps) {
    ps.println("tojson [--pretty] [--head[=X]] input-file");
    ps.println();
    ps.println(getShortDescription());
    ps.println("A dash ('-') can be given as an input file to use stdin");
  }
}
6,919
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/Main.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import java.io.InputStream;

/** Command-line driver. */
public class Main {

  /**
   * Available tools, initialized in constructor.
   */
  final Map<String, Tool> tools;

  // Length of the longest tool name, used to align the tool listing.
  int maxLen = 0;

  Main() {
    tools = new TreeMap<>();
    for (Tool tool : new Tool[] { new CatTool(), new RecordCountTool(), new SpecificCompilerTool(),
        new InduceSchemaTool(), new JsonToBinaryFragmentTool(), new BinaryFragmentToJsonTool(),
        new CreateRandomFileTool(), new DataFileReadTool(), new DataFileWriteTool(), new DataFileGetMetaTool(),
        new DataFileGetSchemaTool(), new DataFileRepairTool(), new IdlTool(), new IdlToSchemataTool(),
        new RecodecTool(), new ConcatTool(), new RpcReceiveTool(), new RpcSendTool(), new RpcProtocolTool(),
        new FromTextTool(), new ToTextTool(), new ToTrevniTool(), new TetherTool(), new TrevniCreateRandomTool(),
        new TrevniMetadataTool(), new TrevniToJsonTool(), new SchemaNormalizationTool(),
        new SchemaFingerprintTool() }) {
      Tool prev = tools.put(tool.getName(), tool);
      if (prev != null) {
        throw new AssertionError("Two tools with identical names: " + tool + ", " + prev);
      }
      maxLen = Math.max(tool.getName().length(), maxLen);
    }
  }

  public static void main(String[] args) throws Exception {
    int rc = new Main().run(args);
    System.exit(rc);
  }

  /**
   * Delegates to tool specified on the command-line. When no (or an unknown)
   * tool is named, prints version/notice banners and the tool listing and
   * returns 1.
   */
  private int run(String[] args) throws Exception {
    if (args.length != 0) {
      Tool tool = tools.get(args[0]);
      if (tool != null) {
        return tool.run(System.in, System.out, System.err, Arrays.asList(args).subList(1, args.length));
      }
    }
    System.err.print("Version ");
    try (InputStream versionInput = Main.class.getClassLoader().getResourceAsStream("VERSION.txt")) {
      // getResourceAsStream returns null when the resource is absent; guard to
      // avoid an NPE that would mask the usage listing below.
      if (versionInput != null) {
        printStream(versionInput);
      }
    }
    System.err.print(" of ");
    try (InputStream noticeInput = Main.class.getClassLoader().getResourceAsStream("META-INF/NOTICE")) {
      if (noticeInput != null) {
        printHead(noticeInput, 5);
      }
    }
    System.err.println("----------------");

    System.err.println("Available tools:");
    for (Tool k : tools.values()) {
      System.err.printf("%" + maxLen + "s  %s\n", k.getName(), k.getShortDescription());
    }

    return 1;
  }

  /** Copies the whole stream to stderr. */
  private static void printStream(InputStream in) throws Exception {
    byte[] buffer = new byte[1024];
    for (int i = in.read(buffer); i != -1; i = in.read(buffer))
      System.err.write(buffer, 0, i);
  }

  /** Copies at most {@code lines} lines of the stream to stderr. */
  private static void printHead(InputStream in, int lines) throws Exception {
    BufferedReader r = new BufferedReader(new InputStreamReader(in));
    for (int i = 0; i < lines; i++) {
      String line = r.readLine();
      if (line == null) {
        break;
      }
      System.err.println(line);
    }
  }
}
6,920
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/CreateRandomFileTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.util.RandomData;

/** Creates a file filled with randomly-generated instances of a schema. */
public class CreateRandomFileTool implements Tool {

  @Override
  public String getName() {
    return "random";
  }

  @Override
  public String getShortDescription() {
    return "Creates a file with randomly generated instances of a schema.";
  }

  /**
   * Writes --count random records of the given schema (--schema or
   * --schema-file) to the output file argument ('-' for stdout), optionally
   * with a fixed --seed and a compression codec.
   *
   * All argument validation happens before the output file is created, so a
   * usage error no longer leaves behind a partially-written file; the writer
   * is closed via try-with-resources even if appending fails (was a leak).
   *
   * @return 0 on success, 1 on a usage error
   */
  @SuppressWarnings("unchecked")
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSpec<Integer> count = p.accepts("count", "Record Count").withRequiredArg().ofType(Integer.class);
    OptionSpec<String> codec = Util.compressionCodecOption(p);
    OptionSpec<Integer> level = Util.compressionLevelOption(p);
    OptionSpec<String> file = p.accepts("schema-file", "Schema File").withOptionalArg().ofType(String.class);
    OptionSpec<String> inschema = p.accepts("schema", "Schema").withOptionalArg().ofType(String.class);
    OptionSpec<Long> seedOpt = p.accepts("seed", "Seed for random").withOptionalArg().ofType(Long.class);

    OptionSet opts = p.parse(args.toArray(new String[0]));
    if (opts.nonOptionArguments().size() != 1) {
      err.println("Usage: outFile (filename or '-' for stdout)");
      p.printHelpOn(err);
      return 1;
    }
    args = (List<String>) opts.nonOptionArguments();

    String schemastr = inschema.value(opts);
    String schemafile = file.value(opts);
    Long seed = seedOpt.value(opts);
    if (schemastr == null && schemafile == null) {
      err.println("Need input schema (--schema-file) or (--schema)");
      p.printHelpOn(err);
      return 1;
    }

    // Validate --count before touching the output file.
    Integer countValue = count.value(opts);
    if (countValue == null) {
      err.println("Need count (--count)");
      p.printHelpOn(err);
      return 1;
    }

    Schema schema = (schemafile != null) ? Util.parseSchemaFromFS(schemafile) : new Schema.Parser().parse(schemastr);

    try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
      writer.setCodec(Util.codecFactory(opts, codec, level));
      writer.create(schema, Util.fileOrStdout(args.get(0), out));

      // A null seed means non-deterministic output; a fixed seed reproduces it.
      RandomData rd = seed == null ? new RandomData(schema, countValue) : new RandomData(schema, countValue, seed);
      for (Object datum : rd)
        writer.append(datum);
    }

    return 0;
  }
}
6,921
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/RecordCountTool.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import com.google.common.collect.ImmutableList;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.hadoop.fs.Path;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

/** Counts the records in avro files or folders */
public class RecordCountTool implements Tool {

  @Override
  public String getName() {
    return "count";
  }

  @Override
  public String getShortDescription() {
    return "Counts the records in avro files or folders";
  }

  /**
   * Sums the record counts of all input files (stdin when the sole argument is
   * "-") and prints the total to {@code out}.
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser parser = new OptionParser();
    OptionSet opts = parser.parse(args.toArray(new String[0]));
    List<String> inputs = (List<String>) opts.nonOptionArguments();

    if (inputs.isEmpty()) {
      printHelp(err);
      err.println();
      parser.printHelpOn(err);
      return 0;
    }

    long total = 0L;
    if (ImmutableList.of("-").equals(inputs)) {
      total = countRecords(stdin);
    } else {
      for (Path path : Util.getFiles(inputs)) {
        try (final InputStream fileStream = Util.openFromFS(path)) {
          total += countRecords(fileStream);
        }
      }
    }
    out.println(total);
    out.flush();
    return 0;
  }

  /**
   * Counts the records in one Avro container stream by summing block record
   * counts, skipping whole blocks without decoding individual records.
   */
  private long countRecords(InputStream inStream) throws java.io.IOException {
    long records = 0L;
    try (DataFileStream<Object> stream = new DataFileStream<>(inStream, new GenericDatumReader<>())) {
      while (stream.hasNext()) {
        records += stream.getBlockCount();
        stream.nextBlock();
      }
    }
    return records;
  }

  /** Prints a one-screen usage summary to {@code ps}. */
  private void printHelp(PrintStream ps) {
    ps.println(getName() + " [input-files...]");
    ps.println();
    ps.println(getShortDescription());
    ps.println("A dash ('-') can be given as an input-file to use stdin");
  }
}
6,922
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileRepairTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.avro.Schema; import org.apache.avro.file.CodecFactory; import org.apache.avro.file.DataFileConstants; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; /** Recovers data from a corrupt Avro Data file */ public class DataFileRepairTool implements Tool { @Override public String getName() { return "repair"; } @Override public String getShortDescription() { return "Recovers data from a corrupt Avro Data file"; } private void printInfo(PrintStream output) { output.println("Insufficient arguments. 
Arguments: [-o option] " + "input_file output_file \n" + " Where option is one of the following: \n" + " " + ALL + " (default) recover as many records as possible.\n" + " " + PRIOR + " recover only records prior to the first instance" + " of corruption \n" + " " + AFTER + " recover only records after the first instance of" + " corruption.\n" + " " + REPORT + " print the corruption report only, reporting the\n" + " number of valid and corrupted blocks and records\n" + " input_file is the file to read from. output_file is the file to\n" + " create and write recovered data to. output_file is ignored if\n" + " using the report option."); } private static final Set<String> OPTIONS = new HashSet<>(); private static final String ALL = "all"; private static final String PRIOR = "prior"; private static final String AFTER = "after"; private static final String REPORT = "report"; static { OPTIONS.add(ALL); OPTIONS.add(PRIOR); OPTIONS.add(AFTER); OPTIONS.add(REPORT); } @Override public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception { if (args.size() < 2) { printInfo(err); return 1; } int index = 0; String input = args.get(index); String option = "all"; if ("-o".equals(input)) { option = args.get(1); index += 2; } if (!OPTIONS.contains(option) || (args.size() - index < 1)) { printInfo(err); return 1; } input = args.get(index++); if (!REPORT.equals(option)) { if (args.size() - index < 1) { printInfo(err); return 1; } } if (ALL.equals(option)) { return recoverAll(input, args.get(index), out, err); } else if (PRIOR.equals(option)) { return recoverPrior(input, args.get(index), out, err); } else if (AFTER.equals(option)) { return recoverAfter(input, args.get(index), out, err); } else if (REPORT.equals(option)) { return reportOnly(input, out, err); } else { return 1; } } private int recover(String input, String output, PrintStream out, PrintStream err, boolean recoverPrior, boolean recoverAfter) throws IOException { File infile = new 
File(input); if (!infile.canRead()) { err.println("cannot read file: " + input); return 1; } out.println("Recovering file: " + input); GenericDatumReader<Object> reader = new GenericDatumReader<>(); try (DataFileReader<Object> fileReader = new DataFileReader<>(infile, reader)) { Schema schema = fileReader.getSchema(); String codecStr = fileReader.getMetaString(DataFileConstants.CODEC); CodecFactory codecFactory = CodecFactory.fromString("" + codecStr); List<String> metas = fileReader.getMetaKeys(); if (recoverPrior || recoverAfter) { GenericDatumWriter<Object> writer = new GenericDatumWriter<>(); DataFileWriter<Object> fileWriter = new DataFileWriter<>(writer); try { File outfile = new File(output); for (String key : metas) { if (!key.startsWith("avro.")) { byte[] val = fileReader.getMeta(key); fileWriter.setMeta(key, val); } } fileWriter.setCodec(codecFactory); int result = innerRecover(fileReader, fileWriter, out, err, recoverPrior, recoverAfter, schema, outfile); return result; } catch (Exception e) { e.printStackTrace(err); return 1; } } else { return innerRecover(fileReader, null, out, err, recoverPrior, recoverAfter, null, null); } } } private int innerRecover(DataFileReader<Object> fileReader, DataFileWriter<Object> fileWriter, PrintStream out, PrintStream err, boolean recoverPrior, boolean recoverAfter, Schema schema, File outfile) { int numBlocks = 0; int numCorruptBlocks = 0; int numRecords = 0; int numCorruptRecords = 0; int recordsWritten = 0; long position = fileReader.previousSync(); long blockSize = 0; long blockCount = 0; boolean fileWritten = false; try { while (true) { try { if (!fileReader.hasNext()) { out.println("File Summary: "); out.println(" Number of blocks: " + numBlocks + " Number of corrupt blocks: " + numCorruptBlocks); out.println(" Number of records: " + numRecords + " Number of corrupt records: " + numCorruptRecords); if (recoverAfter || recoverPrior) { out.println(" Number of records written " + recordsWritten); } out.println(); 
return 0; } position = fileReader.previousSync(); blockCount = fileReader.getBlockCount(); blockSize = fileReader.getBlockSize(); numRecords += blockCount; long blockRemaining = blockCount; numBlocks++; boolean lastRecordWasBad = false; long badRecordsInBlock = 0; while (blockRemaining > 0) { try { Object datum = fileReader.next(); if ((recoverPrior && numCorruptBlocks == 0) || (recoverAfter && numCorruptBlocks > 0)) { if (!fileWritten) { try { fileWriter.create(schema, outfile); fileWritten = true; } catch (Exception e) { e.printStackTrace(err); return 1; } } try { fileWriter.append(datum); recordsWritten++; } catch (Exception e) { e.printStackTrace(err); throw e; } } blockRemaining--; lastRecordWasBad = false; } catch (Exception e) { long pos = blockCount - blockRemaining; if (badRecordsInBlock == 0) { // first corrupt record numCorruptBlocks++; err.println("Corrupt block: " + numBlocks + " Records in block: " + blockCount + " uncompressed block size: " + blockSize); err.println("Corrupt record at position: " + (pos)); } else { // second bad record in block, if consecutive skip block. err.println("Corrupt record at position: " + (pos)); if (lastRecordWasBad) { // consecutive bad record err.println( "Second consecutive bad record in block: " + numBlocks + ". Skipping remainder of block. "); numCorruptRecords += blockRemaining; badRecordsInBlock += blockRemaining; try { fileReader.sync(position); } catch (Exception e2) { err.println("failed to sync to sync marker, aborting"); e2.printStackTrace(err); return 1; } break; } } blockRemaining--; lastRecordWasBad = true; numCorruptRecords++; badRecordsInBlock++; } } if (badRecordsInBlock != 0) { err.println("** Number of unrecoverable records in block: " + (badRecordsInBlock)); } position = fileReader.previousSync(); } catch (Exception e) { err.println("Failed to read block " + numBlocks + ". Unknown record " + "count in block. Skipping. 
Reason: " + e.getMessage()); numCorruptBlocks++; try { fileReader.sync(position); } catch (Exception e2) { err.println("failed to sync to sync marker, aborting"); e2.printStackTrace(err); return 1; } } } } finally { if (fileWritten) { try { fileWriter.close(); } catch (Exception e) { e.printStackTrace(err); return 1; } } } } private int reportOnly(String input, PrintStream out, PrintStream err) throws IOException { return recover(input, null, out, err, false, false); } private int recoverAfter(String input, String output, PrintStream out, PrintStream err) throws IOException { return recover(input, output, out, err, false, true); } private int recoverPrior(String input, String output, PrintStream out, PrintStream err) throws IOException { return recover(input, output, out, err, true, false); } private int recoverAll(String input, String output, PrintStream out, PrintStream err) throws IOException { return recover(input, output, out, err, true, true); } }
6,923
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/BinaryFragmentToJsonTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;

/** Converts an input file from Avro binary into JSON. */
public class BinaryFragmentToJsonTool implements Tool {

  /**
   * Decodes one or more binary-encoded Avro datums from the input and renders
   * each as JSON on {@code out}.
   *
   * @param stdin used when the input file argument is "-"
   * @param out   receives the JSON output
   * @param err   receives usage/help text on argument errors
   * @param args  command-line arguments: optional --no-pretty and --schema-file,
   *              then either [inline-schema] input-file or just input-file
   * @return 0 on success, 1 on a usage error
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser optionParser = new OptionParser();
    OptionSpec<Void> noPrettyOption = optionParser.accepts("no-pretty", "Turns off pretty printing.");
    OptionSpec<String> schemaFileOption = optionParser
        .accepts("schema-file", "File containing schema, must not occur with inline schema.").withOptionalArg()
        .ofType(String.class);

    OptionSet optionSet = optionParser.parse(args.toArray(new String[0]));
    // A primitive boolean is sufficient here; the original boxed it needlessly.
    boolean noPretty = optionSet.has(noPrettyOption);
    // joptsimple returns List<?>; cast is safe because non-option args are Strings.
    @SuppressWarnings("unchecked")
    List<String> nargs = (List<String>) optionSet.nonOptionArguments();
    String schemaFile = schemaFileOption.value(optionSet);

    // With --schema-file the single positional arg is the input file; otherwise
    // the schema is given inline as the first positional arg.
    if (nargs.size() != (schemaFile == null ? 2 : 1)) {
      err.println("fragtojson --no-pretty --schema-file <file> [inline-schema] input-file");
      err.println(" converts Avro fragments to JSON.");
      optionParser.printHelpOn(err);
      err.println(" A dash '-' for input-file means stdin.");
      return 1;
    }

    Schema schema;
    String inputFile;
    if (schemaFile == null) {
      schema = new Schema.Parser().parse(nargs.get(0));
      inputFile = nargs.get(1);
    } else {
      schema = Util.parseSchemaFromFS(schemaFile);
      inputFile = nargs.get(0);
    }

    InputStream input = Util.fileOrStdin(inputFile, stdin);
    try {
      DatumReader<Object> reader = new GenericDatumReader<>(schema);
      BinaryDecoder binaryDecoder = DecoderFactory.get().binaryDecoder(input, null);
      DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
      JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(schema, out, !noPretty);
      Object datum = null;
      // The input may contain several concatenated datums; decode until exhausted.
      while (!binaryDecoder.isEnd()) {
        datum = reader.read(datum, binaryDecoder);
        writer.write(datum, jsonEncoder);
        jsonEncoder.flush();
      }
      out.println();
      out.flush();
    } finally {
      Util.close(input);
    }
    return 0;
  }

  @Override
  public String getName() {
    return "fragtojson";
  }

  @Override
  public String getShortDescription() {
    return "Renders a binary-encoded Avro datum as JSON.";
  }
}
6,924
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/RpcProtocolTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import org.apache.avro.Protocol;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.ipc.HandshakeRequest;
import org.apache.avro.ipc.HandshakeResponse;
import org.apache.avro.ipc.Ipc;
import org.apache.avro.ipc.MD5;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.avro.util.ByteBufferOutputStream;

import java.io.InputStream;
import java.io.PrintStream;
import java.net.URI;
import java.nio.ByteBuffer;
import java.util.LinkedHashMap;
import java.util.List;

/** Tool to grab the protocol from a remote running service. */
public class RpcProtocolTool implements Tool {

  @Override
  public String getName() {
    return "rpcprotocol";
  }

  @Override
  public String getShortDescription() {
    return "Output the protocol of a RPC service";
  }

  /**
   * Connects to the RPC service at the given URI, performs an Avro handshake
   * with all-zero hashes, and prints the protocol the server reports.
   *
   * @param args exactly one argument: the service URI
   * @return 0 on success, 1 on a usage error
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    if (args.size() != 1) {
      err.println("Usage: uri");
      return 1;
    }

    final URI remote = URI.create(args.get(0));

    try (Transceiver connection = Ipc.createTransceiver(remote)) {
      // Build a handshake request with zeroed hashes: we know neither our own
      // protocol hash nor the server's, which forces the server to reply with
      // its full protocol text.
      HandshakeRequest handshake = HandshakeRequest.newBuilder().setClientHash(new MD5(new byte[16]))
          .setServerHash(new MD5(new byte[16])).setClientProtocol(null).setMeta(new LinkedHashMap<>()).build();

      // Serialize the request into a list of byte buffers.
      DatumWriter<HandshakeRequest> requestWriter = new SpecificDatumWriter<>(HandshakeRequest.class);
      ByteBufferOutputStream requestBuffers = new ByteBufferOutputStream();
      BinaryEncoder requestEncoder = EncoderFactory.get().binaryEncoder(requestBuffers, null);
      requestWriter.write(handshake, requestEncoder);
      requestEncoder.flush();

      // Round-trip the handshake over the wire.
      List<ByteBuffer> replyBuffers = connection.transceive(requestBuffers.getBufferList());

      // Decode the server's handshake response.
      DatumReader<HandshakeResponse> responseReader = new SpecificDatumReader<>(HandshakeResponse.class);
      ByteBufferInputStream replyStream = new ByteBufferInputStream(replyBuffers);
      HandshakeResponse response = responseReader.read(null, DecoderFactory.get().binaryDecoder(replyStream, null));

      // Print the parsed protocol in its pretty (multi-line) JSON form.
      Protocol parsed = Protocol.parse(response.getServerProtocol());
      out.println(parsed.toString(true));
    }
    return 0;
  }
}
6,925
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/DataFileWriteTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

/** Reads new-line delimited JSON records and writers an Avro data file. */
public class DataFileWriteTool implements Tool {

  @Override
  public String getName() {
    return "fromjson";
  }

  @Override
  public String getShortDescription() {
    return "Reads JSON records and writes an Avro data file.";
  }

  /**
   * Reads JSON-encoded records from the single input file (or stdin for "-")
   * and writes them as an Avro data file to {@code out}.
   *
   * The schema comes from either --schema (inline) or --schema-file; exactly
   * one must be supplied.
   *
   * @return 0 on success, 1 on a usage error
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSpec<String> codec = Util.compressionCodecOptionWithDefault(p, DataFileConstants.NULL_CODEC);
    OptionSpec<Integer> level = Util.compressionLevelOption(p);
    OptionSpec<String> file = p.accepts("schema-file", "Schema File").withOptionalArg().ofType(String.class);
    OptionSpec<String> inschema = p.accepts("schema", "Schema").withOptionalArg().ofType(String.class);
    OptionSet opts = p.parse(args.toArray(new String[0]));
    // joptsimple returns List<?>; non-option args are always Strings.
    @SuppressWarnings("unchecked")
    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() != 1) {
      err.println("Expected 1 arg: input_file");
      p.printHelpOn(err);
      return 1;
    }
    String schemastr = inschema.value(opts);
    String schemafile = file.value(opts);
    if (schemastr == null && schemafile == null) {
      err.println("Need an input schema file (--schema-file) or inline schema (--schema)");
      p.printHelpOn(err);
      return 1;
    }
    Schema schema = DataFileReadTool.getSchema(schemastr, schemafile);
    DatumReader<Object> reader = new GenericDatumReader<>(schema);

    InputStream input = Util.fileOrStdin(nargs.get(0), stdin);
    try {
      DataInputStream din = new DataInputStream(input);
      // try-with-resources: the original leaked the writer (and its buffered
      // data) if jsonDecoder/read/append threw before the explicit close().
      try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
        writer.setCodec(Util.codecFactory(opts, codec, level, DataFileConstants.NULL_CODEC));
        writer.create(schema, out);
        Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
        Object datum;
        while (true) {
          try {
            datum = reader.read(null, decoder);
          } catch (EOFException e) {
            // Normal termination: no more JSON records in the input.
            break;
          }
          writer.append(datum);
        }
      }
    } finally {
      Util.close(input);
    }
    return 0;
  }
}
6,926
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/SpecificCompilerTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData.StringType;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.compiler.specific.SpecificCompiler.FieldVisibility;

/**
 * A Tool for compiling avro protocols or schemas to Java classes using the Avro
 * SpecificCompiler.
 */
public class SpecificCompilerTool implements Tool {

  /**
   * Parses the command line, then compiles every matching input file (or file
   * within an input directory) into Java sources under the output directory.
   *
   * Recognized options are removed from a mutable copy of the argument list;
   * whatever remains must be: [-bigDecimal] (schema|protocol) input... outputdir.
   *
   * @return 0 on success, 1 on any usage error
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> origArgs) throws Exception {
    if (origArgs.size() < 3) {
      printUsage();
      return 1;
    }

    CompilerOptions compilerOpts = new CompilerOptions();
    // Defaults preserve the tool's historical behavior.
    compilerOpts.stringType = StringType.CharSequence;
    compilerOpts.useLogicalDecimal = false;
    compilerOpts.createSetters = true;
    compilerOpts.createNullSafeAnnotations = false;
    compilerOpts.optionalGettersType = Optional.empty();
    compilerOpts.addExtraOptionalGetters = false;
    compilerOpts.encoding = Optional.empty();
    compilerOpts.templateDir = Optional.empty();
    compilerOpts.fieldVisibility = Optional.empty();

    // Work on a mutable copy; recognized options are removed as they are parsed.
    List<String> args = new ArrayList<>(origArgs);
    if (args.contains("-noSetters")) {
      compilerOpts.createSetters = false;
      args.remove(args.indexOf("-noSetters"));
    }
    if (args.contains("-nullSafeAnnotations")) {
      compilerOpts.createNullSafeAnnotations = true;
      args.remove(args.indexOf("-nullSafeAnnotations"));
    }
    if (args.contains("-addExtraOptionalGetters")) {
      compilerOpts.addExtraOptionalGetters = true;
      args.remove(args.indexOf("-addExtraOptionalGetters"));
    }

    int arg = 0;
    if (args.contains("-optionalGetters")) {
      arg = args.indexOf("-optionalGetters") + 1;
      try {
        compilerOpts.optionalGettersType = Optional
            .of(OptionalGettersType.valueOf(args.get(arg).toUpperCase(Locale.ENGLISH)));
      } catch (IllegalArgumentException | IndexOutOfBoundsException e) {
        System.err.println("Expected one of" + Arrays.toString(OptionalGettersType.values()));
        return 1;
      }
      args.remove(arg);
      args.remove(arg - 1);
    }
    if (args.contains("-encoding")) {
      arg = args.indexOf("-encoding") + 1;
      // Fix: a trailing "-encoding" with no value used to crash with an
      // uncaught IndexOutOfBoundsException; report a usage error instead.
      if (arg >= args.size()) {
        System.err.println("Missing value for -encoding");
        return 1;
      }
      compilerOpts.encoding = Optional.of(args.get(arg));
      args.remove(arg);
      args.remove(arg - 1);
    }
    if (args.contains("-string")) {
      compilerOpts.stringType = StringType.String;
      args.remove(args.indexOf("-string"));
    }
    if (args.contains("-fieldVisibility")) {
      arg = args.indexOf("-fieldVisibility") + 1;
      try {
        compilerOpts.fieldVisibility = Optional.of(FieldVisibility.valueOf(args.get(arg).toUpperCase(Locale.ENGLISH)));
      } catch (IllegalArgumentException | IndexOutOfBoundsException e) {
        System.err.println("Expected one of" + Arrays.toString(FieldVisibility.values()));
        return 1;
      }
      args.remove(arg);
      args.remove(arg - 1);
    }
    if (args.contains("-templateDir")) {
      arg = args.indexOf("-templateDir") + 1;
      // Same trailing-option guard as for -encoding.
      if (arg >= args.size()) {
        System.err.println("Missing value for -templateDir");
        return 1;
      }
      compilerOpts.templateDir = Optional.of(args.get(arg));
      args.remove(arg);
      args.remove(arg - 1);
    }

    arg = 0;
    // Note: -bigDecimal is positional; it is only recognized as the first
    // remaining argument (historical behavior, kept for compatibility).
    if (!args.isEmpty() && "-bigDecimal".equalsIgnoreCase(args.get(arg))) {
      compilerOpts.useLogicalDecimal = true;
      arg++;
    }
    // After option parsing at least the method and the output dir must remain;
    // previously an option-only command line crashed with IndexOutOfBounds.
    if (args.size() < arg + 2) {
      System.err.println("Expected at least: (schema|protocol) input... outputdir");
      return 1;
    }

    String method = args.get(arg);
    List<File> inputs = new ArrayList<>();
    File output = new File(args.get(args.size() - 1));
    for (int i = arg + 1; i < args.size() - 1; i++) {
      inputs.add(new File(args.get(i)));
    }

    if ("schema".equals(method)) {
      Schema.Parser parser = new Schema.Parser();
      for (File src : determineInputs(inputs, SCHEMA_FILTER)) {
        Schema schema = parser.parse(src);
        final SpecificCompiler compiler = new SpecificCompiler(schema);
        executeCompiler(compiler, compilerOpts, src, output);
      }
    } else if ("protocol".equals(method)) {
      for (File src : determineInputs(inputs, PROTOCOL_FILTER)) {
        Protocol protocol = Protocol.parse(src);
        final SpecificCompiler compiler = new SpecificCompiler(protocol);
        executeCompiler(compiler, compilerOpts, src, output);
      }
    } else {
      System.err.println("Expected \"schema\" or \"protocol\".");
      return 1;
    }
    return 0;
  }

  /** Prints the command-line usage summary to stderr. */
  private static void printUsage() {
    System.err
        .println("Usage: [-encoding <outputencoding>] [-string] [-bigDecimal] [-fieldVisibility <visibilityType>] "
            + "[-noSetters] [-nullSafeAnnotations] [-addExtraOptionalGetters] [-optionalGetters <optionalGettersType>] "
            + "[-templateDir <templateDir>] (schema|protocol) input... outputdir");
    System.err.println(" input - input files or directories");
    System.err.println(" outputdir - directory to write generated java");
    System.err.println(" -encoding <outputencoding> - set the encoding of " + "output file(s)");
    System.err.println(" -string - use java.lang.String instead of Utf8");
    System.err.println(" -fieldVisibility [private|public] - use either and default private");
    System.err.println(" -noSetters - do not generate setters");
    System.err.println(" -nullSafeAnnotations - add @Nullable and @NotNull annotations");
    System.err
        .println(" -addExtraOptionalGetters - generate extra getters with this format: 'getOptional<FieldName>'");
    System.err.println(
        " -optionalGetters [all_fields|only_nullable_fields]- generate getters returning Optional<T> for all fields or only for nullable fields");
    System.err
        .println(" -bigDecimal - use java.math.BigDecimal for " + "decimal type instead of java.nio.ByteBuffer");
    System.err.println(" -templateDir - directory with custom Velocity templates");
  }

  /** Applies the parsed options to the compiler and generates code for one source file. */
  private void executeCompiler(SpecificCompiler compiler, CompilerOptions opts, File src, File output)
      throws IOException {
    compiler.setStringType(opts.stringType);
    compiler.setCreateSetters(opts.createSetters);
    compiler.setCreateNullSafeAnnotations(opts.createNullSafeAnnotations);
    opts.optionalGettersType.ifPresent(choice -> {
      compiler.setGettersReturnOptional(true);
      switch (choice) {
      case ALL_FIELDS:
        compiler.setOptionalGettersForNullableFieldsOnly(false);
        break;
      case ONLY_NULLABLE_FIELDS:
        compiler.setOptionalGettersForNullableFieldsOnly(true);
        break;
      default:
        throw new IllegalStateException("Unsupported value '" + choice + "'");
      }
    });
    compiler.setCreateOptionalGetters(opts.addExtraOptionalGetters);
    opts.templateDir.ifPresent(compiler::setTemplateDir);
    compiler.setEnableDecimalLogicalType(opts.useLogicalDecimal);
    opts.encoding.ifPresent(compiler::setOutputCharacterEncoding);
    opts.fieldVisibility.ifPresent(compiler::setFieldVisibility);
    compiler.compileToDestination(src, output);
  }

  @Override
  public String getName() {
    return "compile";
  }

  @Override
  public String getShortDescription() {
    return "Generates Java code for the given schema.";
  }

  /**
   * For an Array of files, sort using {@link String#compareTo(String)} for each
   * filename.
   *
   * @param files Array of File objects to sort
   * @return the sorted File array
   */
  private static File[] sortFiles(File[] files) {
    Objects.requireNonNull(files, "files cannot be null");
    Arrays.sort(files, Comparator.comparing(File::getName));
    return files;
  }

  /**
   * For a List of files or directories, returns a File[] containing each file
   * passed as well as each file with a matching extension found in the directory.
   * Each directory is sorted using {@link String#compareTo(String)} for each
   * filename.
   *
   * @param inputs List of File objects that are files or directories
   * @param filter File extension filter to match on when fetching files from a
   *               directory
   * @return Unique array of files
   */
  private static File[] determineInputs(List<File> inputs, FilenameFilter filter) {
    Set<File> fileSet = new LinkedHashSet<>(); // preserve order and uniqueness

    for (File file : inputs) {
      // if directory, look at contents to see what files match extension
      if (file.isDirectory()) {
        File[] files = file.listFiles(filter);
        // sort files in directory to compile deterministically
        // independent of system/locale
        Collections.addAll(fileSet, files != null ? sortFiles(files) : new File[0]);
      }
      // otherwise, just add the file.
      else {
        fileSet.add(file);
      }
    }

    if (fileSet.size() > 0) {
      System.err.println("Input files to compile:");
      for (File file : fileSet) {
        System.err.println("  " + file);
      }
    } else {
      System.err.println("No input files found.");
    }

    return fileSet.toArray(new File[0]);
  }

  private static final FileExtensionFilter SCHEMA_FILTER = new FileExtensionFilter("avsc");
  private static final FileExtensionFilter PROTOCOL_FILTER = new FileExtensionFilter("avpr");

  /** Plain holder for the parsed command-line options. */
  private static class CompilerOptions {
    Optional<String> encoding;
    StringType stringType;
    Optional<FieldVisibility> fieldVisibility;
    boolean useLogicalDecimal;
    boolean createSetters;
    boolean createNullSafeAnnotations;
    boolean addExtraOptionalGetters;
    Optional<OptionalGettersType> optionalGettersType;
    Optional<String> templateDir;
  }

  /** Values accepted by the -optionalGetters option. */
  private enum OptionalGettersType {
    ALL_FIELDS, ONLY_NULLABLE_FIELDS
  }

  /** Accepts files whose names end with a fixed extension. */
  private static class FileExtensionFilter implements FilenameFilter {
    private final String extension;

    private FileExtensionFilter(String extension) {
      this.extension = extension;
    }

    @Override
    public boolean accept(File dir, String name) {
      return name.endsWith(this.extension);
    }
  }
}
6,927
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/CatTool.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.tool;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.List;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;

/** Tool to extract samples from an Avro data file. */
public class CatTool implements Tool {

  // Total records appended to the output so far.
  private long totalCopied;
  // Accumulator for the sampling rate; a record is emitted each time it reaches 1.
  private double sampleCounter;
  // Reusable record instance to avoid per-record allocation.
  private GenericRecord reuse;
  private DataFileStream<GenericRecord> reader;
  private DataFileWriter<GenericRecord> writer;
  // Schema of the first input; all inputs must match it.
  private Schema schema;
  private List<Path> inFiles;
  private int currentInput;

  /**
   * Copies records from the input files into the output file, optionally
   * skipping an offset, limiting the count, and sampling at a given rate.
   *
   * @return 0 on success, 1 on invalid option values
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser optParser = new OptionParser();
    OptionSpec<Long> offsetOpt = optParser.accepts("offset", "offset for reading input").withRequiredArg()
        .ofType(Long.class).defaultsTo(Long.valueOf(0));
    OptionSpec<Long> limitOpt = optParser.accepts("limit", "maximum number of records in the outputfile")
        .withRequiredArg().ofType(Long.class).defaultsTo(Long.MAX_VALUE);
    OptionSpec<Double> fracOpt = optParser.accepts("samplerate", "rate at which records will be collected")
        .withRequiredArg().ofType(Double.class).defaultsTo(Double.valueOf(1));

    OptionSet opts = optParser.parse(args.toArray(new String[0]));
    @SuppressWarnings("unchecked")
    List<String> nargs = (List<String>) opts.nonOptionArguments();
    if (nargs.size() < 2) {
      printHelp(out);
      return 0;
    }

    // Validate the numeric options before opening any input or creating the
    // output; previously a bad value left a half-written output file behind.
    long offset = opts.valueOf(offsetOpt);
    long limit = opts.valueOf(limitOpt);
    double samplerate = opts.valueOf(fracOpt);
    if (limit < 0) {
      System.out.println("limit has to be non-negative");
      this.printHelp(out);
      return 1;
    }
    if (offset < 0) {
      System.out.println("offset has to be non-negative");
      this.printHelp(out);
      return 1;
    }
    if (samplerate < 0 || samplerate > 1) {
      System.out.println("samplerate has to be a number between 0 and 1");
      this.printHelp(out);
      return 1;
    }

    inFiles = Util.getFiles(nargs.subList(0, nargs.size() - 1));
    System.out.println("List of input files:");
    for (Path p : inFiles) {
      System.out.println(p);
    }
    currentInput = -1;
    nextInput();

    OutputStream output = out;
    String lastArg = nargs.get(nargs.size() - 1);
    if (nargs.size() > 1 && !lastArg.equals("-")) {
      output = Util.createFromFS(lastArg);
    }
    writer = new DataFileWriter<>(new GenericDatumWriter<>());

    // Mirror the first input's codec and any non-reserved metadata.
    String codecName = reader.getMetaString(DataFileConstants.CODEC);
    CodecFactory codec = (codecName == null) ? CodecFactory.fromString(DataFileConstants.NULL_CODEC)
        : CodecFactory.fromString(codecName);
    writer.setCodec(codec);
    for (String key : reader.getMetaKeys()) {
      if (!DataFileWriter.isReservedMeta(key)) {
        writer.setMeta(key, reader.getMeta(key));
      }
    }
    writer.create(schema, output);

    sampleCounter = 1;
    totalCopied = 0;
    reuse = null;
    skip(offset);
    writeRecords(limit, samplerate);
    System.out.println(totalCopied + " records written.");
    writer.flush();
    writer.close();
    Util.close(out);
    return 0;
  }

  /** Advances to the next input file, closing the previous one. */
  private void nextInput() throws IOException {
    currentInput++;
    Path path = inFiles.get(currentInput);
    // Fix: the previous reader (and its underlying stream) was never closed,
    // leaking one open stream per input file.
    if (reader != null) {
      reader.close();
    }
    FSDataInputStream input = new FSDataInputStream(Util.openFromFS(path));
    reader = new DataFileStream<>(input, new GenericDatumReader<>());
    if (schema == null) { // if this is the first file, the schema gets saved
      schema = reader.getSchema();
    } else if (!schema.equals(reader.getSchema())) { // subsequent files have to have equal schemas
      throw new IOException("schemas dont match");
    }
  }

  private boolean hasNextInput() {
    return inFiles.size() > (currentInput + 1);
  }

  /** skips a number of records from the input */
  private long skip(long skip) throws IOException {
    long skipped = 0;
    while (0 < skip && reader.hasNext()) {
      reader.next(reuse);
      skip--;
      skipped++;
    }
    if ((0 < skip) && hasNextInput()) { // goto next file
      nextInput();
      skipped = skipped + skip(skip);
    }
    return skipped;
  }

  /**
   * writes records with the given samplerate The record at position offset is
   * guaranteed to be taken
   */
  private long writeRecords(long count, double samplerate) throws IOException {
    long written = 0;
    while (written < count && reader.hasNext()) {
      reuse = reader.next(reuse);
      sampleCounter = sampleCounter + samplerate;
      if (sampleCounter >= 1) {
        writer.append(reuse);
        written++;
        sampleCounter--;
      }
    }
    totalCopied = totalCopied + written;
    if (written < count && hasNextInput()) { // goto next file
      nextInput();
      written = written + writeRecords(count - written, samplerate);
    }
    return written;
  }

  private void printHelp(PrintStream out) {
    out.println("cat --offset <offset> --limit <limit> --samplerate <samplerate> [input-files...] output-file");
    out.println();
    out.println("extracts records from a list of input files into a new file.");
    out.println("--offset start of the extract");
    out.println("--limit maximum number of records in the output file.");
    out.println("--samplerate rate at which records will be collected");
    out.println("A dash ('-') can be given to direct output to stdout");
  }

  @Override
  public String getName() {
    return "cat";
  }

  @Override
  public String getShortDescription() {
    return "Extracts samples from files";
  }
}
6,928
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/SchemaNormalizationTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.InputStream; import java.io.PrintStream; import java.nio.charset.StandardCharsets; import java.util.List; import joptsimple.OptionParser; import joptsimple.OptionSet; import org.apache.avro.Schema; import org.apache.avro.SchemaNormalization; /** * Utility to convert an Avro @{Schema} to its canonical form. 
*/
public class SchemaNormalizationTool implements Tool {
  @Override
  public String getName() {
    return "canonical";
  }

  @Override
  public String getShortDescription() {
    return "Converts an Avro Schema to its canonical form";
  }

  /**
   * Reads an Avro schema from the input and writes its Parsing Canonical Form
   * (UTF-8) to the output.
   *
   * @param stdin used when the input argument is '-'
   * @param out   used when the output argument is '-'
   * @param err   destination for usage/error messages
   * @param args  exactly two non-option arguments: infile outfile
   * @return 0 on success, 1 on bad usage
   */
  @Override
  public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSet opts = p.parse(args.toArray(new String[0]));
    if (opts.nonOptionArguments().size() != 2) {
      err.println("Expected 2 args: infile outfile (filenames or '-' for stdin/stdout)");
      p.printHelpOn(err);
      return 1;
    }

    BufferedInputStream inStream = Util.fileOrStdin(args.get(0), stdin);
    BufferedOutputStream outStream = Util.fileOrStdout(args.get(1), out);
    try {
      // May throw on a malformed schema; the finally block guarantees the
      // streams are released either way.
      Schema schema = new Schema.Parser().parse(inStream);
      String canonicalForm = SchemaNormalization.toParsingForm(schema);
      outStream.write(canonicalForm.getBytes(StandardCharsets.UTF_8));
    } finally {
      // BUGFIX: previously the streams leaked when parse()/write() threw,
      // because the closes were not in a finally block.
      Util.close(inStream);
      Util.close(outStream);
    }
    return 0;
  }
}
6,929
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/RecodecTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintStream; import java.util.List; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.avro.Schema; import org.apache.avro.file.CodecFactory; import org.apache.avro.file.DataFileConstants; import org.apache.avro.file.DataFileStream; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.GenericRecord; /** Tool to alter the codec of an Avro data file. 
*/ public class RecodecTool implements Tool { @Override public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception { OptionParser optParser = new OptionParser(); OptionSpec<String> codecOpt = Util.compressionCodecOptionWithDefault(optParser, DataFileConstants.NULL_CODEC); OptionSpec<Integer> levelOpt = Util.compressionLevelOption(optParser); OptionSet opts = optParser.parse(args.toArray(new String[0])); List<String> nargs = (List<String>) opts.nonOptionArguments(); if (nargs.size() > 2) { err.println("Expected at most an input file and output file."); optParser.printHelpOn(err); return 1; } InputStream input = in; boolean inputNeedsClosing = false; if (nargs.size() > 0 && !nargs.get(0).equals("-")) { input = Util.openFromFS(nargs.get(0)); inputNeedsClosing = true; } OutputStream output = out; boolean outputNeedsClosing = false; if (nargs.size() > 1 && !nargs.get(1).equals("-")) { output = Util.createFromFS(nargs.get(1)); outputNeedsClosing = true; } DataFileStream<GenericRecord> reader = new DataFileStream<>(input, new GenericDatumReader<>()); Schema schema = reader.getSchema(); DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>()); // unlike the other Avro tools, we default to a null codec, not deflate CodecFactory codec = Util.codecFactory(opts, codecOpt, levelOpt, DataFileConstants.NULL_CODEC); writer.setCodec(codec); for (String key : reader.getMetaKeys()) { if (!DataFileWriter.isReservedMeta(key)) { writer.setMeta(key, reader.getMeta(key)); } } writer.create(schema, output); writer.appendAllFrom(reader, true); writer.flush(); if (inputNeedsClosing) { input.close(); } if (outputNeedsClosing) { output.close(); } writer.close(); return 0; } @Override public String getName() { return "recodec"; } @Override public String getShortDescription() { return "Alters the codec of a data file."; } }
6,930
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/InduceSchemaTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.File; import java.io.InputStream; import java.io.PrintStream; import java.net.URL; import java.net.URLClassLoader; import java.util.List; import org.apache.avro.reflect.ReflectData; /** * Utility to induce a schema from a class or a protocol from an interface. 
*/
public class InduceSchemaTool implements Tool {

  /**
   * Loads the named class (optionally from an extra colon-delimited classpath)
   * and prints either the induced protocol (for an interface) or schema (for a
   * class), via reflection.
   *
   * @param args either [classname] or [colon-delimited-classpath, classname]
   * @return 0 on success, 1 on bad usage
   */
  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    if (args.isEmpty() || args.size() > 2) {
      // BUGFIX: write to the provided streams rather than System.err/System.out
      // so output is redirectable and consistent with the other Avro tools.
      err.println("Usage: [colon-delimited-classpath] classname");
      return 1;
    }

    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    String className;
    if (args.size() == 2) {
      String classpaths = args.get(0);
      className = args.get(1);
      if (!classpaths.isEmpty()) {
        // Build a child classloader over the user-supplied path entries.
        String[] paths = classpaths.split(":");
        URL[] urls = new URL[paths.length];
        for (int i = 0; i < paths.length; ++i) {
          urls[i] = new File(paths[i]).toURI().toURL();
        }
        classLoader = URLClassLoader.newInstance(urls, classLoader);
      }
    } else {
      className = args.get(0);
    }

    Class<?> klass = classLoader.loadClass(className);
    // Interfaces induce a protocol; concrete classes induce a schema.
    if (klass.isInterface()) {
      out.println(ReflectData.get().getProtocol(klass).toString(true));
    } else {
      out.println(ReflectData.get().getSchema(klass).toString(true));
    }
    return 0;
  }

  @Override
  public String getName() {
    return "induce";
  }

  @Override
  public String getShortDescription() {
    return "Induce schema/protocol from Java class/interface via reflection.";
  }
}
6,931
0
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro
Create_ds/avro/lang/java/tools/src/main/java/org/apache/avro/tool/RpcReceiveTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.tool; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.io.File; import java.util.List; import java.util.concurrent.CountDownLatch; import java.net.URI; import joptsimple.OptionParser; import joptsimple.OptionSet; import joptsimple.OptionSpec; import org.apache.avro.AvroRemoteException; import org.apache.avro.Protocol; import org.apache.avro.Protocol.Message; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.io.EncoderFactory; import org.apache.avro.io.JsonEncoder; import org.apache.avro.ipc.Ipc; import org.apache.avro.ipc.Server; import org.apache.avro.ipc.generic.GenericResponder; /** * Receives one RPC call and responds. (The moral equivalent of "netcat".) 
*/
public class RpcReceiveTool implements Tool {
  // Stream the received request datum is printed to; assigned in run1().
  private PrintStream out;
  // Datum sent back to the remote caller as the RPC response.
  private Object response;
  /** Used to communicate between server thread (responder) and run() */
  private CountDownLatch latch;
  // The single message this tool expects to receive; anything else is rejected.
  private Message expectedMessage;
  Server server;

  @Override
  public String getName() {
    return "rpcreceive";
  }

  @Override
  public String getShortDescription() {
    return "Opens an RPC Server and listens for one message.";
  }

  /**
   * Responder that prints the incoming request as JSON and replies with the
   * pre-configured response datum, then releases the latch so run2() can shut
   * the server down.
   */
  private class SinkResponder extends GenericResponder {

    public SinkResponder(Protocol local) {
      super(local);
    }

    @Override
    public Object respond(Message message, Object request) throws AvroRemoteException {
      if (!message.equals(expectedMessage)) {
        // Wrong message: report it, release run2() immediately, and signal
        // the error back to the remote caller.
        out.println(
            String.format("Expected message '%s' but received '%s'.", expectedMessage.getName(), message.getName()));
        latch.countDown();
        throw new IllegalArgumentException("Unexpected message.");
      }
      out.print(message.getName());
      out.print("\t");
      try {
        // Echo the request datum as JSON on the same line as the message name.
        JsonEncoder jsonEncoder = EncoderFactory.get().jsonEncoder(message.getRequest(), out);
        GenericDatumWriter<Object> writer = new GenericDatumWriter<>(message.getRequest());
        writer.write(request, jsonEncoder);
        jsonEncoder.flush();
        out.flush();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
      out.println();
      // Release the latch from a separate thread after a short delay so the
      // response can be delivered before run2() closes the server.
      new Thread(() -> {
        try {
          Thread.sleep(1000);
        } catch (InterruptedException e) {
        }
        latch.countDown();
      }).start();
      return response;
    }
  }

  @Override
  public int run(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    // Split up into two functions for easier testing.
    int r = run1(in, out, err, args);
    if (r != 0) {
      return r;
    }
    return run2(err);
  }

  /**
   * Parses arguments, resolves the expected message and response datum, and
   * starts the server. Returns non-zero on bad usage without starting anything.
   */
  int run1(InputStream in, PrintStream out, PrintStream err, List<String> args) throws Exception {
    OptionParser p = new OptionParser();
    OptionSpec<String> file = p.accepts("file", "Data file containing response datum.").withRequiredArg()
        .ofType(String.class);
    OptionSpec<String> data = p.accepts("data", "JSON-encoded response datum.").withRequiredArg().ofType(String.class);
    OptionSet opts = p.parse(args.toArray(new String[0]));
    args = (List<String>) opts.nonOptionArguments();

    if (args.size() != 3) {
      err.println("Usage: uri protocol_file message_name (-data d | -file f)");
      p.printHelpOn(err);
      return 1;
    }

    URI uri = new URI(args.get(0));
    Protocol protocol = Protocol.parse(new File(args.get(1)));
    String messageName = args.get(2);
    expectedMessage = protocol.getMessages().get(messageName);
    if (expectedMessage == null) {
      err.println(String.format("No message named '%s' found in protocol '%s'.", messageName, protocol));
      return 1;
    }
    // The response datum comes either inline (-data, JSON) or from a file.
    if (data.value(opts) != null) {
      this.response = Util.jsonToGenericDatum(expectedMessage.getResponse(), data.value(opts));
    } else if (file.value(opts) != null) {
      this.response = Util.datumFromFile(expectedMessage.getResponse(), file.value(opts));
    } else {
      err.println("One of -data or -file must be specified.");
      return 1;
    }

    this.out = out;
    latch = new CountDownLatch(1);
    server = Ipc.createServer(new SinkResponder(protocol), uri);
    server.start();
    out.println("Port: " + server.getPort());
    return 0;
  }

  /** Blocks until one message has been handled, then closes the server. */
  int run2(PrintStream err) throws InterruptedException {
    latch.await();
    err.println("Closing server.");
    server.close();
    return 0;
  }
}
6,932
0
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestStatsPluginAndServlet.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.io.StringWriter;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.Random;

import javax.servlet.UnavailableException;

import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.LocalTransceiver;
import org.apache.avro.ipc.RPCContext;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.generic.GenericResponder;
import org.apache.avro.ipc.stats.StatsPlugin;
import org.apache.avro.ipc.stats.StatsServlet;
import org.junit.jupiter.api.Test;

/** Tests for StatsPlugin metrics and the HTML rendering done by StatsServlet. */
public class TestStatsPluginAndServlet {
  // Minimal one-message protocol: m(x: int) -> int.
  Protocol protocol = Protocol.parse("" + "{\"protocol\": \"Minimal\", " + "\"messages\": { \"m\": {"
      + " \"request\": [{\"name\": \"x\", \"type\": \"int\"}], " + " \"response\": \"int\"} } }");
  Message message = protocol.getMessages().get("m");

  // Nanoseconds per millisecond; FakeTicks.passTime takes nanoseconds.
  private static final long MS = 1000 * 1000L;

  /** Returns an HTML string. */
  private String generateServletResponse(StatsPlugin statsPlugin) throws IOException {
    StatsServlet servlet;
    try {
      servlet = new StatsServlet(statsPlugin);
    } catch (UnavailableException e1) {
      throw new IOException();
    }
    StringWriter w = new StringWriter();
    try {
      servlet.writeStats(w);
    } catch (Exception e) {
      e.printStackTrace();
    }
    String o = w.toString();
    return o;
  }

  /** Expects 0 and returns 1. */
  static class TestResponder extends GenericResponder {
    public TestResponder(Protocol local) {
      super(local);
    }

    @Override
    public Object respond(Message message, Object request) throws AvroRemoteException {
      assertEquals(0, ((GenericRecord) request).get("x"));
      return 1;
    }
  }

  // Issues one m(0) call and checks the response is 1.
  private void makeRequest(Transceiver t) throws Exception {
    GenericRecord params = new GenericData.Record(protocol.getMessages().get("m").getRequest());
    params.put("x", 0);
    GenericRequestor r = new GenericRequestor(protocol, t);
    assertEquals(1, r.request("m", params));
  }

  @Test
  void fullServerPath() throws Exception {
    Responder r = new TestResponder(protocol);
    StatsPlugin statsPlugin = new StatsPlugin();
    r.addRPCPlugin(statsPlugin);
    Transceiver t = new LocalTransceiver(r);
    for (int i = 0; i < 10; ++i) {
      makeRequest(t);
    }

    String o = generateServletResponse(statsPlugin);
    assertTrue(o.contains("10 calls"));
  }

  @Test
  void multipleRPCs() throws IOException {
    // FakeTicks lets the test control the clock seen by the plugin.
    org.apache.avro.ipc.stats.FakeTicks t = new org.apache.avro.ipc.stats.FakeTicks();
    StatsPlugin statsPlugin = new StatsPlugin(t, StatsPlugin.LATENCY_SEGMENTER, StatsPlugin.PAYLOAD_SEGMENTER);
    RPCContext context1 = makeContext();
    RPCContext context2 = makeContext();
    statsPlugin.serverReceiveRequest(context1);
    t.passTime(100 * MS); // first takes 100ms
    statsPlugin.serverReceiveRequest(context2);
    String r = generateServletResponse(statsPlugin);
    // Check in progress RPCs
    assertTrue(r.contains("m: 0ms"));
    assertTrue(r.contains("m: 100ms"));
    statsPlugin.serverSendResponse(context1);
    t.passTime(900 * MS); // second takes 900ms
    statsPlugin.serverSendResponse(context2);
    r = generateServletResponse(statsPlugin);
    assertTrue(r.contains("Average: 500.0ms"));
  }

  @Test
  void payloadSize() throws Exception {
    Responder r = new TestResponder(protocol);
    StatsPlugin statsPlugin = new StatsPlugin();
    r.addRPCPlugin(statsPlugin);
    Transceiver t = new LocalTransceiver(r);
    makeRequest(t);

    String resp = generateServletResponse(statsPlugin);
    assertTrue(resp.contains("Average: 2.0"));
  }

  // Fresh RPCContext bound to the protocol's single message "m".
  private RPCContext makeContext() {
    RPCContext context = new RPCContext();
    context.setMessage(message);
    return context;
  }

  /** Sleeps as requested. */
  private static class SleepyResponder extends GenericResponder {
    public SleepyResponder(Protocol local) {
      super(local);
    }

    @Override
    public Object respond(Message message, Object request) throws AvroRemoteException {
      try {
        Thread.sleep((Long) ((GenericRecord) request).get("millis"));
      } catch (InterruptedException e) {
        throw new AvroRemoteException(e);
      }
      return null;
    }
  }

  /**
   * Demo program for using RPC stats. This automatically generates client RPC
   * requests. Alternatively a tool such as rpcsend can be used (as below) to
   * trigger RPCs.
   *
   * <pre>
   * java -jar build/avro-tools-*.jar rpcsend '{"protocol":"sleepy","namespace":null,"types":[],"messages":{"sleep":{"request":[{"name":"millis","type":"long"}],"response":"null"}}}' sleep localhost 7002 '{"millis": 20000}'
   * </pre>
   *
   * @param args
   * @throws Exception
   */
  public static void main(String[] args) throws Exception {
    if (args.length == 0) {
      args = new String[] { "7002", "7003" };
    }
    Protocol protocol = Protocol.parse("{\"protocol\": \"sleepy\", " + "\"messages\": { \"sleep\": {"
        + "   \"request\": [{\"name\": \"millis\", \"type\": \"long\"}," + "{\"name\": \"data\", \"type\": \"bytes\"}], "
        + "   \"response\": \"null\"} } }");
    Responder r = new SleepyResponder(protocol);
    StatsPlugin p = new StatsPlugin();
    r.addRPCPlugin(p);

    // Start Avro server
    HttpServer avroServer = new HttpServer(r, Integer.parseInt(args[0]));
    avroServer.start();

    // Stats UI server; started for its side effect of serving the plugin data.
    StatsServer ss = new StatsServer(p, 8080);

    HttpTransceiver trans = new HttpTransceiver(new URL("http://localhost:" + Integer.parseInt(args[0])));
    GenericRequestor req = new GenericRequestor(protocol, trans);

    while (true) {
      Thread.sleep(1000);
      GenericRecord params = new GenericData.Record(protocol.getMessages().get("sleep").getRequest());
      Random rand = new Random();
      params.put("millis", Math.abs(rand.nextLong()) % 1000);
      int payloadSize = Math.abs(rand.nextInt()) % 10000;
      byte[] payload = new byte[payloadSize];
      rand.nextBytes(payload);
      params.put("data", ByteBuffer.wrap(payload));
      req.request("sleep", params);
    }
  }
}
6,933
0
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttps.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.Responder;
import org.apache.avro.TestProtocolSpecific;
import org.apache.avro.ipc.HttpTransceiver;

import org.eclipse.jetty.server.SslConnectionFactory;
import org.eclipse.jetty.util.ssl.SslContextFactory;

import java.net.URL;

/** Runs the specific-protocol test suite over HTTPS (Jetty + SSL). */
public class TestProtocolHttps extends TestProtocolSpecific {

  @Override
  public Server createServer(Responder testResponder) throws Exception {
    // Key/trust stores live in the test tree; passwords are test-only values.
    System.setProperty("javax.net.ssl.keyStore", "src/test/keystore");
    System.setProperty("javax.net.ssl.keyStorePassword", "avrotest");
    System.setProperty("javax.net.ssl.password", "avrotest");
    System.setProperty("javax.net.ssl.trustStore", "src/test/truststore");
    System.setProperty("javax.net.ssl.trustStorePassword", "avrotest");
    SslConnectionFactory connectionFactory = new SslConnectionFactory("HTTP/1.1");

    // Configure Jetty's SSL context from the same system properties.
    SslContextFactory sslContextFactory = connectionFactory.getSslContextFactory();
    sslContextFactory.setKeyStorePath(System.getProperty("javax.net.ssl.keyStore"));
    sslContextFactory.setKeyManagerPassword(System.getProperty("javax.net.ssl.password"));
    sslContextFactory.setKeyStorePassword(System.getProperty("javax.net.ssl.keyStorePassword"));
    sslContextFactory.setNeedClientAuth(false);

    // Fixed port 18443; NOTE(review): a fixed port can collide with parallel
    // test runs -- confirm this is intentional.
    return new HttpServer(testResponder, connectionFactory, "localhost", 18443);
  }

  @Override
  public Transceiver createTransceiver() throws Exception {
    return new HttpTransceiver(new URL("https://localhost:" + server.getPort() + "/"));
  }

  // HTTP is stateless, so every request repeats the handshake.
  protected int getExpectedHandshakeCount() {
    return REPEATING;
  }

}
6,934
0
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestProtocolHttp.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.TestProtocolSpecific;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.generic.GenericData;
import org.apache.avro.test.Simple;
import org.junit.jupiter.api.Test;

import java.net.URL;
import java.net.ServerSocket;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.LinkedHashMap;

/** Runs the specific-protocol test suite over plain HTTP. */
public class TestProtocolHttp extends TestProtocolSpecific {

  @Override
  public Server createServer(Responder testResponder) throws Exception {
    // Port 0 = pick any free port.
    return new HttpServer(testResponder, 0);
  }

  @Override
  public Transceiver createTransceiver() throws Exception {
    return new HttpTransceiver(new URL("http://127.0.0.1:" + server.getPort() + "/"));
  }

  // HTTP is stateless, so every request repeats the handshake.
  @Override
  protected int getExpectedHandshakeCount() {
    return REPEATING;
  }

  @Test
  void timeout() throws Throwable {
    // A bound-but-never-accepting socket forces the client timeout to fire.
    ServerSocket s = new ServerSocket(0);
    HttpTransceiver client = new HttpTransceiver(new URL("http://127.0.0.1:" + s.getLocalPort() + "/"));
    client.setTimeout(100);
    Simple proxy = SpecificRequestor.getClient(Simple.class, client);
    try {
      proxy.hello("foo");
      fail("Should have failed with an exception");
    } catch (AvroRuntimeException e) {
      assertTrue(e.getCause() instanceof SocketTimeoutException, "Got unwanted exception: " + e.getCause());
    } finally {
      s.close();
    }
  }

  /** Test that Responder ignores one-way with stateless transport. */
  @Test
  void statelessOneway() throws Exception {
    // a version of the Simple protocol that doesn't declare "ack" one-way
    Protocol protocol = new Protocol("Simple", "org.apache.avro.test");

    Protocol.Message message = protocol.createMessage("ack", null, new LinkedHashMap<String, String>(),
        Schema.createRecord(new ArrayList<>()), Schema.create(Schema.Type.NULL), Schema.createUnion(new ArrayList<>()));
    protocol.getMessages().put("ack", message);

    // call a server over a stateless protocol that has a one-way "ack"
    GenericRequestor requestor = new GenericRequestor(protocol, createTransceiver());
    requestor.request("ack", new GenericData.Record(message.getRequest()));

    // make the request again, to better test handshakes w/ differing protocols
    requestor.request("ack", new GenericData.Record(message.getRequest()));
  }

}
6,935
0
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/TestBulkData.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.IOException;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.net.URL;
import java.nio.ByteBuffer;
import java.util.Random;

import org.apache.avro.test.BulkData;

/**
 * Round-trips large byte payloads through the BulkData protocol over HTTP.
 * Also usable as a stand-alone throughput benchmark via main().
 */
public class TestBulkData {
  // COUNT requests of SIZE bytes each; overridable via system properties.
  private static final long COUNT = Integer.parseInt(System.getProperty("test.count", "10"));
  private static final int SIZE = Integer.parseInt(System.getProperty("test.size", "65536"));

  // Shared random payload; duplicated per request so position/limit are fresh.
  private static final ByteBuffer DATA = ByteBuffer.allocate(SIZE);
  static {
    Random rand = new Random();
    DATA.limit(DATA.capacity());
    DATA.position(0);
    rand.nextBytes(DATA.array());
  }

  public static class BulkDataImpl implements BulkData {

    @Override
    public ByteBuffer read() {
      return DATA.duplicate();
    }

    @Override
    public void write(ByteBuffer data) {
      assertEquals(SIZE, data.remaining());
    }
  }

  // Server and proxy are static and created once; the null check in
  // startServer() makes the @BeforeEach effectively run-once.
  private static Server server;
  private static BulkData proxy;

  @BeforeEach
  public void startServer() throws Exception {
    if (server != null)
      return;
    server = new HttpServer(new SpecificResponder(BulkData.class, new BulkDataImpl()), 0);
    server.start();
    Transceiver client = new HttpTransceiver(new URL("http://127.0.0.1:" + server.getPort() + "/"));
    proxy = SpecificRequestor.getClient(BulkData.class, client);
  }

  @Test
  void read() throws IOException {
    for (long i = 0; i < COUNT; i++)
      assertEquals(SIZE, proxy.read().remaining());
  }

  @Test
  void write() throws IOException {
    for (long i = 0; i < COUNT; i++)
      proxy.write(DATA.duplicate());
  }

  @AfterAll
  public static void stopServer() throws Exception {
    server.close();
  }

  public static void main(String[] args) throws Exception {
    TestBulkData test = new TestBulkData();
    test.startServer();
    System.out.println("READ");
    long start = System.currentTimeMillis();
    test.read();
    printStats(start);
    System.out.println("WRITE");
    start = System.currentTimeMillis();
    test.write();
    printStats(start);
    test.stopServer();
  }

  // Prints throughput figures since 'start' (millis).
  private static void printStats(long start) {
    double seconds = (System.currentTimeMillis() - start) / 1000.0;
    System.out.println("seconds = " + (int) seconds);
    System.out.println("requests/second = " + (int) (COUNT / seconds));
    double megabytes = (COUNT * SIZE) / (1024 * 1024.0);
    System.out.println("MB = " + (int) megabytes);
    System.out.println("MB/second = " + (int) (megabytes / seconds));
  }
}
6,936
0
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/test/java/org/apache/avro/ipc/jetty/StatsPluginOverhead.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import java.io.IOException;
import java.net.URL;

import org.apache.avro.AvroRemoteException;
import org.apache.avro.Protocol;
import org.apache.avro.Protocol.Message;
import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.generic.GenericRequestor;
import org.apache.avro.ipc.generic.GenericResponder;
import org.apache.avro.ipc.stats.StatsPlugin;

/**
 * Naively measures overhead of using the stats plugin.
 *
 * The API used is the generic one. The protocol is the "null" protocol: null is
 * sent and returned.
 */
public class StatsPluginOverhead {
  /** Number of RPCs per iteration. */
  private static final int COUNT = 100000;
  private static final Protocol NULL_PROTOCOL = Protocol.parse("{\"protocol\": \"null\", "
      + "\"messages\": { \"null\": {" + "   \"request\": [], " + "   \"response\": \"null\"} } }");

  /** Responder that simply echoes the request datum back. */
  private static class IdentityResponder extends GenericResponder {
    public IdentityResponder(Protocol local) {
      super(local);
    }

    @Override
    public Object respond(Message message, Object request) throws AvroRemoteException {
      return request;
    }
  }

  public static void main(String[] args) throws Exception {
    // Run the same workload with and without the plugin and compare.
    double with = sendRpcs(true) / 1000000000.0;
    double without = sendRpcs(false) / 1000000000.0;

    System.out.println(String.format(
        "Overhead: %f%%.  RPC/s: %f (with) vs %f (without).  " + "RPC time (ms): %f vs %f",
        100 * (with - without) / (without), COUNT / with, COUNT / without, 1000 * with / COUNT,
        1000 * without / COUNT));
  }

  /** Sends RPCs and returns nanos elapsed. */
  private static long sendRpcs(boolean withPlugin) throws Exception {
    HttpServer server = createServer(withPlugin);
    Transceiver t = new HttpTransceiver(new URL("http://127.0.0.1:" + server.getPort() + "/"));
    GenericRequestor requestor = new GenericRequestor(NULL_PROTOCOL, t);

    long now = System.nanoTime();
    for (int i = 0; i < COUNT; ++i) {
      requestor.request("null", null);
    }
    long elapsed = System.nanoTime() - now;
    t.close();
    server.close();
    return elapsed;
  }

  /** Starts an Avro server. */
  private static HttpServer createServer(boolean withPlugin) throws IOException {
    Responder r = new IdentityResponder(NULL_PROTOCOL);
    if (withPlugin) {
      r.addRPCPlugin(new StatsPlugin());
    }

    // Start Avro server
    HttpServer server = new HttpServer(r, 0);
    server.start();
    return server;
  }
}
6,937
0
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StatsServer.java
package org.apache.avro.ipc.jetty; import org.apache.avro.ipc.stats.StatsPlugin; import org.apache.avro.ipc.stats.StatsServlet; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; /* This is a server that displays live information from a StatsPlugin. * * Typical usage is as follows: * StatsPlugin plugin = new StatsPlugin(); * requestor.addPlugin(plugin); * StatsServer server = new StatsServer(plugin, 8080); * * */ public class StatsServer { Server httpServer; StatsPlugin plugin; /* * Start a stats server on the given port, responsible for the given plugin. */ public StatsServer(StatsPlugin plugin, int port) throws Exception { this.httpServer = new Server(port); this.plugin = plugin; ServletHandler handler = new ServletHandler(); httpServer.setHandler(handler); handler.addServletWithMapping(new ServletHolder(new StaticServlet()), "/"); handler.addServletWithMapping(new ServletHolder(new StatsServlet(plugin)), "/"); httpServer.start(); } /* Stops this server. */ public void stop() throws Exception { this.httpServer.stop(); } }
6,938
0
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/StaticServlet.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import java.net.URL;

import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.util.resource.Resource;

/**
 * Very simple servlet class capable of serving static files.
 */
public class StaticServlet extends DefaultServlet {
  private static final long serialVersionUID = 1L;

  @Override
  public Resource getResource(String pathInContext) {
    // Keep only the final segment of the URL as the file name, so paths can be
    // adjusted freely. This also defeats mischief like '../../foo.css'.
    String[] segments = pathInContext.split("/");
    String filename = segments[segments.length - 1];

    URL resource = getClass().getClassLoader().getResource("org/apache/avro/ipc/stats/static/" + filename);
    return (resource == null) ? null : Resource.newResource(resource);
  }
}
6,939
0
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc
Create_ds/avro/lang/java/ipc-jetty/src/main/java/org/apache/avro/ipc/jetty/HttpServer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.ipc.jetty;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.ResponderServlet;
import org.apache.avro.ipc.Server;
import org.eclipse.jetty.server.ConnectionFactory;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHandler;
import org.eclipse.jetty.servlet.ServletHolder;

/** An HTTP-based RPC {@link Server}. */
public class HttpServer implements Server {
  private org.eclipse.jetty.server.Server server;

  /** Constructs a server to run on the named port. */
  public HttpServer(Responder responder, int port) throws IOException {
    this(new ResponderServlet(responder), null, port);
  }

  /** Constructs a server to run on the named port. */
  public HttpServer(ResponderServlet servlet, int port) throws IOException {
    this(servlet, null, port);
  }

  /** Constructs a server to run on the named port on the specified address. */
  public HttpServer(Responder responder, InetSocketAddress addr) throws IOException {
    this(new ResponderServlet(responder), addr.getHostString(), addr.getPort());
  }

  /** Constructs a server to run on the named port on the specified address. */
  public HttpServer(Responder responder, String bindAddress, int port) throws IOException {
    this(new ResponderServlet(responder), bindAddress, port);
  }

  /**
   * Constructs a server to run on the named port on the specified address.
   *
   * @param servlet     servlet that handles RPC requests, mapped to "/*".
   * @param bindAddress host to bind to; null binds to all interfaces.
   * @param port        port to listen on; 0 picks an ephemeral port.
   */
  public HttpServer(ResponderServlet servlet, String bindAddress, int port) throws IOException {
    this.server = new org.eclipse.jetty.server.Server();
    ServerConnector connector = new ServerConnector(this.server);
    connector.setAcceptQueueSize(128);
    connector.setIdleTimeout(10000);
    if (bindAddress != null) {
      connector.setHost(bindAddress);
    }
    connector.setPort(port);
    server.addConnector(connector);
    ServletHandler handler = new ServletHandler();
    handler.addServletWithMapping(new ServletHolder(servlet), "/*");
    ServletContextHandler sch = new ServletContextHandler();
    sch.setServletHandler(handler);
    server.setHandler(sch);
  }

  /**
   * Constructs a server to run with the given ConnectionFactory on the given
   * address/port.
   */
  public HttpServer(Responder responder, ConnectionFactory connectionFactory, String bindAddress, int port)
      throws IOException {
    this(new ResponderServlet(responder), connectionFactory, bindAddress, port);
  }

  /**
   * Constructs a server to run with the given ConnectionFactory on the given
   * address/port.
   */
  public HttpServer(ResponderServlet servlet, ConnectionFactory connectionFactory, String bindAddress, int port)
      throws IOException {
    this.server = new org.eclipse.jetty.server.Server();
    HttpConfiguration httpConfig = new HttpConfiguration();
    HttpConnectionFactory httpFactory = new HttpConnectionFactory(httpConfig);
    ServerConnector connector = new ServerConnector(this.server, connectionFactory, httpFactory);
    if (bindAddress != null) {
      connector.setHost(bindAddress);
    }
    connector.setPort(port);
    server.addConnector(connector);
    ServletHandler handler = new ServletHandler();
    server.setHandler(handler);
    handler.addServletWithMapping(new ServletHolder(servlet), "/*");
  }

  /**
   * Constructs a server to run with the given connector.
   *
   * @deprecated - use the Constructors that take a ConnectionFactory
   */
  @Deprecated
  public HttpServer(ResponderServlet servlet, Connector connector) throws IOException {
    this.server = connector.getServer();
    // Only register the connector if the server does not already have it;
    // the previous condition was inverted ("contains" instead of "!contains"),
    // which duplicated an already-registered connector and skipped a missing
    // one.
    if (server.getConnectors().length == 0 || !Arrays.asList(server.getConnectors()).contains(connector)) {
      server.addConnector(connector);
    }
    ServletHandler handler = new ServletHandler();
    server.setHandler(handler);
    handler.addServletWithMapping(new ServletHolder(servlet), "/*");
  }

  /**
   * Constructs a server to run with the given connector.
   *
   * @deprecated - use the Constructors that take a ConnectionFactory
   */
  @Deprecated
  public HttpServer(Responder responder, Connector connector) throws IOException {
    this(new ResponderServlet(responder), connector);
  }

  public void addConnector(Connector connector) {
    server.addConnector(connector);
  }

  @Override
  public int getPort() {
    return ((ServerConnector) server.getConnectors()[0]).getLocalPort();
  }

  @Override
  public void close() {
    try {
      server.stop();
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
  }

  /**
   * Start the server.
   *
   * @throws AvroRuntimeException if the underlying Jetty server throws any
   *                              exception while starting.
   */
  @Override
  public void start() {
    try {
      server.start();
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
  }

  @Override
  public void join() throws InterruptedException {
    server.join();
  }
}
6,940
0
Create_ds/avro/lang/java/grpc/src/test/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroProtocolGrpc.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.grpc;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.grpc.test.Kind;
import org.apache.avro.grpc.test.MD5;
import org.apache.avro.grpc.test.TestError;
import org.apache.avro.grpc.test.TestRecord;
import org.apache.avro.grpc.test.TestService;
import org.apache.avro.ipc.CallFuture;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.Server;
import io.grpc.ServerBuilder;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * End-to-end tests for Avro RPC served over gRPC: a real in-process gRPC
 * server and client channel are created per test via {@link #setUp()}.
 */
public class TestAvroProtocolGrpc {
  // Sample record used by all echo-style round trips.
  private final TestRecord record = TestRecord.newBuilder().setName("foo").setKind(Kind.FOO)
      .setArrayOfLongs(Arrays.asList(42L, 424L, 4242L)).setHash(new MD5(new byte[] { 4, 2, 4, 2 }))
      .setNullableHash(null).build();
  private final String declaredErrMsg = "Declared error";
  private final String undeclaredErrMsg = "Undeclared error";
  // Error declared in the protocol (thrown as TestError to callers).
  private final TestError declaredError = TestError.newBuilder().setMessage$(declaredErrMsg).build();
  // Error NOT declared in the protocol (surfaces as AvroRuntimeException).
  private final RuntimeException undeclaredError = new RuntimeException(undeclaredErrMsg);
  // Latches coordinating the one-way ping test: server-side ping() blocks on
  // oneWayStart; oneWayDone counts completed pings.
  private CountDownLatch oneWayStart;
  private CountDownLatch oneWayDone;
  private AtomicInteger oneWayCount;
  // Synchronous and callback-style client stubs over the same channel.
  private TestService stub;
  private TestService.Callback callbackStub;
  private Server server;
  private ManagedChannel channel;

  @BeforeEach
  public void setUp() throws IOException {
    TestService serviceImpl = new TestServiceImplBase();
    setUpServerAndClient(serviceImpl);
  }

  // (Re)starts server and client; tears down any previous instances first so
  // individual tests can swap in a custom service implementation.
  private void setUpServerAndClient(TestService serviceImpl) throws IOException {
    if (server != null && !server.isShutdown()) {
      server.shutdown();
    }
    if (channel != null && !channel.isShutdown()) {
      channel.shutdownNow();
    }
    // Port 0 lets the OS pick a free port; the chosen port is read back below.
    server = ServerBuilder.forPort(0).addService(AvroGrpcServer.createServiceDefinition(TestService.class, serviceImpl))
        .build();
    server.start();
    int port = server.getPort();
    channel = ManagedChannelBuilder.forAddress("localhost", port).usePlaintext().build();
    stub = AvroGrpcClient.create(channel, TestService.class);
    callbackStub = AvroGrpcClient.create(channel, TestService.Callback.class);
  }

  @AfterEach
  public void cleanUp() {
    channel.shutdownNow();
    server.shutdownNow();
  }

  @Test
  void echoRecord() throws Exception {
    TestRecord echoedRecord = stub.echo(record);
    assertEquals(record, echoedRecord);
  }

  @Test
  void multipleArgsAdd() throws Exception {
    int result = stub.add(3, 5, 2);
    assertEquals(10, result);
  }

  @Test
  void multipleArgsConcatenate() throws Exception {
    String val1 = "foo-bar";
    Boolean val2 = true;
    long val3 = 123321L;
    int val4 = 42;
    assertEquals(val1 + val2 + val3 + val4, stub.concatenate(val1, val2, val3, val4));
  }

  @Test
  void callbackInterface() throws Exception {
    // Async invocation: the CallFuture doubles as the callback.
    CallFuture<TestRecord> future = new CallFuture<>();
    callbackStub.echo(record, future);
    assertEquals(record, future.get(1, TimeUnit.SECONDS));
  }

  @Test
  void oneWayRpc() throws Exception {
    oneWayStart = new CountDownLatch(1);
    oneWayDone = new CountDownLatch(3);
    oneWayCount = new AtomicInteger();
    // Server-side ping() blocks on oneWayStart, so these pings are pending.
    stub.ping();
    stub.ping();
    // client is not stalled while server is waiting for processing requests
    assertEquals(0, oneWayCount.get());
    oneWayStart.countDown();
    stub.ping();
    oneWayDone.await(1, TimeUnit.SECONDS);
    assertEquals(3, oneWayCount.get());
  }

  @Test
  void declaredError() throws Exception {
    try {
      stub.error(true);
      fail("Expected exception but none thrown");
    } catch (TestError te) {
      // Declared errors arrive typed, with their message intact.
      assertEquals(declaredErrMsg, te.getMessage$());
    }
  }

  @Test
  void undeclaredError() throws Exception {
    try {
      stub.error(false);
      fail("Expected exception but none thrown");
    } catch (AvroRuntimeException e) {
      // Undeclared errors are wrapped; original message is preserved in text.
      assertTrue(e.getMessage().contains(undeclaredErrMsg));
    }
  }

  @Test
  void nullableResponse() throws Exception {
    // Override concatenate to return null and verify null round-trips.
    setUpServerAndClient(new TestServiceImplBase() {
      @Override
      public String concatenate(String val1, boolean val2, long val3, int val4) {
        return null;
      }
    });
    assertNull(stub.concatenate("foo", true, 42L, 42));
  }

  @Test
  void grpcConnectionError() throws Exception {
    assertThrows(AvroRuntimeException.class, () -> {
      // close the channel and initiate request
      channel.shutdownNow();
      stub.add(0, 1, 2);
    });
  }

  @Test
  void repeatedRequests() throws Exception {
    TestRecord[] echoedRecords = new TestRecord[5];
    // validate results after all requests are done
    for (int i = 0; i < 5; i++) {
      echoedRecords[i] = stub.echo(record);
    }
    for (TestRecord result : echoedRecords) {
      assertEquals(record, result);
    }
  }

  @Test
  void concurrentClientAccess() throws Exception {
    ExecutorService es = Executors.newCachedThreadPool();
    Future<TestRecord>[] records = new Future[5];
    Future<Integer>[] adds = new Future[5];
    // submit requests in parallel
    for (int i = 0; i < 5; i++) {
      records[i] = es.submit(() -> stub.echo(record));
      int j = i; // effectively-final copy for the lambda
      adds[i] = es.submit(() -> stub.add(j, 2 * j, 3 * j));
    }
    // validate all results
    for (int i = 0; i < 5; i++) {
      assertEquals(record, records[i].get());
      assertEquals(6 * i, (long) adds[i].get());
    }
  }

  @Test
  void concurrentChannels() throws Exception {
    // Second client on its own channel to the same server.
    ManagedChannel otherChannel = ManagedChannelBuilder.forAddress("localhost", server.getPort()).usePlaintext()
        .build();
    TestService otherStub = AvroGrpcClient.create(otherChannel, TestService.class);
    Future<Integer>[] adds = new Future[5];
    Future<Integer>[] otherAdds = new Future[5];
    ExecutorService es = Executors.newCachedThreadPool();
    // submit requests on clients with different channels
    for (int i = 0; i < 5; i++) {
      int j = i;
      adds[i] = es.submit(() -> stub.add(j, j - 1, j - 2));
      otherAdds[i] = es.submit(() -> otherStub.add(j, j + 1, j + 2));
    }
    // validate all results
    for (int i = 0; i < 5; i++) {
      assertEquals((3 * i) - 3, (long) adds[i].get());
      assertEquals((3 * i) + 3, (long) otherAdds[i].get());
    }
    otherChannel.shutdownNow();
  }

  // Default service implementation; individual tests override single methods.
  private class TestServiceImplBase implements TestService {
    @Override
    public TestRecord echo(TestRecord record) {
      return record;
    }

    @Override
    public int add(int arg1, int arg2, int arg3) {
      return arg1 + arg2 + arg3;
    }

    @Override
    public void error(boolean declared) throws TestError {
      if (declared) {
        throw declaredError;
      }
      throw undeclaredError;
    }

    @Override
    public void ping() {
      try {
        // Block until the test releases oneWayStart, then record completion.
        oneWayStart.await();
        oneWayCount.incrementAndGet();
        oneWayDone.countDown();
      } catch (InterruptedException e) {
        fail("thread interrupted when waiting for all one-way messages");
      }
    }

    @Override
    public String concatenate(String val1, boolean val2, long val3, int val4) {
      return val1 + val2 + val3 + val4;
    }
  }
}
6,941
0
Create_ds/avro/lang/java/grpc/src/test/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/test/java/org/apache/avro/grpc/TestAvroMarshaller.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.grpc;

import org.apache.avro.Protocol;
import org.apache.avro.grpc.test.Kind;
import org.apache.avro.grpc.test.MD5;
import org.apache.avro.grpc.test.TestRecord;
import org.apache.avro.grpc.test.TestService;
import org.junit.jupiter.api.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Random;

import io.grpc.Drainable;

import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * Tests for the Avro gRPC request/response marshallers: serialized payloads
 * must survive a partial byte-by-byte read followed by a drain of the rest.
 */
public class TestAvroMarshaller {

  // Sample record round-tripped through both marshallers.
  private final TestRecord record = TestRecord.newBuilder().setName("foo").setKind(Kind.FOO)
      .setArrayOfLongs(Arrays.asList(42L, 424L, 4242L)).setHash(new MD5(new byte[] { 4, 2, 4, 2 }))
      .setNullableHash(null).build();

  private final Protocol.Message message = TestService.PROTOCOL.getMessages().get("echo");

  private Random random = new Random();

  /**
   * Reads up to {@code partialToRead} leading bytes one at a time from
   * {@code inputStream} into {@code target}, then drains whatever remains via
   * the stream's {@link Drainable} interface.
   *
   * (Renamed from the misspelled {@code readPratialAndDrain}.)
   *
   * @param partialToRead number of single-byte reads to attempt first
   * @param inputStream   stream produced by a marshaller; must be Drainable
   * @param target        sink receiving the complete serialized payload
   */
  private void readPartialAndDrain(int partialToRead, InputStream inputStream, OutputStream target)
      throws IOException {
    // read specified partial bytes from request InputStream to target and then
    // drain the rest.
    for (int i = 0; i < partialToRead; i++) {
      int readByte = inputStream.read();
      if (readByte >= 0) {
        target.write(readByte);
      } else {
        break; // end of stream reached before the partial count
      }
    }
    Drainable drainableRequest = (Drainable) inputStream;
    drainableRequest.drainTo(target);
  }

  @Test
  void avroRequestReadPartialAndDrain() throws IOException {
    AvroRequestMarshaller requestMarshaller = new AvroRequestMarshaller(message);
    InputStream requestInputStream = requestMarshaller.stream(new Object[] { record });
    ByteArrayOutputStream requestOutputStream = new ByteArrayOutputStream();
    // Read 1..7 bytes before draining to exercise the buffered-partial path.
    readPartialAndDrain(random.nextInt(7) + 1, requestInputStream, requestOutputStream);

    InputStream serialized = new ByteArrayInputStream(requestOutputStream.toByteArray());
    Object[] parsedArgs = requestMarshaller.parse(serialized);

    assertEquals(1, parsedArgs.length);
    assertEquals(record, parsedArgs[0]);
  }

  @Test
  void avroResponseReadPartialAndDrain() throws IOException {
    AvroResponseMarshaller responseMarshaller = new AvroResponseMarshaller(message);
    InputStream responseInputStream = responseMarshaller.stream(record);
    ByteArrayOutputStream responseOutputStream = new ByteArrayOutputStream();
    // Read 1..7 bytes before draining to exercise the buffered-partial path.
    readPartialAndDrain(random.nextInt(7) + 1, responseInputStream, responseOutputStream);

    InputStream serialized = new ByteArrayInputStream(responseOutputStream.toByteArray());
    Object parsedResponse = responseMarshaller.parse(serialized);

    assertEquals(record, parsedResponse);
  }
}
6,942
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroRequestMarshaller.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.grpc;

import com.google.common.io.ByteStreams;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import io.grpc.MethodDescriptor;
import io.grpc.Status;

/** Marshaller for Avro RPC request. */
public class AvroRequestMarshaller implements MethodDescriptor.Marshaller<Object[]> {
  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
  private static final DecoderFactory DECODER_FACTORY = new DecoderFactory();
  private final Protocol.Message message;

  public AvroRequestMarshaller(Protocol.Message message) {
    this.message = message;
  }

  /** Wraps the positional arguments in a lazily-serializing input stream. */
  @Override
  public InputStream stream(Object[] value) {
    return new AvroRequestInputStream(value, message);
  }

  /**
   * Deserializes the request record from {@code stream} and unpacks its fields
   * into a positional argument array (one slot per request-schema field).
   */
  @Override
  public Object[] parse(InputStream stream) {
    try {
      BinaryDecoder decoder = DECODER_FACTORY.binaryDecoder(stream, null);
      Schema requestSchema = message.getRequest();
      GenericRecord requestRecord = (GenericRecord) new SpecificDatumReader<>(requestSchema).read(null, decoder);
      Object[] arguments = new Object[requestSchema.getFields().size()];
      int position = 0;
      for (Schema.Field field : requestSchema.getFields()) {
        arguments[position] = requestRecord.get(field.name());
        position++;
      }
      return arguments;
    } catch (IOException e) {
      throw Status.INTERNAL.withCause(e).withDescription("Error deserializing avro request arguments")
          .asRuntimeException();
    } finally {
      AvroGrpcUtils.skipAndCloseQuietly(stream);
    }
  }

  /** Serializes the argument array on demand when drained or read. */
  private static class AvroRequestInputStream extends AvroInputStream {
    private final Protocol.Message message;
    private Object[] args;

    AvroRequestInputStream(Object[] args, Protocol.Message message) {
      this.args = args;
      this.message = message;
    }

    @Override
    public int drainTo(OutputStream target) throws IOException {
      // If a read already forced serialization, just copy the buffered bytes.
      if (getPartial() != null) {
        return (int) ByteStreams.copy(getPartial(), target);
      }
      Schema requestSchema = message.getRequest();
      CountingOutputStream countingStream = new CountingOutputStream(target);
      BinaryEncoder encoder = ENCODER_FACTORY.binaryEncoder(countingStream, null);
      int position = 0;
      for (Schema.Field param : requestSchema.getFields()) {
        new SpecificDatumWriter<>(param.schema()).write(args[position], encoder);
        position++;
      }
      encoder.flush();
      args = null; // release the arguments once serialized
      return countingStream.getWrittenCount();
    }
  }
}
6,943
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcClient.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.grpc;

import org.apache.avro.AvroRemoteException;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.ipc.CallFuture;
import org.apache.avro.ipc.Callback;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;

import io.grpc.CallOptions;
import io.grpc.Channel;
import io.grpc.MethodDescriptor;
import io.grpc.stub.ClientCalls;
import io.grpc.stub.StreamObserver;

/** Component that sets up a gRPC client for Avro's IDL and Serialization. */
public abstract class AvroGrpcClient {

  private AvroGrpcClient() {
  }

  /**
   * Creates a gRPC client for Avro's interface with default {@link CallOptions}.
   *
   * @param channel the channel used for gRPC {@link ClientCalls}.
   * @param iface   Avro interface for which client is built.
   * @param <T>     type of Avro Interface.
   * @return a new client proxy.
   */
  public static <T> T create(Channel channel, Class<T> iface) {
    return create(channel, iface, CallOptions.DEFAULT);
  }

  /**
   * Creates a gRPC client for Avro's interface with provided {@link CallOptions}.
   *
   * @param channel     the channel used for gRPC {@link ClientCalls}.
   * @param iface       Avro interface for which client is built.
   * @param callOptions client call options for gRPC.
   * @param <T>         type of Avro Interface.
   * @return a new client proxy.
   */
  @SuppressWarnings("unchecked") // the proxy is created against iface, so the cast to T is safe
  public static <T> T create(Channel channel, Class<T> iface, CallOptions callOptions) {
    Protocol protocol = AvroGrpcUtils.getProtocol(iface);
    ServiceDescriptor serviceDescriptor = ServiceDescriptor.create(iface);
    ServiceInvocationHandler proxyHandler = new ServiceInvocationHandler(channel, callOptions, protocol,
        serviceDescriptor);
    return (T) Proxy.newProxyInstance(iface.getClassLoader(), new Class[] { iface }, proxyHandler);
  }

  /** Dispatches proxy method calls as unary gRPC calls. */
  private static class ServiceInvocationHandler implements InvocationHandler {
    private final Channel channel;
    private final CallOptions callOptions;
    private final ServiceDescriptor serviceDescriptor;

    ServiceInvocationHandler(Channel channel, CallOptions callOptions, Protocol protocol,
        ServiceDescriptor serviceDescriptor) {
      this.channel = channel;
      this.callOptions = callOptions;
      this.serviceDescriptor = serviceDescriptor;
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
      try {
        return invokeUnaryMethod(method, args);
      } catch (RuntimeException re) {
        // rethrow any runtime exception
        throw re;
      } catch (Exception e) {
        // throw any of the declared exceptions
        for (Class<?> exceptionClass : method.getExceptionTypes()) {
          if (exceptionClass.isInstance(e)) {
            throw e;
          }
        }
        // wrap all other exceptions
        throw new AvroRemoteException(e);
      }
    }

    /**
     * Invokes the method as a unary call. If the final parameter is a
     * {@link Callback}, the call is issued asynchronously and null is returned;
     * otherwise the call blocks and returns the response.
     */
    private Object invokeUnaryMethod(Method method, Object[] args) throws Exception {
      // getParameterTypes() returns Class<?>[], so no instanceof check is
      // needed before the isAssignableFrom test (the old Type[]/instanceof
      // dance was redundant).
      Class<?>[] parameterTypes = method.getParameterTypes();
      if ((parameterTypes.length > 0) && Callback.class.isAssignableFrom(parameterTypes[parameterTypes.length - 1])) {
        // get the callback argument from the end
        Object[] finalArgs = Arrays.copyOf(args, args.length - 1);
        Callback<?> callback = (Callback<?>) args[args.length - 1];
        unaryRequest(method.getName(), finalArgs, callback);
        return null;
      } else {
        return unaryRequest(method.getName(), args);
      }
    }

    /** Blocking unary request; unwraps the async result from a CallFuture. */
    private Object unaryRequest(String methodName, Object[] args) throws Exception {
      CallFuture<Object> callFuture = new CallFuture<>();
      unaryRequest(methodName, args, callFuture);
      try {
        return callFuture.get();
      } catch (Exception e) {
        // Surface the underlying cause rather than the ExecutionException.
        if (e.getCause() instanceof Exception) {
          throw (Exception) e.getCause();
        }
        throw new AvroRemoteException(e.getCause());
      }
    }

    /** Asynchronous unary request; completes the given callback. */
    private <RespT> void unaryRequest(String methodName, Object[] args, Callback<RespT> callback) throws Exception {
      StreamObserver<Object> observerAdapter = new CallbackToResponseStreamObserverAdapter<>(callback);
      ClientCalls.asyncUnaryCall(
          channel.newCall(serviceDescriptor.getMethod(methodName, MethodDescriptor.MethodType.UNARY), callOptions),
          args, observerAdapter);
    }

    /**
     * Bridges gRPC's {@link StreamObserver} to Avro's {@link Callback}.
     * (Renamed from the misspelled "...Adpater".)
     */
    private static class CallbackToResponseStreamObserverAdapter<T> implements StreamObserver<Object> {
      private final Callback<T> callback;

      CallbackToResponseStreamObserverAdapter(Callback<T> callback) {
        this.callback = callback;
      }

      @SuppressWarnings("unchecked") // non-error values are the declared response type T
      @Override
      public void onNext(Object value) {
        // A Throwable delivered as a value represents a declared remote error.
        if (value instanceof Throwable) {
          callback.handleError((Throwable) value);
        } else {
          callback.handleResult((T) value);
        }
      }

      @Override
      public void onError(Throwable t) {
        callback.handleError(new AvroRuntimeException(t));
      }

      @Override
      public void onCompleted() {
        // do nothing as there is no equivalent in Callback.
      }
    }
  }
}
6,944
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroInputStream.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.grpc;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import io.grpc.Drainable;

/**
 * An {@link InputStream} backed by an Avro RPC request/response that can be
 * drained to an {@link OutputStream}.
 */
public abstract class AvroInputStream extends InputStream implements Drainable {
  // Holds the serialized Avro payload once a read() forces materialization
  // before the stream has been drained.
  private ByteArrayInputStream partial;

  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    return materialize().read(b, off, len);
  }

  @Override
  public int read() throws IOException {
    return materialize().read();
  }

  /** Serializes the payload into an in-memory buffer on first use. */
  private ByteArrayInputStream materialize() throws IOException {
    if (partial == null) {
      ByteArrayOutputStream sink = new ByteArrayOutputStream();
      drainTo(sink);
      partial = new ByteArrayInputStream(sink.toByteArray());
    }
    return partial;
  }

  /** Buffered payload if a read already materialized it, otherwise null. */
  protected ByteArrayInputStream getPartial() {
    return partial;
  }

  /**
   * An {@link OutputStream} that forwards to a target stream while tracking
   * the total number of bytes written.
   */
  protected static class CountingOutputStream extends OutputStream {
    private final OutputStream target;
    private int writtenCount = 0;

    public CountingOutputStream(OutputStream target) {
      this.target = target;
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      target.write(b, off, len);
      writtenCount += len;
    }

    @Override
    public void write(int b) throws IOException {
      target.write(b);
      writtenCount += 1;
    }

    @Override
    public void flush() throws IOException {
      target.flush();
    }

    @Override
    public void close() throws IOException {
      target.close();
    }

    /** Total bytes written so far. */
    public int getWrittenCount() {
      return writtenCount;
    }
  }
}
6,945
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/ServiceDescriptor.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.grpc;

import org.apache.avro.Protocol;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import io.grpc.MethodDescriptor;
import org.apache.avro.util.MapUtil;

import static io.grpc.MethodDescriptor.generateFullMethodName;

/** Descriptor for a gRPC service based on a Avro interface. */
class ServiceDescriptor {

  // cache for service descriptors. One instance per service name, shared by
  // clients and servers in this JVM.
  private static final ConcurrentMap<String, ServiceDescriptor> SERVICE_DESCRIPTORS = new ConcurrentHashMap<>();
  private final String serviceName;
  private final Protocol protocol;
  // cache for method descriptors.
  private final ConcurrentMap<String, MethodDescriptor<Object[], Object>> methods = new ConcurrentHashMap<>();

  private ServiceDescriptor(Class iface, String serviceName) {
    this.serviceName = serviceName;
    this.protocol = AvroGrpcUtils.getProtocol(iface);
  }

  /**
   * Creates a Service Descriptor.
   *
   * @param iface Avro RPC interface.
   * @return the cached descriptor for the interface's service name, creating it
   *         on first access (race-free via computeIfAbsent).
   */
  public static ServiceDescriptor create(Class iface) {
    String serviceName = AvroGrpcUtils.getServiceName(iface);
    return MapUtil.computeIfAbsent(SERVICE_DESCRIPTORS, serviceName,
        key -> new ServiceDescriptor(iface, serviceName));
  }

  /**
   * provides name of the service.
   */
  public String getServiceName() {
    return serviceName;
  }

  /**
   * Provides a gRPC {@link MethodDescriptor} for a RPC method/message of Avro
   * {@link Protocol}.
   *
   * @param methodName name of the Avro protocol message; must exist in the
   *                   protocol's message map.
   * @param methodType gRPC type for the method.
   * @return a {@link MethodDescriptor}
   */
  public MethodDescriptor<Object[], Object> getMethod(String methodName, MethodDescriptor.MethodType methodType) {
    // Built lazily per method; marshallers are bound to the protocol message so
    // request/response use Avro binary encoding.
    return MapUtil.computeIfAbsent(methods, methodName,
        key -> MethodDescriptor.<Object[], Object>newBuilder()
            .setFullMethodName(generateFullMethodName(serviceName, methodName)).setType(methodType)
            .setRequestMarshaller(new AvroRequestMarshaller(protocol.getMessages().get(methodName)))
            .setResponseMarshaller(new AvroResponseMarshaller(protocol.getMessages().get(methodName))).build());
  }
}
6,946
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcUtils.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.grpc;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;

import java.io.InputStream;
import java.util.logging.Level;
import java.util.logging.Logger;

import io.grpc.KnownLength;

/** Utility methods for using Avro IDL and serialization with gRPC. */
public final class AvroGrpcUtils {
  private static final Logger LOG = Logger.getLogger(AvroGrpcUtils.class.getName());

  private AvroGrpcUtils() {
  }

  /**
   * Provides a unique gRPC service name for Avro RPC interface or its subclass
   * Callback Interface.
   *
   * @param iface Avro RPC interface.
   * @return unique service name for gRPC.
   */
  public static String getServiceName(Class iface) {
    Protocol protocol = getProtocol(iface);
    return protocol.getNamespace() + "." + protocol.getName();
  }

  /**
   * Gets the {@link Protocol} from the Avro Interface.
   *
   * @throws AvroRuntimeException if the interface does not declare the static
   *                              {@code PROTOCOL} field that Avro's specific
   *                              compiler generates, or it is inaccessible.
   */
  public static Protocol getProtocol(Class iface) {
    try {
      // Generated Avro interfaces carry their protocol in a static PROTOCOL field.
      Protocol p = (Protocol) (iface.getDeclaredField("PROTOCOL").get(null));
      return p;
    } catch (NoSuchFieldException e) {
      throw new AvroRuntimeException("Not a Specific protocol: " + iface);
    } catch (IllegalAccessException e) {
      throw new AvroRuntimeException(e);
    }
  }

  /**
   * Skips any unread bytes from InputStream and closes it. Best-effort: any
   * failure is logged at WARNING rather than propagated.
   */
  static void skipAndCloseQuietly(InputStream stream) {
    try {
      if (stream instanceof KnownLength && stream.available() > 0) {
        // KnownLength streams report their full remaining size via available().
        stream.skip(stream.available());
      } else {
        // don't expect this for an inputStream provided by gRPC but just to be on safe
        // side.
        byte[] skipBuffer = new byte[4096];
        while (true) {
          int read = stream.read(skipBuffer);
          // A short read (including -1 at EOF) means the stream is exhausted.
          if (read < skipBuffer.length) {
            break;
          }
        }
      }
      stream.close();
    } catch (Exception e) {
      LOG.log(Level.WARNING, "failed to skip/close the input stream, may cause memory leak", e);
    }
  }
}
6,947
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroGrpcServer.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.grpc;

import org.apache.avro.Protocol;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import io.grpc.MethodDescriptor;
import io.grpc.ServerServiceDefinition;
import io.grpc.stub.ServerCalls;
import io.grpc.stub.StreamObserver;

/**
 * Provides components to set up a gRPC Server for Avro's IDL and serialization.
 */
public abstract class AvroGrpcServer {

  private AvroGrpcServer() {
  }

  /**
   * Creates a {@link ServerServiceDefinition} for Avro Interface and its
   * implementation that can be passed a gRPC Server.
   *
   * @param iface Avro generated RPC service interface for which service defintion
   *              is created.
   * @param impl  Implementation of the service interface to be invoked for
   *              requests.
   * @return a new server service definition.
   */
  public static ServerServiceDefinition createServiceDefinition(Class iface, Object impl) {
    Protocol protocol = AvroGrpcUtils.getProtocol(iface);
    ServiceDescriptor serviceDescriptor = ServiceDescriptor.create(iface);
    ServerServiceDefinition.Builder serviceDefinitionBuilder = ServerServiceDefinition
        .builder(serviceDescriptor.getServiceName());
    Map<String, Protocol.Message> messages = protocol.getMessages();
    for (Method method : iface.getMethods()) {
      Protocol.Message msg = messages.get(method.getName());
      // setup a method handler only if corresponding message exists in avro protocol.
      if (msg != null) {
        // One-way messages get fire-and-forget handling; everything else is a
        // standard unary call.
        UnaryMethodHandler methodHandler = msg.isOneWay() ? new OneWayUnaryMethodHandler(impl, method)
            : new UnaryMethodHandler(impl, method);
        serviceDefinitionBuilder.addMethod(
            serviceDescriptor.getMethod(method.getName(), MethodDescriptor.MethodType.UNARY),
            ServerCalls.asyncUnaryCall(methodHandler));
      }
    }
    return serviceDefinitionBuilder.build();
  }

  /**
   * Invokes the service implementation reflectively and sends the result (or
   * the thrown exception object) back as the unary response payload.
   */
  private static class UnaryMethodHandler implements ServerCalls.UnaryMethod<Object[], Object> {
    private final Object serviceImpl;
    private final Method method;

    UnaryMethodHandler(Object serviceImpl, Method method) {
      this.serviceImpl = serviceImpl;
      this.method = method;
    }

    @Override
    public void invoke(Object[] request, StreamObserver<Object> responseObserver) {
      Object methodResponse = null;
      try {
        methodResponse = method.invoke(getServiceImpl(), request);
      } catch (InvocationTargetException e) {
        // Unwrap to the exception the service actually threw.
        methodResponse = e.getTargetException();
      } catch (Exception e) {
        methodResponse = e;
      }
      // Exceptions are sent as the response payload; the marshaller encodes them
      // using the protocol's error schema rather than a gRPC status.
      responseObserver.onNext(methodResponse);
      responseObserver.onCompleted();
    }

    public Method getMethod() {
      return method;
    }

    public Object getServiceImpl() {
      return serviceImpl;
    }
  }

  /**
   * Handler for one-way messages: completes the call immediately, then runs the
   * service method, logging (not propagating) any failure.
   */
  private static class OneWayUnaryMethodHandler extends UnaryMethodHandler {
    private static final Logger LOG = Logger.getLogger(OneWayUnaryMethodHandler.class.getName());

    OneWayUnaryMethodHandler(Object serviceImpl, Method method) {
      super(serviceImpl, method);
    }

    @Override
    public void invoke(Object[] request, StreamObserver<Object> responseObserver) {
      // first respond back with a fixed void response in order for call to be
      // complete
      responseObserver.onNext(null);
      responseObserver.onCompleted();
      // process the rpc request
      try {
        getMethod().invoke(getServiceImpl(), request);
      } catch (Exception e) {
        // Walk to the root cause for a meaningful log entry; the self-cause
        // check guards against cycles.
        Throwable cause = e;
        while (cause.getCause() != null && cause != cause.getCause()) {
          cause = cause.getCause();
        }
        LOG.log(Level.WARNING, "Error processing one-way rpc", cause);
      }
    }
  }
}
6,948
0
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro
Create_ds/avro/lang/java/grpc/src/main/java/org/apache/avro/grpc/AvroResponseMarshaller.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.grpc;

import com.google.common.io.ByteStreams;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Protocol;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.avro.util.Utf8;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import io.grpc.MethodDescriptor;
import io.grpc.Status;

/**
 * Marshaller for Avro RPC response.
 *
 * Wire format (matching Avro RPC): a leading boolean flag — {@code false}
 * followed by the response datum on success, {@code true} followed by an
 * error datum on failure. One-way messages carry no payload.
 */
public class AvroResponseMarshaller implements MethodDescriptor.Marshaller<Object> {
  private static final EncoderFactory ENCODER_FACTORY = new EncoderFactory();
  private static final DecoderFactory DECODER_FACTORY = new DecoderFactory();
  private final Protocol.Message message;

  public AvroResponseMarshaller(Protocol.Message message) {
    this.message = message;
  }

  @Override
  public InputStream stream(Object value) {
    return new AvroResponseInputStream(value, message);
  }

  @Override
  public Object parse(InputStream stream) {
    try {
      if (message.isOneWay())
        return null;
      BinaryDecoder in = DECODER_FACTORY.binaryDecoder(stream, null);
      if (!in.readBoolean()) {
        // Flag false: normal response follows.
        Object response = new SpecificDatumReader(message.getResponse()).read(null, in);
        return response;
      } else {
        // Flag true: an error datum follows; surface declared errors as-is,
        // anything else wrapped in AvroRuntimeException.
        Object value = new SpecificDatumReader(message.getErrors()).read(null, in);
        if (value instanceof Exception) {
          return value;
        }
        return new AvroRuntimeException(value.toString());
      }
    } catch (IOException e) {
      throw Status.INTERNAL.withCause(e).withDescription("Error deserializing avro response").asRuntimeException();
    } finally {
      // Drain leftovers and close so gRPC buffers are released.
      AvroGrpcUtils.skipAndCloseQuietly(stream);
    }
  }

  /** Serializes a response (or error) lazily when gRPC drains the stream. */
  private static class AvroResponseInputStream extends AvroInputStream {
    private final Protocol.Message message;
    private Object response;

    AvroResponseInputStream(Object response, Protocol.Message message) {
      this.response = response;
      this.message = message;
    }

    @Override
    public int drainTo(OutputStream target) throws IOException {
      int written;
      if (getPartial() != null) {
        // Payload was already materialized by a read(); copy the buffer.
        written = (int) ByteStreams.copy(getPartial(), target);
      } else {
        written = writeResponse(target);
      }
      return written;
    }

    private int writeResponse(OutputStream target) throws IOException {
      int written;
      if (message.isOneWay()) {
        written = 0;
      } else if (response instanceof Exception) {
        // Errors are staged in a byte array so a failed encode (e.g. an error
        // type not in the schema) can be replaced wholesale with a
        // SYSTEM_ERRORS string datum.
        ByteArrayOutputStream bao = new ByteArrayOutputStream();
        BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(bao, null);
        try {
          out.writeBoolean(true);
          new SpecificDatumWriter(message.getErrors()).write(response, out);
        } catch (Exception e) {
          bao = new ByteArrayOutputStream();
          out = ENCODER_FACTORY.binaryEncoder(bao, null);
          out.writeBoolean(true);
          new SpecificDatumWriter(Protocol.SYSTEM_ERRORS).write(new Utf8(e.toString()), out);
        }
        out.flush();
        byte[] serializedError = bao.toByteArray();
        target.write(serializedError);
        written = serializedError.length;
      } else {
        // Success path streams directly to the target, counting bytes.
        CountingOutputStream outputStream = new CountingOutputStream(target);
        BinaryEncoder out = ENCODER_FACTORY.binaryEncoder(outputStream, null);
        out.writeBoolean(false);
        new SpecificDatumWriter(message.getResponse()).write(response, out);
        out.flush();
        written = outputStream.getWrittenCount();
      }
      // Release the reference; the stream is single-use.
      response = null;
      return written;
    }
  }
}
6,949
0
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/test/java/integration/SimpleOrderServiceIntegrationTest.java
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ${package}.integration;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.List;

import ${package}.transport.SimpleOrderServiceEndpoint;
import ${package}.transport.SimpleOrderServiceClient;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import ${package}.service.Confirmation;
import ${package}.service.Item;
import ${package}.service.Order;

/**
 * <code>SimpleOrderServiceIntegrationTest</code> runs as part of the Integration phase of the build and is
 * meant for end to end service testing.
 */
class SimpleOrderServiceIntegrationTest {

  // Shared across tests; created once in setupTransport and torn down in
  // shutdownTransport.
  private static SimpleOrderServiceEndpoint service;
  private static SimpleOrderServiceClient client;

  /** Submits one order over the wire and checks the confirmation echoes it. */
  @Test
  void simpleRoundTripTest() throws Exception {
    Order simpleOrder = createOrder();
    Confirmation c = client.submitOrder(simpleOrder);
    assertEquals(c.getOrderId(), simpleOrder.getOrderId());
    assertEquals(c.getCustomerId(), simpleOrder.getCustomerId());
    assertTrue(c.getEstimatedCompletion() > 0);
  }

  // Starts the Netty server and client on a fixed local port before any test.
  // NOTE(review): port 12345 is hard-coded — may collide on busy CI hosts.
  @BeforeAll
  public static void setupTransport() throws Exception {
    InetSocketAddress endpointAddress = new InetSocketAddress("0.0.0.0", 12345);
    service = new SimpleOrderServiceEndpoint(endpointAddress);
    client = new SimpleOrderServiceClient(endpointAddress);
    service.start();
    client.start();
  }

  @AfterAll
  public static void shutdownTransport() throws Exception {
    client.stop();
    service.stop();
  }

  /** Builds a minimal valid order fixture. */
  public Order createOrder() {
    return Order.newBuilder().setOrderId(1).setCustomerId(1).setOrderItems(createItems()).build();
  }

  /** Builds five distinct line items for the order fixture. */
  public List<Item> createItems() {
    List<Item> items = new ArrayList<Item>();
    for (int x = 0; x < 5; x++)
      items.add(Item.newBuilder().setName("Item-" + x).setQuantity(x + 1).setSku(1230 + x).build());
    return items;
  }
}
6,950
0
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceEndpoint.java
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ${package}.transport; import java.net.InetSocketAddress; import ${package}.service.SimpleOrderService; import org.apache.avro.ipc.Server; import org.apache.avro.ipc.netty.NettyServer; import org.apache.avro.ipc.specific.SpecificResponder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ${package}.service.OrderProcessingService; /** * {@code SimpleOrderProcessingServer} provides a very basic example Netty endpoint for the * {@link SimpleOrderService} implementation */ public class SimpleOrderServiceEndpoint { private static final Logger log = LoggerFactory.getLogger(SimpleOrderServiceEndpoint.class); private InetSocketAddress endpointAddress; private Server service; public SimpleOrderServiceEndpoint(InetSocketAddress endpointAddress) { this.endpointAddress = endpointAddress; } public synchronized void start() throws Exception { if (log.isInfoEnabled()) { log.info("Starting Simple Ordering Netty Server on '{}'", endpointAddress); } SpecificResponder responder = new SpecificResponder(OrderProcessingService.class, new SimpleOrderService()); service = new NettyServer(responder, 
endpointAddress); service.start(); } public synchronized void stop() throws Exception { if (log.isInfoEnabled()) { log.info("Stopping Simple Ordering Server on '{}'", endpointAddress); } service.start(); } }
6,951
0
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/transport/SimpleOrderServiceClient.java
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ${package}.transport;

import java.io.IOException;
import java.net.InetSocketAddress;

import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.netty.NettyTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import ${package}.service.Confirmation;
import ${package}.service.Order;
import ${package}.service.OrderFailure;
import ${package}.service.OrderProcessingService;

/**
 * {@code SimpleOrderServiceClient} is a basic client for the Netty backed {@link OrderProcessingService}
 * implementation.
 */
public class SimpleOrderServiceClient implements OrderProcessingService {

  // NOTE(review): logger is named after SimpleOrderServiceEndpoint, not this
  // class — log lines from the client appear under the endpoint's logger.
  private static final Logger LOG = LoggerFactory.getLogger(SimpleOrderServiceEndpoint.class);

  private InetSocketAddress endpointAddress;

  private Transceiver transceiver;

  private OrderProcessingService service;

  public SimpleOrderServiceClient(InetSocketAddress endpointAddress) {
    this.endpointAddress = endpointAddress;
  }

  /** Opens the Netty connection and builds a proxy for the RPC interface. */
  public synchronized void start() throws IOException {
    if (LOG.isInfoEnabled()) {
      LOG.info("Starting Simple Ordering Netty client on '{}'", endpointAddress);
    }
    transceiver = new NettyTransceiver(endpointAddress);
    service = SpecificRequestor.getClient(OrderProcessingService.class, transceiver);
  }

  /** Closes the connection if it is open; safe to call when never started. */
  public void stop() throws IOException {
    if (LOG.isInfoEnabled()) {
      LOG.info("Stopping Simple Ordering Netty client on '{}'", endpointAddress);
    }
    if (transceiver != null && transceiver.isConnected()) {
      transceiver.close();
    }
  }

  // Delegates the RPC to the generated proxy; requires start() first.
  @Override
  public Confirmation submitOrder(Order order) throws OrderFailure {
    return service.submitOrder(order);
  }
}
6,952
0
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java
Create_ds/avro/lang/java/archetypes/avro-service-archetype/src/main/resources/archetype-resources/src/main/java/service/SimpleOrderService.java
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ${package}.service; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * {@code SimpleOrderService} is a simple example implementation of an Avro service generated from the * order-service.avpr protocol definition. */ public class SimpleOrderService implements OrderProcessingService { private Logger log = LoggerFactory.getLogger(SimpleOrderService.class); @Override public Confirmation submitOrder(Order order) throws OrderFailure { log.info("Received order for '{}' items from customer with id '{}'", new Object[] {order.getOrderItems().size(), order.getCustomerId()}); long estimatedCompletion = System.currentTimeMillis() + (5 * 60 * 60); return Confirmation.newBuilder().setCustomerId(order.getCustomerId()).setEstimatedCompletion(estimatedCompletion) .setOrderId(order.getOrderId()).build(); } }
6,953
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestIDLMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mojo;

import java.io.File;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.codehaus.plexus.util.FileUtils;
import org.junit.Test;

import static java.util.Arrays.asList;

/**
 * Test the IDL Protocol Mojo.
 */
public class TestIDLMojo extends AbstractAvroMojoTest {

  private File testPom = new File(getBasedir(), "src/test/resources/unit/idl/pom.xml");
  private File injectingVelocityToolsTestPom = new File(getBasedir(),
      "src/test/resources/unit/idl/pom-injecting-velocity-tools.xml");

  /**
   * Runs the "idl" goal and verifies the generated sources, the logical-type
   * mapping (java.time.Instant) and the expected compiler warning.
   */
  @Test
  public void testIdlProtocolMojo() throws Exception {
    final IDLMojo mojo = (IDLMojo) lookupMojo("idl", testPom);
    final TestLog log = new TestLog();
    mojo.setLog(log);

    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/idl/test/");
    final Set<String> generatedFiles = new HashSet<>(
        asList("IdlPrivacy.java", "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"));
    assertFilesExist(outputDir, generatedFiles);

    final String idlUserContent = FileUtils.fileRead(new File(outputDir, "IdlUser.java"));
    assertTrue(idlUserContent.contains("java.time.Instant"));

    assertEquals(Collections.singletonList("[WARN] Line 22, char 1: Ignoring out-of-place documentation comment.\n"
        + "Did you mean to use a multiline comment ( /* ... */ ) instead?"), log.getLogEntries());
  }

  /**
   * Runs the "idl-protocol" goal with extra Velocity tools configured and
   * verifies the injected tool output appears in the generated sources.
   */
  @Test
  public void testSetCompilerVelocityAdditionalTools() throws Exception {
    final IDLProtocolMojo mojo = (IDLProtocolMojo) lookupMojo("idl-protocol", injectingVelocityToolsTestPom);
    final TestLog log = new TestLog();
    mojo.setLog(log);

    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/idl-inject/test");
    final Set<String> generatedFiles = new HashSet<>(
        asList("IdlPrivacy.java", "IdlTest.java", "IdlUser.java", "IdlUserWrapper.java"));
    assertFilesExist(outputDir, generatedFiles);

    final String schemaUserContent = FileUtils.fileRead(new File(outputDir, "IdlUser.java"));
    assertTrue(schemaUserContent.contains("It works!"));

    // The previous test already verifies the warnings.
    assertFalse(log.getLogEntries().isEmpty());
  }
}
6,954
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestInduceMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mojo;

import java.io.File;
import java.util.Arrays;

import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.entities.Person;
import org.apache.avro.protocols.Remote;
import org.apache.avro.reflect.ReflectData;
import org.apache.maven.plugin.testing.AbstractMojoTestCase;
import org.junit.Test;

/**
 * Tests the "induce" mojo, which derives .avsc schemas and .avpr protocols
 * from compiled Java classes via reflection.
 *
 * NOTE(review): this class extends the JUnit3-style AbstractMojoTestCase
 * (test* naming drives execution); the @Test annotations below are presumably
 * redundant under that runner — confirm before relying on them.
 */
public class TestInduceMojo extends AbstractMojoTestCase {

  protected File schemaPom;
  protected File protocolPom;

  @Override
  protected void setUp() throws Exception {
    String baseDir = getBasedir();
    schemaPom = new File(baseDir, "src/test/resources/unit/schema/induce-pom.xml");
    protocolPom = new File(baseDir, "src/test/resources/unit/protocol/induce-pom.xml");
    super.setUp();
  }

  @Override
  protected void tearDown() throws Exception {
    super.tearDown();
  }

  /** Sanity check: the mojo can be looked up from the test POM. */
  public void testInduceMojoExists() throws Exception {
    InduceMojo mojo = (InduceMojo) lookupMojo("induce", schemaPom);
    assertNotNull(mojo);
  }

  /** Induced Person schema must equal the one ReflectData derives directly. */
  @Test
  public void testInduceSchema() throws Exception {
    executeMojo(schemaPom);
    File outputDir = new File(getBasedir(), "target/test-harness/schemas/org/apache/avro/entities");
    assertTrue(outputDir.listFiles().length != 0);
    File personSchemaFile = Arrays.stream(outputDir.listFiles()).filter(file -> file.getName().endsWith("Person.avsc"))
        .findFirst().orElseThrow(AssertionError::new);
    assertEquals(ReflectData.get().getSchema(Person.class), new Schema.Parser().parse(personSchemaFile));
  }

  /** All induced schema files must use the .avsc extension. */
  @Test
  public void testInducedSchemasFileExtension() throws Exception {
    executeMojo(schemaPom);
    File outputDir = new File(getBasedir(), "target/test-harness/schemas/org/apache/avro/entities");
    for (File file : outputDir.listFiles()) {
      assertTrue(file.getName().contains(".avsc"));
    }
  }

  /** Induced Remote protocol must equal the one ReflectData derives directly. */
  @Test
  public void testInduceProtocol() throws Exception {
    executeMojo(protocolPom);
    File outputDir = new File(getBasedir(), "target/test-harness/protocol/org/apache/avro/protocols");
    assertTrue(outputDir.listFiles().length != 0);
    File remoteProtocolFile = Arrays.stream(outputDir.listFiles())
        .filter(file -> file.getName().endsWith("Remote.avpr")).findFirst().orElseThrow(AssertionError::new);
    assertEquals(ReflectData.get().getProtocol(Remote.class), Protocol.parse(remoteProtocolFile));
  }

  /** All induced protocol files must use the .avpr extension. */
  @Test
  public void testInducedProtocolsFileExtension() throws Exception {
    executeMojo(protocolPom);
    File outputDir = new File(getBasedir(), "target/test-harness/protocol/org/apache/avro/protocols");
    for (File file : outputDir.listFiles()) {
      assertTrue(file.getName().contains(".avpr"));
    }
  }

  // Helper: looks up and executes the induce mojo for the given POM.
  private void executeMojo(File pom) throws Exception {
    InduceMojo mojo = (InduceMojo) lookupMojo("induce", pom);
    mojo.execute();
  }
}
6,955
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestSchemaMojo.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mojo; import org.apache.maven.plugin.MojoExecutionException; import org.codehaus.plexus.util.FileUtils; import org.junit.Test; import org.junit.jupiter.api.Assertions; import java.io.File; import java.nio.file.Files; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * Test the Schema Mojo. 
*/
public class TestSchemaMojo extends AbstractAvroMojoTest {

  private File testPom = new File(getBasedir(), "src/test/resources/unit/schema/pom.xml");
  private File injectingVelocityToolsTestPom = new File(getBasedir(),
      "src/test/resources/unit/schema/pom-injecting-velocity-tools.xml");
  private File testNonexistentFilePom = new File(getBasedir(),
      "src/test/resources/unit/schema/pom-nonexistent-file.xml");
  private File testNonexistentSecondFilePom = new File(getBasedir(),
      "src/test/resources/unit/schema/pom-nonexistent-second-file.xml");
  private File testExtendsFilePom = new File(getBasedir(), "src/test/resources/unit/schema/pom-customExtends.xml");

  /**
   * Verifies that the "schema" goal generates the expected Java sources and
   * that timestamps are mapped to {@code java.time.Instant}.
   */
  @Test
  public void testSchemaMojo() throws Exception {
    final SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", testPom);
    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/schema/test");
    // Fixed: "SchemaCustom.java" was listed twice; the HashSet silently
    // collapsed the duplicate, so listing it once is equivalent and clearer.
    final Set<String> generatedFiles = new HashSet<>(Arrays.asList("PrivacyDirectImport.java", "PrivacyImport.java",
        "SchemaPrivacy.java", "SchemaUser.java", "SchemaCustom.java"));

    assertFilesExist(outputDir, generatedFiles);

    final String schemaUserContent = FileUtils.fileRead(new File(outputDir, "SchemaUser.java"));
    assertTrue(schemaUserContent.contains("java.time.Instant"));
  }

  /**
   * Verifies that user-supplied Velocity tools configured in the POM are
   * injected into the compiler templates (the template emits "It works!").
   */
  @Test
  public void testSetCompilerVelocityAdditionalTools() throws Exception {
    final SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", injectingVelocityToolsTestPom);
    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/schema-inject/test");
    final Set<String> generatedFiles = new HashSet<>(Arrays.asList("PrivacyDirectImport.java", "PrivacyImport.java",
        "SchemaPrivacy.java", "SchemaUser.java", "SchemaCustom.java"));

    assertFilesExist(outputDir, generatedFiles);

    final String schemaUserContent = FileUtils.fileRead(new File(outputDir, "SchemaUser.java"));
    assertTrue("Got " + schemaUserContent + " instead", schemaUserContent.contains("It works!"));
  }

  /** A configuration pointing at a missing schema file must fail the build. */
  @Test
  public void testThrowsErrorForNonexistentFile() throws Exception {
    try {
      final SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", testNonexistentFilePom);
      mojo.execute();
      fail("MojoExecutionException not thrown!");
    } catch (MojoExecutionException ignored) {
      // expected: the mojo must reject a missing schema file
    }
  }

  /** A missing file later in the file list must also fail, not just the first. */
  @Test
  public void testThrowsErrorForNonexistentSecondFile() throws Exception {
    try {
      final SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", testNonexistentSecondFilePom);
      mojo.execute();
      fail("MojoExecutionException not thrown!");
    } catch (MojoExecutionException ignored) {
      // expected: the mojo must reject a missing schema file
    }
  }

  /**
   * Verifies that generated record classes extend the custom base class
   * configured via the "recordSpecificClass" parameter instead of
   * SpecificRecordBase.
   */
  @Test
  public void testExtends() throws Exception {
    final SchemaMojo mojo = (SchemaMojo) lookupMojo("schema", testExtendsFilePom);
    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/extends/schema/test");
    File outputFile = new File(outputDir, "SchemaCustom.java");
    assertTrue(outputFile.exists());

    List<String> extendsLines = Files.readAllLines(outputFile.toPath()).stream()
        .filter((String line) -> line.contains("class SchemaCustom extends ")).collect(Collectors.toList());
    assertEquals(1, extendsLines.size());

    String extendLine = extendsLines.get(0);
    assertTrue(extendLine.contains(" org.apache.avro.custom.CustomRecordBase "));
    assertFalse(extendLine.contains("org.apache.avro.specific.SpecificRecordBase"));
  }
}
6,956
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestLog.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import org.apache.maven.plugin.logging.Log;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;

/**
 * Maven {@link Log} implementation that records every entry in memory so tests
 * can assert on what a mojo logged. All log levels report as enabled.
 */
public class TestLog implements Log {

  private final List<String> logEntries = new ArrayList<>();

  /** Returns all entries logged so far, in the order they were logged. */
  public List<String> getLogEntries() {
    return logEntries;
  }

  /**
   * Formats one entry as "[LEVEL] content" followed by the stack trace of
   * {@code error} (if any) and appends it to {@link #logEntries}.
   */
  private void log(String level, CharSequence content, Throwable error) {
    StringBuilder buffer = new StringBuilder();
    buffer.append('[').append(level).append("]");
    if (content != null) {
      buffer.append(' ').append(content);
    }
    if (error != null) {
      // A lone space when there is no content, otherwise put the trace on its
      // own line; this matches the original formatting exactly.
      buffer.append(content == null ? " " : System.lineSeparator());
      // Capture the stack trace with the JDK's StringWriter instead of
      // Velocity's StringBuilderWriter: the output is identical and it drops
      // an unnecessary third-party dependency.
      StringWriter trace = new StringWriter();
      error.printStackTrace(new PrintWriter(trace, true));
      buffer.append(trace.getBuffer());
    }
    logEntries.add(buffer.toString());
  }

  @Override
  public boolean isDebugEnabled() {
    return true;
  }

  @Override
  public void debug(CharSequence content) {
    debug(content, null);
  }

  @Override
  public void debug(CharSequence content, Throwable error) {
    log("DEBUG", content, error);
  }

  @Override
  public void debug(Throwable error) {
    debug(null, error);
  }

  @Override
  public boolean isInfoEnabled() {
    return true;
  }

  @Override
  public void info(CharSequence content) {
    info(content, null);
  }

  @Override
  public void info(CharSequence content, Throwable error) {
    log("INFO", content, error);
  }

  @Override
  public void info(Throwable error) {
    info(null, error);
  }

  @Override
  public boolean isWarnEnabled() {
    return true;
  }

  @Override
  public void warn(CharSequence content) {
    warn(content, null);
  }

  @Override
  public void warn(CharSequence content, Throwable error) {
    log("WARN", content, error);
  }

  @Override
  public void warn(Throwable error) {
    warn(null, error);
  }

  @Override
  public boolean isErrorEnabled() {
    return true;
  }

  @Override
  public void error(CharSequence content) {
    error(content, null);
  }

  @Override
  public void error(CharSequence content, Throwable error) {
    log("ERROR", content, error);
  }

  @Override
  public void error(Throwable error) {
    error(null, error);
  }
}
6,957
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/AbstractAvroMojoTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import java.io.File;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import org.apache.maven.plugin.testing.AbstractMojoTestCase;

/**
 * Base class for all Avro mojo test classes.
 */
public abstract class AbstractAvroMojoTest extends AbstractMojoTestCase {

  // The setUp()/tearDown() overrides that only delegated to super were
  // removed: the inherited behavior is identical without them.

  /**
   * Asserts that the given directory contains exactly the expected files — no
   * more, no fewer.
   *
   * @param directory     the directory being checked
   * @param expectedFiles the names of the files whose existence is being
   *                      checked
   */
  void assertFilesExist(File directory, Set<String> expectedFiles) {
    assertNotNull(directory);
    // Fixed typo in the failure message ("does not exists").
    assertTrue("Directory " + directory + " does not exist", directory.exists());
    assertNotNull(expectedFiles);
    assertFalse(expectedFiles.isEmpty());

    // Exact set equality: extra generated files fail the test too.
    final Set<String> filesInDirectory = new HashSet<>(Arrays.asList(directory.list()));
    assertEquals(expectedFiles, filesInDirectory);
  }
}
6,958
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/mojo/TestProtocolMojo.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mojo; import org.codehaus.plexus.util.FileUtils; import org.junit.Test; import java.io.File; import java.util.Arrays; import java.util.HashSet; import java.util.Set; /** * Test the Protocol Mojo. 
*/
public class TestProtocolMojo extends AbstractAvroMojoTest {

  private File testPom = new File(getBasedir(), "src/test/resources/unit/protocol/pom.xml");
  private File injectingVelocityToolsTestPom = new File(getBasedir(),
      "src/test/resources/unit/protocol/pom-injecting-velocity-tools.xml");

  /**
   * Verifies that the "protocol" goal generates the expected Java sources and
   * that timestamps are mapped to {@code java.time.Instant}.
   */
  @Test
  public void testProtocolMojo() throws Exception {
    final ProtocolMojo mojo = (ProtocolMojo) lookupMojo("protocol", testPom);
    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/protocol/test");
    final Set<String> generatedFiles = new HashSet<>(
        Arrays.asList("ProtocolPrivacy.java", "ProtocolTest.java", "ProtocolUser.java"));

    assertFilesExist(outputDir, generatedFiles);

    final String protocolUserContent = FileUtils.fileRead(new File(outputDir, "ProtocolUser.java"));
    assertTrue("Got " + protocolUserContent + " instead", protocolUserContent.contains("java.time.Instant"));
  }

  /**
   * Verifies that user-supplied Velocity tools configured in the POM are
   * injected into the compiler templates (the template emits "It works!").
   */
  @Test
  public void testSetCompilerVelocityAdditionalTools() throws Exception {
    final ProtocolMojo mojo = (ProtocolMojo) lookupMojo("protocol", injectingVelocityToolsTestPom);
    assertNotNull(mojo);
    mojo.execute();

    final File outputDir = new File(getBasedir(), "target/test-harness/protocol-inject/test");
    final Set<String> generatedFiles = new HashSet<>(
        Arrays.asList("ProtocolPrivacy.java", "ProtocolTest.java", "ProtocolUser.java"));

    assertFilesExist(outputDir, generatedFiles);

    // Renamed from the misleading "schemaUserContent" (this test reads a
    // protocol-generated class) and given the same diagnostic message as the
    // sibling test for consistency.
    final String protocolUserContent = FileUtils.fileRead(new File(outputDir, "ProtocolUser.java"));
    assertTrue("Got " + protocolUserContent + " instead", protocolUserContent.contains("It works!"));
  }
}
6,959
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/custom/CustomRecordBase.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.custom;

import org.apache.avro.specific.SpecificRecordBase;

/**
 * Custom record base class used by the maven-plugin tests: the "testExtends"
 * scenario configures the compiler so that generated record classes extend
 * this class instead of {@link SpecificRecordBase} directly. Intentionally
 * empty — only its position in the type hierarchy matters.
 */
public abstract class CustomRecordBase extends SpecificRecordBase {
}
6,960
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/protocols/Remote.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.protocols; public interface Remote { Object fetch(int id); }
6,961
0
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/test/java/org/apache/avro/entities/Person.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.entities; public class Person { private String name; }
6,962
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/InduceMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import java.io.File;
import java.io.PrintWriter;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.Charset;
import java.util.List;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.reflect.ReflectData;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;

/**
 * Generate Avro files (.avsc and .avpr) from Java classes or interfaces
 *
 * @goal induce
 * @phase process-classes
 * @threadSafe
 */
public class InduceMojo extends AbstractMojo {
  /**
   * The Java source directories.
   *
   * @parameter property="javaSourceDirectories"
   *            default-value="${basedir}/src/main/java"
   */
  private File[] javaSourceDirectories;

  /**
   * Directory where to output Avro schemas (.avsc) or protocols (.avpr).
   *
   * @parameter property="avroOutputDirectory"
   *            default-value="${project.build.directory}/generated-resources/avro"
   */
  private File avroOutputDirectory;

  /**
   * The output encoding.
   *
   * @parameter default-value="${project.build.sourceEncoding}"
   */
  private String encoding;

  /**
   * Whether to use ReflectData.AllowNull.
   *
   * @parameter default-value="false"
   */
  private boolean allowNull;

  /**
   * Override the default ReflectData implementation with an extension. Must be a
   * subclass of ReflectData.
   *
   * @parameter property="reflectDataImplementation"
   */
  private String reflectDataImplementation;

  /**
   * The current Maven project.
   *
   * @parameter default-value="${project}"
   * @readonly
   * @required
   */
  protected MavenProject project;

  // Initialized once per execution in execute() and shared by the walk below.
  private ClassLoader classLoader;
  private ReflectData reflectData;

  public void execute() throws MojoExecutionException {
    classLoader = getClassLoader();
    reflectData = getReflectData();
    if (encoding == null) {
      encoding = Charset.defaultCharset().name();
      getLog().warn("Property project.build.sourceEncoding not set, using system default " + encoding);
    }
    for (File sourceDirectory : javaSourceDirectories) {
      induceClasses(sourceDirectory);
    }
  }

  /**
   * Recursively walks {@code sourceDirectory}, loads the compiled class for
   * every .java file found and writes the induced schema (.avsc) or protocol
   * (.avpr) under {@link #avroOutputDirectory}.
   *
   * @throws MojoExecutionException if the directory cannot be listed, a class
   *                                cannot be loaded or resolved, or output
   *                                cannot be written
   */
  private void induceClasses(File sourceDirectory) throws MojoExecutionException {
    File[] files = sourceDirectory.listFiles();
    if (files == null) {
      throw new MojoExecutionException("Unable to list files from directory: " + sourceDirectory.getName());
    }
    for (File inputFile : files) {
      if (inputFile.isDirectory()) {
        induceClasses(inputFile);
        continue;
      }
      String className = parseClassName(inputFile.getPath());
      if (className == null) {
        continue; // Not a java file, continue
      }
      Class<?> klass = loadClass(classLoader, className);
      String fileName = getOutputFileName(klass);
      File outputFile = new File(fileName);
      // Best effort; if directory creation failed, the PrintWriter below
      // throws and is converted to a MojoExecutionException.
      outputFile.getParentFile().mkdirs();
      try (PrintWriter writer = new PrintWriter(fileName, encoding)) {
        // Interfaces induce protocols, concrete classes induce schemas.
        if (klass.isInterface()) {
          writer.println(reflectData.getProtocol(klass).toString(true));
        } else {
          writer.println(reflectData.getSchema(klass).toString(true));
        }
      } catch (AvroRuntimeException e) {
        throw new MojoExecutionException("Failed to resolve schema or protocol for class " + klass.getCanonicalName(),
            e);
      } catch (Exception e) {
        throw new MojoExecutionException("Failed to write output file for class " + klass.getCanonicalName(), e);
      }
    }
  }

  /**
   * Derives a fully-qualified class name from a source file path by stripping
   * everything up to and including the last "java/" path segment, or returns
   * null if the path is not a .java file.
   */
  private String parseClassName(String fileName) {
    // Fixed local-variable typo: was "indentifier".
    String identifier = "java" + File.separator;
    int index = fileName.lastIndexOf(identifier);
    String namespacedFileName = fileName.substring(index + identifier.length());
    if (!namespacedFileName.endsWith(".java")) {
      return null;
    }
    return namespacedFileName.replace(File.separator, ".").replaceFirst("\\.java$", "");
  }

  /**
   * Maps a class to its output path: .avpr for interfaces (protocols), .avsc
   * for concrete classes (schemas).
   */
  private String getOutputFileName(Class<?> klass) { // was a raw Class; parameterizing changes nothing at runtime
    String filename = avroOutputDirectory.getPath() + File.separator + klass.getName().replace(".", File.separator);
    if (klass.isInterface()) {
      return filename.concat(".avpr");
    } else {
      return filename.concat(".avsc");
    }
  }

  /**
   * Returns the ReflectData to use: the configured custom implementation if
   * any (instantiated via its no-args constructor), otherwise AllowNull or the
   * default singleton depending on {@link #allowNull}.
   */
  private ReflectData getReflectData() throws MojoExecutionException {
    if (reflectDataImplementation == null) {
      return allowNull ? ReflectData.AllowNull.get() : ReflectData.get();
    }
    try {
      Constructor<? extends ReflectData> constructor = loadClass(classLoader, reflectDataImplementation)
          .asSubclass(ReflectData.class).getConstructor();
      constructor.setAccessible(true);
      return constructor.newInstance();
    } catch (Exception e) {
      throw new MojoExecutionException(String.format(
          "Could not load ReflectData custom implementation %s. Make sure that it has a no-args constructor",
          reflectDataImplementation), e);
    }
  }

  /** Loads a class by name, converting failure into a build error. */
  private Class<?> loadClass(ClassLoader classLoader, String className) throws MojoExecutionException {
    try {
      return classLoader.loadClass(className);
    } catch (ClassNotFoundException e) {
      throw new MojoExecutionException("Failed to load class " + className, e);
    }
  }

  /**
   * Builds a class loader over the project's runtime classpath so compiled
   * project classes can be induced, falling back to the context class loader
   * when no classpath elements are available.
   */
  private ClassLoader getClassLoader() throws MojoExecutionException {
    ClassLoader classLoader;
    try {
      List<String> classpathElements = project.getRuntimeClasspathElements();
      if (null == classpathElements) {
        return Thread.currentThread().getContextClassLoader();
      }
      URL[] urls = new URL[classpathElements.size()];
      for (int i = 0; i < classpathElements.size(); ++i) {
        urls[i] = new File(classpathElements.get(i)).toURI().toURL();
      }
      classLoader = new URLClassLoader(urls, getClass().getClassLoader());
    } catch (Exception e) {
      throw new MojoExecutionException("Failed to obtain ClassLoader", e);
    }
    return classLoader;
  }
}
6,963
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/ProtocolMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import org.apache.avro.Protocol;

import java.io.File;
import java.io.IOException;

/**
 * Generate Java classes and interfaces from Avro protocol files (.avpr)
 *
 * @goal protocol
 * @phase generate-sources
 * @requiresDependencyResolution runtime
 * @threadSafe
 */
public class ProtocolMojo extends AbstractAvroMojo {
  /**
   * A set of Ant-like inclusion patterns used to select files from the source
   * directory for processing. By default, the pattern <code>**&#47;*.avpr</code>
   * is used to select protocol files.
   *
   * @parameter
   */
  private String[] includes = new String[] { "**/*.avpr" };

  /**
   * A set of Ant-like inclusion patterns used to select files from the test
   * source directory for processing. By default, the pattern
   * <code>**&#47;*.avpr</code> is used to select protocol files.
   *
   * @parameter
   */
  private String[] testIncludes = new String[] { "**/*.avpr" };

  /**
   * Parses a single .avpr file and delegates to the shared compile step in
   * AbstractAvroMojo to generate the corresponding Java sources.
   *
   * @param filename        the protocol file, relative to sourceDirectory
   * @param sourceDirectory the directory containing the protocol file
   * @param outputDirectory where the generated Java sources are written
   * @throws IOException if the file cannot be read, parsed or compiled
   */
  @Override
  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
    final File src = new File(sourceDirectory, filename);
    final Protocol protocol = Protocol.parse(src);
    doCompile(src, protocol, outputDirectory);
  }

  @Override
  protected String[] getIncludes() {
    return includes;
  }

  @Override
  protected String[] getTestIncludes() {
    return testIncludes;
  }
}
6,964
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/SchemaMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import org.apache.avro.Schema;
import org.apache.avro.SchemaParseException;
import org.apache.maven.plugin.MojoExecutionException;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

/**
 * Generate Java classes from Avro schema files (.avsc)
 *
 * @goal schema
 * @phase generate-sources
 * @requiresDependencyResolution runtime+test
 * @threadSafe
 */
public class SchemaMojo extends AbstractAvroMojo {
  /**
   * A parser used to parse all schema files. Using a common parser will
   * facilitate the import of external schemas: types parsed from earlier files
   * stay visible to later ones.
   */
  private Schema.Parser schemaParser = new Schema.Parser();

  /**
   * A set of Ant-like inclusion patterns used to select files from the source
   * directory for processing. By default, the pattern <code>**&#47;*.avsc</code>
   * is used to select schema files.
   *
   * @parameter
   */
  private String[] includes = new String[] { "**/*.avsc" };

  /**
   * A set of Ant-like inclusion patterns used to select files from the test
   * source directory for processing. By default, the pattern
   * <code>**&#47;*.avsc</code> is used to select schema files.
   *
   * @parameter
   */
  private String[] testIncludes = new String[] { "**/*.avsc" };

  /**
   * Parses all selected schema files together and compiles them to Java
   * sources.
   *
   * @param fileNames       the schema files, relative to sourceDirectory
   * @param sourceDirectory the directory containing the schema files
   * @param outputDirectory where the generated Java sources are written
   * @throws MojoExecutionException wrapping any parse or I/O failure
   */
  @Override
  protected void doCompile(String[] fileNames, File sourceDirectory, File outputDirectory)
      throws MojoExecutionException {
    final List<File> sourceFiles = Arrays.stream(fileNames)
        .map((String filename) -> new File(sourceDirectory, filename)).collect(Collectors.toList());
    // Newest source file with a real timestamp; used downstream for
    // modification-time-based up-to-date checks. May be null.
    final File sourceFileForModificationDetection = sourceFiles.stream().filter(file -> file.lastModified() > 0)
        .max(Comparator.comparing(File::lastModified)).orElse(null);
    final List<Schema> schemas;
    try {
      // This is necessary to maintain backward-compatibility. If there are
      // no imported files then isolate the schemas from each other, otherwise
      // allow them to share a single schema so reuse and sharing of schema
      // is possible.
      if (imports == null) {
        schemas = new Schema.Parser().parse(sourceFiles);
      } else {
        schemas = schemaParser.parse(sourceFiles);
      }
      doCompile(sourceFileForModificationDetection, schemas, outputDirectory);
    } catch (IOException | SchemaParseException ex) {
      throw new MojoExecutionException("Error compiling a file in " + sourceDirectory + " to " + outputDirectory, ex);
    }
  }

  @Override
  protected String[] getIncludes() {
    return includes;
  }

  @Override
  protected String[] getTestIncludes() {
    return testIncludes;
  }
}
6,965
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mojo;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

import org.apache.avro.Protocol;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.generic.GenericData;
import org.apache.avro.idl.IdlFile;
import org.apache.avro.idl.IdlReader;
import org.apache.maven.artifact.DependencyResolutionRequiredException;

/**
 * Generate Java classes and interfaces from AvroIDL files (.avdl)
 *
 * @goal idl
 * @requiresDependencyResolution runtime
 * @phase generate-sources
 * @threadSafe
 */
public class IDLMojo extends AbstractAvroMojo {
  /**
   * A set of Ant-like inclusion patterns used to select files from the source
   * directory for processing. By default, the pattern <code>**&#47;*.avdl</code>
   * is used to select IDL files.
   *
   * @parameter
   */
  private String[] includes = new String[] { "**/*.avdl" };

  /**
   * A set of Ant-like inclusion patterns used to select files from the test
   * source directory for processing. By default, the pattern
   * <code>**&#47;*.avdl</code> is used to select IDL files.
   *
   * @parameter
   */
  private String[] testIncludes = new String[] { "**/*.avdl" };

  /**
   * Parses one .avdl file and compiles the resulting protocol (or, for
   * schema-only IDL files, the named schemas) to Java sources.
   *
   * The thread's context class loader is temporarily replaced with one that
   * sees both the IDL source directory (so imports can refer to other IDL
   * files as classpath resources) and the project's runtime classpath, and is
   * restored in the finally block regardless of outcome.
   *
   * @throws IOException wrapping any class-loading or dependency-resolution
   *                     failure as well as genuine I/O errors
   */
  @Override
  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
    try {
      @SuppressWarnings("rawtypes")
      List runtimeClasspathElements = project.getRuntimeClasspathElements();
      List<URL> runtimeUrls = new ArrayList<>();

      // Add the source directory of avro files to the classpath so that
      // imports can refer to other idl files as classpath resources
      runtimeUrls.add(sourceDirectory.toURI().toURL());

      // If runtimeClasspathElements is not empty values add its values to Idl path.
      if (runtimeClasspathElements != null && !runtimeClasspathElements.isEmpty()) {
        for (Object runtimeClasspathElement : runtimeClasspathElements) {
          String element = (String) runtimeClasspathElement;
          runtimeUrls.add(new File(element).toURI().toURL());
        }
      }

      final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
      // NOTE(review): this URLClassLoader is never closed; classes loaded
      // through it stay referenced by the compiler output, so closing it here
      // may not be safe — confirm before changing.
      URLClassLoader projPathLoader = new URLClassLoader(runtimeUrls.toArray(new URL[0]), contextClassLoader);
      Thread.currentThread().setContextClassLoader(projPathLoader);
      try {
        IdlReader parser = new IdlReader();
        IdlFile idlFile = parser.parse(sourceDirectory.toPath().resolve(filename));
        // Surface parser warnings through the Maven log instead of dropping them.
        for (String warning : idlFile.getWarnings()) {
          getLog().warn(warning);
        }
        final SpecificCompiler compiler;
        final Protocol protocol = idlFile.getProtocol();
        if (protocol != null) {
          compiler = new SpecificCompiler(protocol);
        } else {
          // IDL file declared only named schemas, no protocol.
          compiler = new SpecificCompiler(idlFile.getNamedSchemas().values());
        }
        // Propagate all plugin configuration (inherited from AbstractAvroMojo)
        // onto the compiler before generating sources.
        compiler.setStringType(GenericData.StringType.valueOf(stringType));
        compiler.setTemplateDir(templateDirectory);
        compiler.setFieldVisibility(getFieldVisibility());
        compiler.setCreateOptionalGetters(createOptionalGetters);
        compiler.setGettersReturnOptional(gettersReturnOptional);
        compiler.setOptionalGettersForNullableFieldsOnly(optionalGettersForNullableFieldsOnly);
        compiler.setCreateSetters(createSetters);
        compiler.setAdditionalVelocityTools(instantiateAdditionalVelocityTools());
        compiler.setEnableDecimalLogicalType(enableDecimalLogicalType);
        // Custom conversions are looked up through the project class loader,
        // not the plugin's own, so user-defined conversion classes resolve.
        for (String customConversion : customConversions) {
          compiler.addCustomConversion(projPathLoader.loadClass(customConversion));
        }
        compiler.setOutputCharacterEncoding(project.getProperties().getProperty("project.build.sourceEncoding"));
        compiler.compileToDestination(null, outputDirectory);
      } finally {
        // Always restore the original context class loader.
        Thread.currentThread().setContextClassLoader(contextClassLoader);
      }
    } catch (ClassNotFoundException | DependencyResolutionRequiredException e) {
      throw new IOException(e);
    }
  }

  @Override
  protected String[] getIncludes() {
    return includes;
  }

  @Override
  protected String[] getTestIncludes() {
    return testIncludes;
  }
}
6,966
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/AbstractAvroMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mojo;

import org.apache.avro.LogicalTypes;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.generic.GenericData;
import org.apache.maven.artifact.DependencyResolutionRequiredException;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.model.fileset.FileSet;
import org.apache.maven.shared.model.fileset.util.FileSetManager;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * Base for Avro Compiler Mojos.
 *
 * <p>
 * Concrete subclasses supply the file-extension include patterns (via
 * {@link #getIncludes()} / {@link #getTestIncludes()}) and the per-file
 * compilation step (by overriding {@link #doCompile(String, File, File)}).
 * Configuration is carried in javadoc {@code @parameter} tags (old-style
 * javadoc-based mojo metadata), so those tags are load-bearing and must not be
 * edited casually.
 */
public abstract class AbstractAvroMojo extends AbstractMojo {
  /**
   * The source directory of avro files. This directory is added to the classpath
   * at schema compiling time. All files can therefore be referenced as classpath
   * resources following the directory structure under the source directory.
   *
   * @parameter property="sourceDirectory"
   *            default-value="${basedir}/src/main/avro"
   */
  private File sourceDirectory;

  /**
   * Destination directory for classes generated from the main source set.
   *
   * @parameter property="outputDirectory"
   *            default-value="${project.build.directory}/generated-sources/avro"
   */
  private File outputDirectory;

  /**
   * The source directory of test avro files.
   *
   * NOTE(review): this reuses property="sourceDirectory" (and
   * testOutputDirectory below reuses property="outputDirectory"), so a
   * -DsourceDirectory/-DoutputDirectory on the command line would affect both
   * the main and the test directories — confirm whether this is intentional.
   *
   * @parameter property="sourceDirectory"
   *            default-value="${basedir}/src/test/avro"
   */
  private File testSourceDirectory;

  /**
   * Destination directory for classes generated from the test source set.
   *
   * @parameter property="outputDirectory"
   *            default-value="${project.build.directory}/generated-test-sources/avro"
   */
  private File testOutputDirectory;

  /**
   * The field visibility indicator for the fields of the generated class, as
   * string values of SpecificCompiler.FieldVisibility. The text is case
   * insensitive.
   *
   * @parameter default-value="PRIVATE"
   */
  private String fieldVisibility;

  /**
   * A list of files or directories that should be compiled first thus making them
   * importable by subsequently compiled schemas. Note that imported files should
   * not reference each other.
   *
   * @parameter
   */
  protected String[] imports;

  /**
   * A set of Ant-like exclusion patterns used to prevent certain files from being
   * processed. By default, this set is empty such that no files are excluded.
   *
   * @parameter
   */
  protected String[] excludes = new String[0];

  /**
   * A set of Ant-like exclusion patterns used to prevent certain files from being
   * processed. By default, this set is empty such that no files are excluded.
   * Applies to the test source set only.
   *
   * @parameter
   */
  protected String[] testExcludes = new String[0];

  /**
   * The Java type to use for Avro strings. May be one of CharSequence, String or
   * Utf8. CharSequence by default.
   *
   * @parameter property="stringType"
   */
  protected String stringType = "CharSequence";

  /**
   * The directory (within the java classpath) that contains the velocity
   * templates to use for code generation. The default value points to the
   * templates included with the avro-maven-plugin.
   *
   * @parameter property="templateDirectory"
   */
  protected String templateDirectory = "/org/apache/avro/compiler/specific/templates/java/classic/";

  /**
   * The qualified names of classes which the plugin will look up, instantiate
   * (through an empty constructor that must exist) and set up to be injected into
   * Velocity templates by Avro compiler.
   *
   * @parameter property="velocityToolsClassesNames"
   */
  protected String[] velocityToolsClassesNames = new String[0];

  /**
   * Generated record schema classes will extend this class.
   *
   * @parameter property="recordSpecificClass"
   */
  private String recordSpecificClass = "org.apache.avro.specific.SpecificRecordBase";

  /**
   * Generated error schema classes will extend this class.
   *
   * @parameter property="errorSpecificClass"
   */
  private String errorSpecificClass = "org.apache.avro.specific.SpecificExceptionBase";

  /**
   * The createOptionalGetters parameter enables generating the getOptional...
   * methods that return an Optional of the requested type. This works ONLY on
   * Java 8+
   *
   * @parameter property="createOptionalGetters"
   */
  protected boolean createOptionalGetters = false;

  /**
   * The gettersReturnOptional parameter enables generating get... methods that
   * return an Optional of the requested type. This works ONLY on Java 8+
   *
   * @parameter property="gettersReturnOptional"
   */
  protected boolean gettersReturnOptional = false;

  /**
   * The optionalGettersForNullableFieldsOnly parameter works in conjunction with
   * gettersReturnOptional option. If it is set, Optional getters will be
   * generated only for fields that are nullable. If the field is mandatory,
   * regular getter will be generated. This works ONLY on Java 8+.
   *
   * @parameter property="optionalGettersForNullableFieldsOnly"
   */
  protected boolean optionalGettersForNullableFieldsOnly = false;

  /**
   * Determines whether or not to create setters for the fields of the record. The
   * default is to create setters.
   *
   * NOTE(review): the default comes from the javadoc tag below (mojo metadata),
   * not from a Java field initializer — the bare field defaults to false until
   * Maven injects the configured value.
   *
   * @parameter default-value="true"
   */
  protected boolean createSetters;

  /**
   * The createNullSafeAnnotations parameter adds JetBrains {@literal @}Nullable
   * and {@literal @}NotNull annotations for fhe fields of the record. The default
   * is to not include annotations.
   *
   * @parameter property="createNullSafeAnnotations"
   *
   * @see <a href=
   *      "https://www.jetbrains.com/help/idea/annotating-source-code.html#nullability-annotations">
   *      JetBrains nullability annotations</a>
   */
  protected boolean createNullSafeAnnotations = false;

  /**
   * A set of fully qualified class names of custom
   * {@link org.apache.avro.Conversion} implementations to add to the compiler.
   * The classes must be on the classpath at compile time and whenever the Java
   * objects are serialized.
   *
   * @parameter property="customConversions"
   */
  protected String[] customConversions = new String[0];

  /**
   * A set of fully qualified class names of custom
   * {@link org.apache.avro.LogicalTypes.LogicalTypeFactory} implementations to
   * add to the compiler. The classes must be on the classpath at compile time and
   * whenever the Java objects are serialized.
   *
   * @parameter property="customLogicalTypeFactories"
   */
  protected String[] customLogicalTypeFactories = new String[0];

  /**
   * Determines whether or not to use Java classes for decimal types
   *
   * @parameter default-value="false"
   */
  protected boolean enableDecimalLogicalType;

  /**
   * The current Maven project.
   *
   * @parameter default-value="${project}"
   * @readonly
   * @required
   */
  protected MavenProject project;

  /**
   * Mojo entry point. Compiles, in order: explicitly imported files/directories,
   * the main source directory, then the test source directory, and registers the
   * generated-sources roots with the Maven project.
   *
   * @throws MojoExecutionException if neither source directory is a directory,
   *                                an import path does not exist, or compilation
   *                                fails
   */
  @Override
  public void execute() throws MojoExecutionException {
    boolean hasSourceDir = null != sourceDirectory && sourceDirectory.isDirectory();
    boolean hasImports = null != imports;
    boolean hasTestDir = null != testSourceDirectory && testSourceDirectory.isDirectory();
    if (!hasSourceDir && !hasTestDir) {
      throw new MojoExecutionException("neither sourceDirectory: " + sourceDirectory + " or testSourceDirectory: "
          + testSourceDirectory + " are directories");
    }

    if (hasImports) {
      // Fail fast on any missing import path before compiling anything.
      checkImportPaths();
      for (String importedFile : imports) {
        File file = new File(importedFile);
        if (file.isDirectory()) {
          String[] includedFiles = getIncludedFiles(file.getAbsolutePath(), excludes, getIncludes());
          getLog().info("Importing Directory: " + file.getAbsolutePath());
          getLog().debug("Importing Directory Files: " + Arrays.toString(includedFiles));
          compileFiles(includedFiles, file, outputDirectory);
        } else if (file.isFile()) {
          getLog().info("Importing File: " + file.getAbsolutePath());
          // Single file: compile it relative to its parent directory.
          compileFiles(new String[] { file.getName() }, file.getParentFile(), outputDirectory);
        }
      }
    }

    if (hasSourceDir) {
      String[] includedFiles = getIncludedFiles(sourceDirectory.getAbsolutePath(), excludes, getIncludes());
      compileFiles(includedFiles, sourceDirectory, outputDirectory);
    }

    if (hasImports || hasSourceDir) {
      project.addCompileSourceRoot(outputDirectory.getAbsolutePath());
    }

    if (hasTestDir) {
      String[] includedFiles = getIncludedFiles(testSourceDirectory.getAbsolutePath(), testExcludes,
          getTestIncludes());
      compileFiles(includedFiles, testSourceDirectory, testOutputDirectory);
      project.addTestCompileSourceRoot(testOutputDirectory.getAbsolutePath());
    }
  }

  /**
   * Verifies that every configured import path exists on disk.
   *
   * @throws MojoExecutionException if any import path is missing
   */
  private void checkImportPaths() throws MojoExecutionException {
    for (String importedFile : imports) {
      File file = new File(importedFile);
      if (!file.exists()) {
        throw new MojoExecutionException("Path " + file.getAbsolutePath() + " does not exist");
      }
    }
  }

  /**
   * Resolves the files under {@code absPath} matched by {@code includes} minus
   * {@code excludes}. Import locations are always excluded, since they were
   * already compiled in {@link #execute()}.
   */
  private String[] getIncludedFiles(String absPath, String[] excludes, String[] includes) {
    final FileSetManager fileSetManager = new FileSetManager();
    final FileSet fs = new FileSet();
    fs.setDirectory(absPath);
    fs.setFollowSymlinks(false);

    // exclude imports directory since it has already been compiled.
    if (imports != null) {
      String importExclude = null;

      for (String importFile : this.imports) {
        File file = new File(importFile);

        if (file.isDirectory()) {
          importExclude = file.getName() + "/**";
        } else if (file.isFile()) {
          importExclude = "**/" + file.getName();
        }

        fs.addExclude(importExclude);
      }
    }
    for (String include : includes) {
      fs.addInclude(include);
    }
    for (String exclude : excludes) {
      fs.addExclude(exclude);
    }
    return fileSetManager.getIncludedFiles(fs);
  }

  /**
   * Compiles the given files, running {@link #doCompile(String[], File, File)}
   * with the thread context classloader temporarily swapped to one that sees the
   * project's runtime/test classpath (so custom conversions and templates
   * resolve). The original context classloader is always restored.
   *
   * NOTE(review): the URLClassLoader installed as context classloader here is
   * never closed (unlike the one in loadLogicalTypesFactories) — confirm whether
   * the leak is intentional because generated classes may still be loaded later.
   */
  private void compileFiles(String[] files, File sourceDir, File outDir) throws MojoExecutionException {
    final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    try {
      Thread.currentThread().setContextClassLoader(createClassLoader());

      // Need to register custom logical type factories before schema compilation.
      try {
        loadLogicalTypesFactories();
      } catch (IOException e) {
        throw new MojoExecutionException("Error while loading logical types factories ", e);
      }
      this.doCompile(files, sourceDir, outDir);
    } catch (MalformedURLException | DependencyResolutionRequiredException e) {
      throw new MojoExecutionException("Cannot locate classpath entries", e);
    } finally {
      // Restore the caller's context classloader no matter what happened above.
      Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
  }

  /**
   * Instantiates and registers each configured
   * {@link LogicalTypes.LogicalTypeFactory} (requires a public no-arg
   * constructor). Class-loading problems surface as IOException; reflective
   * instantiation problems as MojoExecutionException.
   */
  private void loadLogicalTypesFactories() throws IOException, MojoExecutionException {
    try (URLClassLoader classLoader = createClassLoader()) {
      for (String factory : customLogicalTypeFactories) {
        // Unchecked cast: the configured class is assumed to implement
        // LogicalTypeFactory; a mismatch fails at newInstance()/register time.
        Class<LogicalTypes.LogicalTypeFactory> logicalTypeFactoryClass = (Class<LogicalTypes.LogicalTypeFactory>) classLoader
            .loadClass(factory);
        LogicalTypes.LogicalTypeFactory factoryInstance = logicalTypeFactoryClass.getDeclaredConstructor()
            .newInstance();
        LogicalTypes.register(factoryInstance);
      }
    } catch (DependencyResolutionRequiredException | ClassNotFoundException e) {
      throw new IOException(e);
    } catch (InstantiationException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) {
      throw new MojoExecutionException("Failed to instantiate logical type factory class", e);
    }
  }

  /**
   * Parses the configured {@code fieldVisibility} (case-insensitive); silently
   * falls back to PRIVATE for null/unknown values.
   */
  protected SpecificCompiler.FieldVisibility getFieldVisibility() {
    try {
      String upper = String.valueOf(this.fieldVisibility).trim().toUpperCase();
      return SpecificCompiler.FieldVisibility.valueOf(upper);
    } catch (IllegalArgumentException e) {
      return SpecificCompiler.FieldVisibility.PRIVATE;
    }
  }

  /**
   * Instantiates each configured Velocity tool class via its no-arg constructor.
   * Any failure (missing class, no constructor, ...) aborts with a
   * RuntimeException.
   */
  protected List<Object> instantiateAdditionalVelocityTools() {
    final List<Object> velocityTools = new ArrayList<>(velocityToolsClassesNames.length);
    for (String velocityToolClassName : velocityToolsClassesNames) {
      try {
        Class<?> klass = Class.forName(velocityToolClassName);
        velocityTools.add(klass.getDeclaredConstructor().newInstance());
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
    return velocityTools;
  }

  /**
   * Compiles each file in turn, wrapping any IOException with the offending
   * filename for a useful build error.
   */
  protected void doCompile(String[] files, File sourceDirectory, File outputDirectory) throws MojoExecutionException {
    for (String filename : files) {
      try {
        doCompile(filename, sourceDirectory, outputDirectory);
      } catch (IOException e) {
        throw new MojoExecutionException("Error compiling file " + filename + " to " + outputDirectory, e);
      }
    }
  }

  /**
   * Per-file compilation hook; concrete mojo subclasses must override this.
   * The base implementation always throws to flag a programming error.
   */
  protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
    throw new UnsupportedOperationException(
        "Programmer error: AbstractAvroMojo.doCompile(String, java.io.File, java.io.File) called directly");
  };

  /** Compiles a collection of schemas via a freshly configured SpecificCompiler. */
  protected void doCompile(File sourceFileForModificationDetection, Collection<Schema> schemas, File outputDirectory)
      throws IOException {
    doCompile(sourceFileForModificationDetection, new SpecificCompiler(schemas), outputDirectory);
  }

  /** Compiles a protocol via a freshly configured SpecificCompiler. */
  protected void doCompile(File sourceFileForModificationDetection, Protocol protocol, File outputDirectory)
      throws IOException {
    doCompile(sourceFileForModificationDetection, new SpecificCompiler(protocol), outputDirectory);
  }

  /**
   * Applies every mojo setting to the compiler, then compiles to the output
   * directory. Custom conversion classes are loaded through the context
   * classloader installed by {@link #compileFiles}.
   */
  private void doCompile(File sourceFileForModificationDetection, SpecificCompiler compiler, File outputDirectory)
      throws IOException {
    compiler.setTemplateDir(templateDirectory);
    compiler.setStringType(GenericData.StringType.valueOf(stringType));
    compiler.setFieldVisibility(getFieldVisibility());
    compiler.setCreateOptionalGetters(createOptionalGetters);
    compiler.setGettersReturnOptional(gettersReturnOptional);
    compiler.setOptionalGettersForNullableFieldsOnly(optionalGettersForNullableFieldsOnly);
    compiler.setCreateSetters(createSetters);
    compiler.setCreateNullSafeAnnotations(createNullSafeAnnotations);
    compiler.setEnableDecimalLogicalType(enableDecimalLogicalType);
    try {
      for (String customConversion : customConversions) {
        compiler.addCustomConversion(Thread.currentThread().getContextClassLoader().loadClass(customConversion));
      }
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    }
    // May be null if the project does not set project.build.sourceEncoding.
    compiler.setOutputCharacterEncoding(project.getProperties().getProperty("project.build.sourceEncoding"));
    compiler.setAdditionalVelocityTools(instantiateAdditionalVelocityTools());
    compiler.setRecordSpecificClass(this.recordSpecificClass);
    compiler.setErrorSpecificClass(this.errorSpecificClass);
    compiler.compileToDestination(sourceFileForModificationDetection, outputDirectory);
  }

  /** Runtime plus test classpath elements of the project, as URLs. */
  protected List<URL> findClasspath() throws DependencyResolutionRequiredException, MalformedURLException {
    final List<URL> urls = appendElements(project.getRuntimeClasspathElements());
    urls.addAll(appendElements(project.getTestClasspathElements()));
    return urls;
  }

  /**
   * Builds a classloader over the project classpath, parented to the current
   * thread's context classloader.
   */
  protected URLClassLoader createClassLoader() throws DependencyResolutionRequiredException, MalformedURLException {
    final List<URL> urls = findClasspath();
    return new URLClassLoader(urls.toArray(new URL[0]), Thread.currentThread().getContextClassLoader());
  }

  /** Converts classpath element paths to file URLs; null input yields an empty list. */
  private List<URL> appendElements(List<String> runtimeClasspathElements) throws MalformedURLException {
    if (runtimeClasspathElements == null) {
      return new ArrayList<>();
    }
    List<URL> runtimeUrls = new ArrayList<>(runtimeClasspathElements.size());
    for (String runtimeClasspathElement : runtimeClasspathElements) {
      runtimeUrls.add(new File(runtimeClasspathElement).toURI().toURL());
    }
    return runtimeUrls;
  }

  /** Ant-style include patterns for the main source set (e.g. "**&#47;*.avsc"). */
  protected abstract String[] getIncludes();

  /** Ant-style include patterns for the test source set. */
  protected abstract String[] getTestIncludes();
}
6,967
0
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro
Create_ds/avro/lang/java/maven-plugin/src/main/java/org/apache/avro/mojo/IDLProtocolMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mojo;

/**
 * Generate Java classes and interfaces from AvroIDL files (.avdl)
 *
 * All behavior is inherited from IDLMojo; this subclass exists only to keep
 * the historical "idl-protocol" goal name working. The javadoc tags below are
 * Maven mojo metadata, not documentation — do not remove them.
 *
 * @goal idl-protocol
 * @requiresDependencyResolution runtime
 * @phase generate-sources
 * @threadSafe
 */
public class IDLProtocolMojo extends IDLMojo {
  // Empty; kept for backwards compatibility.
}
6,968
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestGenericJob.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericData;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

/**
 * Tests that a map-only job can emit generic records whose schema contains an
 * array of a union of records (nested "optional" fields).
 */
@SuppressWarnings("deprecation")
public class TestGenericJob {
  @TempDir
  public File DIR;

  /** Builds the top-level "Container" record schema: one array-of-union field. */
  private static Schema createSchema() {
    List<Field> fields = new ArrayList<>();
    fields.add(new Field("Optional", createArraySchema(), "", new ArrayList<>()));
    Schema recordSchema = Schema.createRecord("Container", "", "org.apache.avro.mapred", false);
    recordSchema.setFields(fields);
    return recordSchema;
  }

  /** Array whose element type is a union of five distinct inner record schemas. */
  private static Schema createArraySchema() {
    List<Schema> schemas = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
      schemas.add(createInnerSchema("optional_field_" + i));
    }

    Schema unionSchema = Schema.createUnion(schemas);
    return Schema.createArray(unionSchema);
  }

  /** Record with a single long field; both record and field use {@code name}. */
  private static Schema createInnerSchema(String name) {
    Schema innerrecord = Schema.createRecord(name, "", "", false);
    innerrecord.setFields(Collections.singletonList(new Field(name, Schema.create(Type.LONG), "", 0L)));
    return innerrecord;
  }

  @BeforeEach
  public void setup() throws IOException {
    // needed to satisfy the framework only - input ignored in mapper
    String dir = DIR.getPath();
    File infile = new File(dir + "/in");
    // try-with-resources guarantees the file handle is released even if the
    // write fails (the original only closed on the happy path).
    try (RandomAccessFile file = new RandomAccessFile(infile, "rw")) {
      // add some data so framework actually calls our mapper
      file.writeChars("aa bb cc\ndd ee ff\n");
    }
  }

  /**
   * Ignores its text input and emits a single Container record wrapped in a
   * Pair keyed by the input offset.
   */
  static class AvroTestConverter extends MapReduceBase
      implements Mapper<LongWritable, Text, AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> {

    public void map(LongWritable key, Text value,
        OutputCollector<AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> out, Reporter reporter)
        throws IOException {
      GenericData.Record optional_entry = new GenericData.Record(createInnerSchema("optional_field_1"));
      optional_entry.put("optional_field_1", 0L);

      GenericData.Array<GenericData.Record> array = new GenericData.Array<>(1, createArraySchema());
      array.add(optional_entry);

      GenericData.Record container = new GenericData.Record(createSchema());
      container.put("Optional", array);

      out.collect(new AvroWrapper<>(new Pair<>(key.get(), container)), NullWritable.get());
    }
  }

  /** Runs the map-only job end-to-end; passes if the job completes. */
  @Test
  void job() throws Exception {
    JobConf job = new JobConf();
    Path outputPath = new Path(DIR.getPath() + "/out");
    // Use the non-deprecated recursive form (consistent with TestAvroTextSort).
    outputPath.getFileSystem(job).delete(outputPath, true);

    job.setInputFormat(TextInputFormat.class);
    FileInputFormat.setInputPaths(job, DIR.getPath() + "/in");

    job.setMapperClass(AvroTestConverter.class);
    job.setNumReduceTasks(0);

    FileOutputFormat.setOutputPath(job, outputPath);
    System.out.println(createSchema());
    AvroJob.setOutputSchema(job, Pair.getPairSchema(Schema.create(Schema.Type.LONG), createSchema()));
    job.setOutputFormat(AvroOutputFormat.class);

    JobClient.runJob(job);
  }
}
6,969
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import java.io.File;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestAvroTextSort {

  @TempDir
  public File INPUT_DIR;

  @TempDir
  public File OUTPUT_DIR;

  /**
   * Run the identity job on a "bytes" Avro file using AvroAsTextInputFormat and
   * AvroTextOutputFormat to produce a sorted "bytes" Avro file.
   */
  @Test
  void sort() throws Exception {
    JobConf conf = new JobConf();
    String in = INPUT_DIR.getPath();
    Path out = new Path(OUTPUT_DIR.getPath());

    // Start from a clean slate and generate the unsorted input fixture.
    out.getFileSystem(conf).delete(out, true);
    WordCountUtil.writeLinesBytesFile(in);

    // Identity job: Avro bytes in, Avro bytes out, keys emitted as Text.
    conf.setInputFormat(AvroAsTextInputFormat.class);
    conf.setOutputFormat(AvroTextOutputFormat.class);
    conf.setOutputKeyClass(Text.class);
    FileInputFormat.setInputPaths(conf, new Path(in));
    FileOutputFormat.setOutputPath(conf, out);

    JobClient.runJob(conf);

    // The shuffle sorts by key, so the single output part must be sorted.
    WordCountUtil.validateSortedFile(out.toString() + "/part-00000.avro");
  }
}
6,970
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWeather.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import java.io.IOException;
import java.io.File;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.Reporter;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.file.DataFileReader;

import static org.apache.avro.file.DataFileConstants.SNAPPY_CODEC;
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;

import test.Weather;

/**
 * Tests mapred API with a specific record.
 */
public class TestWeather {
  // Counters shared with the static mapper/reducer classes so the tests can
  // verify that configure()/close() are each invoked exactly once.
  private static final AtomicInteger mapCloseCalls = new AtomicInteger();
  private static final AtomicInteger mapConfigureCalls = new AtomicInteger();
  private static final AtomicInteger reducerCloseCalls = new AtomicInteger();
  private static final AtomicInteger reducerConfigureCalls = new AtomicInteger();

  @AfterEach
  public void tearDown() {
    // Reset lifecycle counters so tests do not interfere with each other.
    mapCloseCalls.set(0);
    mapConfigureCalls.set(0);
    reducerCloseCalls.set(0);
    reducerConfigureCalls.set(0);
  }

  /** Uses default mapper with no reduces for a map-only identity job. */
  @Test
  @SuppressWarnings("deprecation")
  void mapOnly() throws Exception {
    JobConf job = new JobConf();
    String inDir = System.getProperty("share.dir", "../../../share") + "/test/data";
    Path input = new Path(inDir + "/weather.avro");
    Path output = new Path("target/test/weather-ident");

    // Recursive delete of any output left over from a previous run
    // (non-deprecated two-arg form).
    output.getFileSystem(job).delete(output, true);

    job.setJobName("identity map weather");

    AvroJob.setInputSchema(job, Weather.SCHEMA$);
    AvroJob.setOutputSchema(job, Weather.SCHEMA$);

    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);

    job.setNumReduceTasks(0); // map-only

    JobClient.runJob(job);

    // check output is correct
    DatumReader<Weather> reader = new SpecificDatumReader<>();
    DataFileReader<Weather> check = new DataFileReader<>(new File(inDir + "/weather.avro"), reader);
    DataFileReader<Weather> sorted = new DataFileReader<>(new File(output.toString() + "/part-00000.avro"), reader);

    for (Weather w : sorted)
      assertEquals(check.next(), w);

    check.close();
    sorted.close();
  }

  // maps input Weather to Pair<Weather,Void>, to sort by Weather
  public static class SortMapper extends AvroMapper<Weather, Pair<Weather, Void>> {
    @Override
    public void map(Weather w, AvroCollector<Pair<Weather, Void>> collector, Reporter reporter) throws IOException {
      collector.collect(new Pair<>(w, (Void) null));
    }

    @Override
    public void close() throws IOException {
      mapCloseCalls.incrementAndGet();
    }

    @Override
    public void configure(JobConf jobConf) {
      mapConfigureCalls.incrementAndGet();
    }
  }

  // output keys only, since values are empty
  public static class SortReducer extends AvroReducer<Weather, Void, Weather> {
    @Override
    public void reduce(Weather w, Iterable<Void> ignore, AvroCollector<Weather> collector, Reporter reporter)
        throws IOException {
      collector.collect(w);
    }

    @Override
    public void close() throws IOException {
      reducerCloseCalls.incrementAndGet();
    }

    @Override
    public void configure(JobConf jobConf) {
      reducerConfigureCalls.incrementAndGet();
    }
  }

  /**
   * Sorts the weather records through a full map/shuffle/reduce cycle, then
   * compares against the pre-sorted reference file and checks the
   * mapper/reducer lifecycle hooks fired exactly once each.
   */
  @Test
  @SuppressWarnings("deprecation")
  void sort() throws Exception {
    JobConf job = new JobConf();
    // Honor the share.dir system property, consistent with mapOnly() above
    // (previously this path was hard-coded).
    String inDir = System.getProperty("share.dir", "../../../share") + "/test/data";
    Path input = new Path(inDir + "/weather.avro");
    Path output = new Path("target/test/weather-sort");

    // Recursive delete of any output left over from a previous run.
    output.getFileSystem(job).delete(output, true);

    job.setJobName("sort weather");

    AvroJob.setInputSchema(job, Weather.SCHEMA$);
    AvroJob.setMapOutputSchema(job, Pair.getPairSchema(Weather.SCHEMA$, Schema.create(Type.NULL)));
    AvroJob.setOutputSchema(job, Weather.SCHEMA$);

    AvroJob.setMapperClass(job, SortMapper.class);
    AvroJob.setReducerClass(job, SortReducer.class);

    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);
    AvroJob.setOutputCodec(job, SNAPPY_CODEC);

    JobClient.runJob(job);

    // check output is correct
    DatumReader<Weather> reader = new SpecificDatumReader<>();
    DataFileReader<Weather> check = new DataFileReader<>(new File(inDir + "/weather-sorted.avro"), reader);
    DataFileReader<Weather> sorted = new DataFileReader<>(new File(output.toString() + "/part-00000.avro"), reader);

    for (Weather w : sorted)
      assertEquals(check.next(), w);

    check.close();
    sorted.close();

    // check that AvroMapper and AvroReducer get close() and configure() called
    assertEquals(1, mapCloseCalls.get());
    assertEquals(1, reducerCloseCalls.get());
    assertEquals(1, mapConfigureCalls.get());
    assertEquals(1, reducerConfigureCalls.get());
  }
}
6,971
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/WordCountUtil.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.io.File; import java.io.InputStream; import java.io.FileInputStream; import java.io.BufferedInputStream; import java.io.PrintStream; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.StringTokenizer; import java.util.Map; import java.util.TreeMap; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.mapred.JobConf; import org.apache.avro.Schema; import org.apache.avro.util.Utf8; import org.apache.avro.io.DatumReader; import org.apache.avro.io.DatumWriter; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.specific.SpecificDatumReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.file.DataFileStream; public class WordCountUtil { public static final String[] LINES = new String[] { "the quick brown fox jumps 
over the lazy dog", "the cow jumps over the moon", "the rain in spain falls mainly on the plains" }; public static final Map<String, Long> COUNTS = new TreeMap<>(); static { for (String line : LINES) { StringTokenizer tokens = new StringTokenizer(line); while (tokens.hasMoreTokens()) { String word = tokens.nextToken(); long count = COUNTS.getOrDefault(word, 0L); count++; COUNTS.put(word, count); } } } public static void writeLinesFile(String dir) throws IOException { writeLinesFile(new File(dir)); } public static void writeLinesFile(File dir) throws IOException { DatumWriter<Utf8> writer = new GenericDatumWriter<>(); try (DataFileWriter<Utf8> out = new DataFileWriter<>(writer)) { out.create(Schema.create(Schema.Type.STRING), dir); for (String line : LINES) { out.append(new Utf8(line)); } } } public static void writeLinesBytesFile(String dir) throws IOException { writeLinesBytesFile(new File(dir)); } public static void writeLinesBytesFile(File dir) throws IOException { FileUtil.fullyDelete(dir); File fileLines = new File(dir + "/lines.avro"); fileLines.getParentFile().mkdirs(); DatumWriter<ByteBuffer> writer = new GenericDatumWriter<>(); try (DataFileWriter<ByteBuffer> out = new DataFileWriter<>(writer)) { out.create(Schema.create(Schema.Type.BYTES), fileLines); for (String line : LINES) { out.append(ByteBuffer.wrap(line.getBytes(StandardCharsets.UTF_8))); } } } public static void writeLinesTextFile(File dir) throws IOException { FileUtil.fullyDelete(dir); File fileLines = new File(dir, "lines.avro"); fileLines.getParentFile().mkdirs(); try (PrintStream out = new PrintStream(fileLines)) { for (String line : LINES) { out.println(line); } } } public static void validateCountsFile(File file) throws Exception { int numWords = 0; DatumReader<Pair<Utf8, Long>> reader = new SpecificDatumReader<>(); try (InputStream in = new BufferedInputStream(new FileInputStream(file))) { try (DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(in, reader)) { for (Pair<Utf8, 
Long> wc : counts) { assertEquals(COUNTS.get(wc.key().toString()), wc.value(), wc.key().toString()); numWords++; } checkMeta(counts); } } assertEquals(COUNTS.size(), numWords); } public static void validateSortedFile(String file) throws Exception { validateSortedFile(new File(file)); } public static void validateSortedFile(File file) throws Exception { DatumReader<ByteBuffer> reader = new GenericDatumReader<>(); try (InputStream in = new BufferedInputStream(new FileInputStream(file))) { try (DataFileStream<ByteBuffer> lines = new DataFileStream<>(in, reader)) { List<String> sortedLines = new ArrayList<>(Arrays.asList(LINES)); Collections.sort(sortedLines); for (String expectedLine : sortedLines) { ByteBuffer buf = lines.next(); byte[] b = new byte[buf.remaining()]; buf.get(b); assertEquals(expectedLine, new String(b, StandardCharsets.UTF_8).trim()); } assertFalse(lines.hasNext()); } } } // metadata tests private static final String STRING_KEY = "string-key"; private static final String LONG_KEY = "long-key"; private static final String BYTES_KEY = "bytes-key"; private static final String STRING_META_VALUE = "value"; private static final long LONG_META_VALUE = 666; private static final byte[] BYTES_META_VALUE = new byte[] { (byte) 0x00, (byte) 0x80, (byte) 0xff }; public static void setMeta(JobConf job) { AvroJob.setOutputMeta(job, STRING_KEY, STRING_META_VALUE); AvroJob.setOutputMeta(job, LONG_KEY, LONG_META_VALUE); AvroJob.setOutputMeta(job, BYTES_KEY, BYTES_META_VALUE); } public static void checkMeta(DataFileStream<?> in) throws Exception { assertEquals(STRING_META_VALUE, in.getMetaString(STRING_KEY)); assertEquals(LONG_META_VALUE, in.getMetaLong(LONG_KEY)); assertTrue(Arrays.equals(BYTES_META_VALUE, in.getMeta(BYTES_KEY))); } }
6,972
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestSequenceFileReader.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mapred;

import java.io.IOException;
import java.io.File;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.net.URI;
import java.util.Iterator;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.avro.Schema;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.util.Utf8;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

/**
 * Tests reading Hadoop SequenceFiles with Avro, mixing Avro mappers/reducers
 * with plain Hadoop ones. A Long/Text sequence file is written once in
 * {@link #testWriteSequenceFile()} and consumed by every test.
 */
public class TestSequenceFileReader {
  // Number of key/value records in the shared input file; overridable via -Dtest.count.
  private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "10"));

  @TempDir
  public static File INPUT_DIR;

  @TempDir
  public File OUTPUT_DIR;

  /** Location of the shared input sequence file. */
  public static File file() {
    return new File(INPUT_DIR.getPath(), "test.seq");
  }

  // Avro schema matching the LongWritable/Text pairs in the sequence file.
  private static final Schema SCHEMA = Pair.getPairSchema(Schema.create(Schema.Type.LONG),
      Schema.create(Schema.Type.STRING));

  /** Writes COUNT (i, String(i)) records into the shared sequence file before any test runs. */
  @BeforeAll
  public static void testWriteSequenceFile() throws IOException {
    Configuration c = new Configuration();
    URI uri = file().toURI();
    try (SequenceFile.Writer writer = new SequenceFile.Writer(FileSystem.get(uri, c), c, new Path(uri.toString()),
        LongWritable.class, Text.class)) {
      final LongWritable key = new LongWritable();
      final Text val = new Text();
      for (int i = 0; i < COUNT; ++i) {
        key.set(i);
        val.set(Integer.toString(i));
        writer.append(key, val);
      }
    }
  }

  /** Reads the sequence file directly through SequenceFileReader. */
  @Test
  void readSequenceFile() throws Exception {
    checkFile(new SequenceFileReader<>(file()));
  }

  /**
   * Asserts that the reader yields exactly the COUNT (i, "i") pairs written by
   * {@link #testWriteSequenceFile()}, in order, then closes the reader.
   */
  public void checkFile(FileReader<Pair<Long, CharSequence>> reader) throws Exception {
    long i = 0;
    for (Pair<Long, CharSequence> p : reader) {
      assertEquals((Long) i, p.key());
      assertEquals(Long.toString(i), p.value().toString());
      i++;
    }
    assertEquals(COUNT, i);
    reader.close();
  }

  /** Identity job: sequence-file input read as Avro, written back as an Avro data file. */
  @Test
  void sequenceFileInputFormat() throws Exception {
    JobConf job = new JobConf();
    Path outputPath = new Path(OUTPUT_DIR.getPath());
    outputPath.getFileSystem(job).delete(outputPath, true);

    // configure input for Avro from sequence file
    AvroJob.setInputSequenceFile(job);
    FileInputFormat.setInputPaths(job, file().toURI().toString());
    AvroJob.setInputSchema(job, SCHEMA);

    // mapper is default, identity
    // reducer is default, identity

    // configure output for avro
    AvroJob.setOutputSchema(job, SCHEMA);
    FileOutputFormat.setOutputPath(job, outputPath);

    JobClient.runJob(job);

    checkFile(new DataFileReader<>(new File(outputPath.toString() + "/part-00000.avro"), new SpecificDatumReader<>()));
  }

  /** Plain Hadoop mapper emitting Avro-wrapped key/value output. */
  private static class NonAvroMapper extends MapReduceBase
      implements Mapper<LongWritable, Text, AvroKey<Long>, AvroValue<Utf8>> {

    public void map(LongWritable key, Text value, OutputCollector<AvroKey<Long>, AvroValue<Utf8>> out,
        Reporter reporter) throws IOException {
      out.collect(new AvroKey<>(key.get()), new AvroValue<>(new Utf8(value.toString())));
    }
  }

  /** Non-Avro mapper feeding an Avro identity reducer and Avro output. */
  @Test
  void nonAvroMapper() throws Exception {
    JobConf job = new JobConf();
    Path outputPath = new Path(OUTPUT_DIR.getPath());
    outputPath.getFileSystem(job).delete(outputPath, true);

    // configure input for non-Avro sequence file
    job.setInputFormat(SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(job, file().toURI().toString());

    // use a hadoop mapper that emits Avro output
    job.setMapperClass(NonAvroMapper.class);

    // reducer is default, identity

    // configure output for avro
    FileOutputFormat.setOutputPath(job, outputPath);
    AvroJob.setOutputSchema(job, SCHEMA);

    JobClient.runJob(job);

    checkFile(new DataFileReader<>(new File(outputPath.toString() + "/part-00000.avro"), new SpecificDatumReader<>()));
  }

  /** Plain Hadoop mapper emitting a complete Avro Pair as the key (map-only output shape). */
  private static class NonAvroOnlyMapper extends MapReduceBase
      implements Mapper<LongWritable, Text, AvroWrapper<Pair<Long, Utf8>>, NullWritable> {

    public void map(LongWritable key, Text value, OutputCollector<AvroWrapper<Pair<Long, Utf8>>, NullWritable> out,
        Reporter reporter) throws IOException {
      out.collect(new AvroWrapper<>(new Pair<>(key.get(), new Utf8(value.toString()))), NullWritable.get());
    }
  }

  /** Map-only job with a non-Avro mapper writing Avro output. */
  @Test
  void nonAvroMapOnly() throws Exception {
    JobConf job = new JobConf();
    Path outputPath = new Path(OUTPUT_DIR.getPath());
    outputPath.getFileSystem(job).delete(outputPath, true);

    // configure input for non-Avro sequence file
    job.setInputFormat(SequenceFileInputFormat.class);
    FileInputFormat.setInputPaths(job, file().toURI().toString());

    // use a hadoop mapper that emits Avro output
    job.setMapperClass(NonAvroOnlyMapper.class);

    // configure output for avro
    job.setNumReduceTasks(0); // map-only
    FileOutputFormat.setOutputPath(job, outputPath);
    AvroJob.setOutputSchema(job, SCHEMA);

    JobClient.runJob(job);

    checkFile(new DataFileReader<>(new File(outputPath.toString() + "/part-00000.avro"), new SpecificDatumReader<>()));
  }

  /** Plain Hadoop reducer consuming Avro-wrapped intermediate data. */
  private static class NonAvroReducer extends MapReduceBase
      implements Reducer<AvroKey<Long>, AvroValue<Utf8>, LongWritable, Text> {

    public void reduce(AvroKey<Long> key, Iterator<AvroValue<Utf8>> values, OutputCollector<LongWritable, Text> out,
        Reporter reporter) throws IOException {
      while (values.hasNext()) {
        AvroValue<Utf8> value = values.next();
        out.collect(new LongWritable(key.datum()), new Text(value.datum().toString()));
      }
    }
  }

  /** Avro identity mapper feeding a non-Avro reducer writing a plain sequence file. */
  @Test
  void nonAvroReducer() throws Exception {
    JobConf job = new JobConf();
    Path outputPath = new Path(OUTPUT_DIR.getPath());
    outputPath.getFileSystem(job).delete(outputPath, true);

    // configure input for Avro from sequence file
    AvroJob.setInputSequenceFile(job);
    AvroJob.setInputSchema(job, SCHEMA);
    FileInputFormat.setInputPaths(job, file().toURI().toString());

    // mapper is default, identity

    // use a hadoop reducer that consumes Avro input
    AvroJob.setMapOutputSchema(job, SCHEMA);
    job.setReducerClass(NonAvroReducer.class);

    // configure outputPath for non-Avro SequenceFile
    job.setOutputFormat(SequenceFileOutputFormat.class);
    FileOutputFormat.setOutputPath(job, outputPath);

    // output key/value classes are default, LongWritable/Text

    JobClient.runJob(job);

    checkFile(new SequenceFileReader<>(new File(outputPath.toString() + "/part-00000")));
  }
}
6,973
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroInputFormat.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.mapred; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.JobConf; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.util.HashSet; import java.util.Set; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; public class TestAvroInputFormat { @TempDir public File DIR; private JobConf conf; private FileSystem fs; private Path inputDir; @BeforeEach public void setUp() throws Exception { conf = new JobConf(); fs = FileSystem.getLocal(conf); inputDir = new Path(DIR.getPath()); } @AfterEach public void tearDown() throws Exception { fs.delete(inputDir, true); } @SuppressWarnings("rawtypes") @Test void ignoreFilesWithoutExtension() throws Exception { fs.mkdirs(inputDir); Path avroFile = new Path(inputDir, "somefile.avro"); Path textFile = new Path(inputDir, "someotherfile.txt"); fs.create(avroFile).close(); fs.create(textFile).close(); 
FileInputFormat.setInputPaths(conf, inputDir); AvroInputFormat inputFormat = new AvroInputFormat(); FileStatus[] statuses = inputFormat.listStatus(conf); assertEquals(1, statuses.length); assertEquals("somefile.avro", statuses[0].getPath().getName()); conf.setBoolean(AvroInputFormat.IGNORE_FILES_WITHOUT_EXTENSION_KEY, false); statuses = inputFormat.listStatus(conf); assertEquals(2, statuses.length); Set<String> names = new HashSet<>(); names.add(statuses[0].getPath().getName()); names.add(statuses[1].getPath().getName()); assertTrue(names.contains("somefile.avro")); assertTrue(names.contains("someotherfile.txt")); } }
6,974
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroOutputFormat.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred; import org.apache.avro.file.CodecFactory; import org.apache.hadoop.mapred.JobConf; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.*; public class TestAvroOutputFormat { @Test void setSyncInterval() { JobConf jobConf = new JobConf(); int newSyncInterval = 100000; AvroOutputFormat.setSyncInterval(jobConf, newSyncInterval); assertEquals(newSyncInterval, jobConf.getInt(AvroOutputFormat.SYNC_INTERVAL_KEY, -1)); } @Test void noCodec() { JobConf job = new JobConf(); assertNull(AvroOutputFormat.getCodecFactory(job)); job = new JobConf(); job.set("mapred.output.compress", "false"); job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec"); assertNull(AvroOutputFormat.getCodecFactory(job)); job = new JobConf(); job.set("mapred.output.compress", "false"); job.set(AvroJob.OUTPUT_CODEC, "bzip2"); assertNull(AvroOutputFormat.getCodecFactory(job)); } @Test void bZip2CodecUsingHadoopClass() { CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec"); 
CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroBZip2Codec.getClass()); } @Test void bZip2CodecUsingAvroCodec() { CodecFactory avroBZip2Codec = CodecFactory.fromString("bzip2"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set(AvroJob.OUTPUT_CODEC, "bzip2"); CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroBZip2Codec.getClass()); } @Test void deflateCodecUsingHadoopClass() { CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.DeflateCodec"); CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroDeflateCodec.getClass()); } @Test void deflateCodecUsingAvroCodec() { CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set(AvroJob.OUTPUT_CODEC, "deflate"); CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroDeflateCodec.getClass()); } @Test void snappyCodecUsingHadoopClass() { CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec"); CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroSnappyCodec.getClass()); } @Test void snappyCodecUsingAvroCodec() { CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set(AvroJob.OUTPUT_CODEC, "snappy"); CodecFactory factory = 
AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroSnappyCodec.getClass()); } @Test void gZipCodecUsingHadoopClass() { CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate"); JobConf job = new JobConf(); job.set("mapred.output.compress", "true"); job.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.GZipCodec"); CodecFactory factory = AvroOutputFormat.getCodecFactory(job); assertNotNull(factory); assertEquals(factory.getClass(), avroDeflateCodec.getClass()); } }
6,975
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestWordCount.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.File; import java.io.IOException; import java.util.StringTokenizer; import org.apache.avro.Schema; import org.apache.avro.util.Utf8; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.FileSplit; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reporter; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; public class TestWordCount { @TempDir public static File INPUT_DIR; @TempDir public static File OUTPUT_DIR; public static class MapImpl extends AvroMapper<Utf8, Pair<Utf8, Long>> { @Override public void map(Utf8 text, AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter) throws IOException { StringTokenizer tokens = new StringTokenizer(text.toString()); while (tokens.hasMoreTokens()) collector.collect(new Pair<>(new Utf8(tokens.nextToken()), 1L)); } } public 
static class ReduceImpl extends AvroReducer<Utf8, Long, Pair<Utf8, Long>> { @Override public void reduce(Utf8 word, Iterable<Long> counts, AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter) throws IOException { long sum = 0; for (long count : counts) sum += count; collector.collect(new Pair<>(word, sum)); } } @Test void runTestsInOrder() throws Exception { String pathOut = OUTPUT_DIR.getPath(); testJob(pathOut); testProjection(pathOut); } @SuppressWarnings("deprecation") public void testJob(String pathOut) throws Exception { JobConf job = new JobConf(); String pathIn = INPUT_DIR.getPath(); WordCountUtil.writeLinesFile(pathIn + "/lines.avro"); Path outputPath = new Path(pathOut); outputPath.getFileSystem(job).delete(outputPath); job.setJobName("wordcount"); AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING)); AvroJob.setOutputSchema(job, new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema()); AvroJob.setMapperClass(job, MapImpl.class); AvroJob.setCombinerClass(job, ReduceImpl.class); AvroJob.setReducerClass(job, ReduceImpl.class); FileInputFormat.setInputPaths(job, new Path(pathIn)); FileOutputFormat.setOutputPath(job, new Path(pathOut)); FileOutputFormat.setCompressOutput(job, true); WordCountUtil.setMeta(job); JobClient.runJob(job); WordCountUtil.validateCountsFile(new File(pathOut, "part-00000.avro")); } @SuppressWarnings("deprecation") public void testProjection(String inputPathString) throws Exception { JobConf job = new JobConf(); Integer defaultRank = -1; String jsonSchema = "{\"type\":\"record\"," + "\"name\":\"org.apache.avro.mapred.Pair\"," + "\"fields\": [ " + "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," + "{\"name\":\"value\", \"type\":\"long\"}" + "]}"; Schema readerSchema = Schema.parse(jsonSchema); AvroJob.setInputSchema(job, readerSchema); Path inputPath = new Path(inputPathString + "/part-00000.avro"); FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath); FileSplit fileSplit = new FileSplit(inputPath, 
0, fileStatus.getLen(), job); AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<>(job, fileSplit); AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<>(null); NullWritable ignore = NullWritable.get(); long sumOfCounts = 0; long numOfCounts = 0; while (recordReader.next(inputPair, ignore)) { assertEquals(inputPair.datum().get(0), defaultRank); sumOfCounts += (Long) inputPair.datum().get(1); numOfCounts++; } assertEquals(numOfCounts, WordCountUtil.COUNTS.size()); long actualSumOfCounts = 0; for (Long count : WordCountUtil.COUNTS.values()) { actualSumOfCounts += count; } assertEquals(sumOfCounts, actualSumOfCounts); } }
6,976
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextOutputFormat.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.File; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import org.apache.avro.Schema; import org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.io.DatumReader; import org.apache.avro.io.DatumWriter; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.RecordWriter; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; public class TestAvroTextOutputFormat { @TempDir public File tmpFolder; @Test void avroTextRecordWriter() throws Exception { File file = new File(tmpFolder.getPath(), "writer"); Schema schema = Schema.create(Schema.Type.BYTES); DatumWriter<ByteBuffer> datumWriter = new GenericDatumWriter<>(schema); DataFileWriter<ByteBuffer> fileWriter = new DataFileWriter<>(datumWriter); fileWriter.create(schema, file); RecordWriter<Object, Object> rw = new AvroTextOutputFormat<>().new 
AvroTextRecordWriter(fileWriter, "\t".getBytes(StandardCharsets.UTF_8)); rw.write(null, null); rw.write(null, NullWritable.get()); rw.write(NullWritable.get(), null); rw.write(NullWritable.get(), NullWritable.get()); rw.write("k1", null); rw.write("k2", NullWritable.get()); rw.write(null, "v1"); rw.write(NullWritable.get(), "v2"); rw.write("k3", "v3"); rw.write(new Text("k4"), new Text("v4")); rw.close(null); DatumReader<ByteBuffer> reader = new GenericDatumReader<>(); DataFileReader<ByteBuffer> fileReader = new DataFileReader<>(file, reader); assertEquals("k1", asString(fileReader.next())); assertEquals("k2", asString(fileReader.next())); assertEquals("v1", asString(fileReader.next())); assertEquals("v2", asString(fileReader.next())); assertEquals("k3\tv3", asString(fileReader.next())); assertEquals("k4\tv4", asString(fileReader.next())); assertFalse(fileReader.hasNext(), "End"); fileReader.close(); } private String asString(ByteBuffer buf) { byte[] b = new byte[buf.remaining()]; buf.get(b); return new String(b, StandardCharsets.UTF_8); } }
6,977
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroWrapper.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mapred;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

/** Unit test for {@link AvroWrapper}. */
public class TestAvroWrapper {

  /** toString() must delegate to the wrapped datum's string form. */
  @Test
  void testToString() {
    final String payload = "my string";
    final AvroWrapper<CharSequence> wrapped = new AvroWrapper<>(payload);
    assertEquals(payload, wrapped.toString());
  }
}
6,978
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleOutputs.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.File;
import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

/**
 * Exercises {@link AvroMultipleOutputs}: a word-count job whose mapper and
 * reducer each write to additional named outputs ("myavro", "myavro1",
 * "myavro2") and to base-named files ("testavrofile", "testavrofile1"),
 * followed by projection reads that validate each produced file.
 *
 * NOTE(review): the individual test* methods share the same temp output
 * directory and are deliberately run in a fixed sequence from
 * {@link #runTestsInOrder()}; they are NOT independent JUnit tests.
 */
public class TestAvroMultipleOutputs {
  @TempDir
  public File INPUT_DIR;

  @TempDir
  public File OUTPUT_DIR;

  /**
   * Tokenizes each input line. Every token goes to the main output as a
   * (word, 1) pair AND, as a plain string, to the named output "myavro2".
   */
  public static class MapImpl extends AvroMapper<Utf8, Pair<Utf8, Long>> {
    private AvroMultipleOutputs amos;

    public void configure(JobConf Job) {
      this.amos = new AvroMultipleOutputs(Job);
    }

    @Override
    public void map(Utf8 text, AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter) throws IOException {
      StringTokenizer tokens = new StringTokenizer(text.toString());
      while (tokens.hasMoreTokens()) {
        String tok = tokens.nextToken();
        collector.collect(new Pair<>(new Utf8(tok), 1L));
        // Side-channel: the token's Pair rendered as a string.
        amos.getCollector("myavro2", reporter).collect(new Pair<Utf8, Long>(new Utf8(tok), 1L).toString());
      }
    }

    // Must close the multiple-outputs handle so named-output files are flushed.
    public void close() throws IOException {
      amos.close();
    }
  }

  /**
   * Sums counts per word and fans the result out to several named outputs,
   * covering both the getCollector() and the collect() overloads (with and
   * without an explicit schema / base file name).
   */
  public static class ReduceImpl extends AvroReducer<Utf8, Long, Pair<Utf8, Long>> {
    private AvroMultipleOutputs amos;

    public void configure(JobConf Job) {
      amos = new AvroMultipleOutputs(Job);
    }

    @Override
    public void reduce(Utf8 word, Iterable<Long> counts, AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter)
        throws IOException {
      long sum = 0;
      for (long count : counts)
        sum += count;
      Pair<Utf8, Long> outputvalue = new Pair<>(word, sum);
      // Named output via collector handle.
      amos.getCollector("myavro", reporter).collect(outputvalue);
      // Named output, value-only overload (string payload).
      amos.collect("myavro1", reporter, outputvalue.toString());
      // Named output with explicit schema and base file name "testavrofile".
      amos.collect("myavro", reporter, new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema(), outputvalue, "testavrofile");
      // Same, but string schema and base file name "testavrofile1".
      amos.collect("myavro", reporter, Schema.create(Schema.Type.STRING), outputvalue.toString(), "testavrofile1");
      collector.collect(new Pair<>(word, sum));
    }

    @Override
    public void close() throws IOException {
      amos.close();
    }
  }

  /**
   * Driver: later steps read files written by earlier ones, so order matters.
   */
  @Test
  void runTestsInOrder() throws Exception {
    String avroPath = OUTPUT_DIR.getPath();
    testJob(avroPath);
    testProjection(avroPath);
    testProjectionNewMethodsOne(avroPath);
    testProjectionNewMethodsTwo(avroPath);
    testProjection1(avroPath);
    testJobNoreducer();
    testProjectionNoreducer(avroPath);
  }

  /** Runs the full map+reduce job and validates the main output file. */
  @SuppressWarnings("deprecation")
  public void testJob(String pathOut) throws Exception {
    JobConf job = new JobConf();
    String pathIn = INPUT_DIR.getPath();
    File fileIn = new File(pathIn, "lines.avro");
    Path outputPath = new Path(pathOut);
    outputPath.getFileSystem(job).delete(outputPath);
    WordCountUtil.writeLinesFile(fileIn);
    job.setJobName("AvroMultipleOutputs");
    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
    AvroJob.setOutputSchema(job, new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema());
    AvroJob.setMapperClass(job, MapImpl.class);
    AvroJob.setReducerClass(job, ReduceImpl.class);
    FileInputFormat.setInputPaths(job, pathIn);
    FileOutputFormat.setOutputPath(job, outputPath);
    FileOutputFormat.setCompressOutput(job, false);
    // Register each named output used by MapImpl/ReduceImpl with its schema.
    AvroMultipleOutputs.addNamedOutput(job, "myavro", AvroOutputFormat.class,
        new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema());
    AvroMultipleOutputs.addNamedOutput(job, "myavro1", AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
    AvroMultipleOutputs.addNamedOutput(job, "myavro2", AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
    WordCountUtil.setMeta(job);
    JobClient.runJob(job);
    WordCountUtil.validateCountsFile(new File(outputPath.toString(), "/part-00000.avro"));
  }

  /**
   * Reads "myavro-r-00000.avro" with a projecting reader schema: the writer's
   * string key is replaced by an int "rank" field that must take its default
   * (-1); count values are summed and checked against the expected totals.
   */
  @SuppressWarnings("deprecation")
  public void testProjection(String inputDirectory) throws Exception {
    JobConf job = new JobConf();
    Integer defaultRank = -1;
    String jsonSchema = "{\"type\":\"record\"," + "\"name\":\"org.apache.avro.mapred.Pair\"," + "\"fields\": [ "
        + "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," + "{\"name\":\"value\", \"type\":\"long\"}" + "]}";
    Schema readerSchema = Schema.parse(jsonSchema);
    AvroJob.setInputSchema(job, readerSchema);
    Path inputPath = new Path(inputDirectory + "/myavro-r-00000.avro");
    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<>(job, fileSplit);
    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<>(null);
    NullWritable ignore = NullWritable.get();
    long sumOfCounts = 0;
    long numOfCounts = 0;
    while (recordReader.next(inputPair, ignore)) {
      // Field 0 is the projected "rank" — absent in the data, so default -1.
      assertEquals(inputPair.datum().get(0), defaultRank);
      sumOfCounts += (Long) inputPair.datum().get(1);
      numOfCounts++;
    }
    assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
    long actualSumOfCounts = 0;
    for (Long count : WordCountUtil.COUNTS.values()) {
      actualSumOfCounts += count;
    }
    assertEquals(sumOfCounts, actualSumOfCounts);
  }

  /**
   * Same projection as {@link #testProjection(String)}, but against the file
   * produced via the base-file-name collect() overload ("testavrofile").
   */
  @SuppressWarnings("deprecation")
  public void testProjectionNewMethodsOne(String inputDirectory) throws Exception {
    JobConf job = new JobConf();
    Integer defaultRank = -1;
    String jsonSchema = "{\"type\":\"record\"," + "\"name\":\"org.apache.avro.mapred.Pair\"," + "\"fields\": [ "
        + "{\"name\":\"rank\", \"type\":\"int\", \"default\": -1}," + "{\"name\":\"value\", \"type\":\"long\"}" + "]}";
    Schema readerSchema = Schema.parse(jsonSchema);
    AvroJob.setInputSchema(job, readerSchema);
    Path inputPath = new Path(inputDirectory + "/testavrofile-r-00000.avro");
    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<>(job, fileSplit);
    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<>(null);
    NullWritable ignore = NullWritable.get();
    long sumOfCounts = 0;
    long numOfCounts = 0;
    while (recordReader.next(inputPair, ignore)) {
      assertEquals(inputPair.datum().get(0), defaultRank);
      sumOfCounts += (Long) inputPair.datum().get(1);
      numOfCounts++;
    }
    assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
    long actualSumOfCounts = 0;
    for (Long count : WordCountUtil.COUNTS.values()) {
      actualSumOfCounts += count;
    }
    assertEquals(sumOfCounts, actualSumOfCounts);
  }

  /**
   * Validates the string-schema named output "myavro1": each record is the
   * toString() of a Pair, so the count is parsed back out of the text.
   */
  @SuppressWarnings("deprecation")
  // Test for a different schema output
  public void testProjection1(String inputDirectory) throws Exception {
    JobConf job = new JobConf();
    Schema readerSchema = Schema.create(Schema.Type.STRING);
    AvroJob.setInputSchema(job, readerSchema);
    Path inputPath = new Path(inputDirectory + "/myavro1-r-00000.avro");
    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
    AvroWrapper<Utf8> inputPair = new AvroWrapper<>(null);
    NullWritable ignore = NullWritable.get();
    AvroRecordReader<Utf8> recordReader = new AvroRecordReader<>(job, fileSplit);
    long sumOfCounts = 0;
    long numOfCounts = 0;
    while (recordReader.next(inputPair, ignore)) {
      // Record text looks like a JSON-ish Pair dump; the count follows the
      // second ':' and precedes the closing '}'.
      sumOfCounts += Long.parseLong(inputPair.datum().toString().split(":")[2].replace("}", "").trim());
      numOfCounts++;
    }
    assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
    long actualSumOfCounts = 0;
    for (Long count : WordCountUtil.COUNTS.values()) {
      actualSumOfCounts += count;
    }
    assertEquals(sumOfCounts, actualSumOfCounts);
  }

  /**
   * Same as {@link #testProjection1(String)}, but against the base-named
   * string output "testavrofile1".
   */
  @SuppressWarnings("deprecation")
  // Test for a different schema output
  public void testProjectionNewMethodsTwo(String inputDirectory) throws Exception {
    JobConf job = new JobConf();
    Schema readerSchema = Schema.create(Schema.Type.STRING);
    AvroJob.setInputSchema(job, readerSchema);
    Path inputPath = new Path(inputDirectory + "/testavrofile1-r-00000.avro");
    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
    AvroWrapper<Utf8> inputPair = new AvroWrapper<>(null);
    NullWritable ignore = NullWritable.get();
    AvroRecordReader<Utf8> recordReader = new AvroRecordReader<>(job, fileSplit);
    long sumOfCounts = 0;
    long numOfCounts = 0;
    while (recordReader.next(inputPair, ignore)) {
      sumOfCounts += Long.parseLong(inputPair.datum().toString().split(":")[2].replace("}", "").trim());
      numOfCounts++;
    }
    assertEquals(numOfCounts, WordCountUtil.COUNTS.size());
    long actualSumOfCounts = 0;
    for (Long count : WordCountUtil.COUNTS.values()) {
      actualSumOfCounts += count;
    }
    assertEquals(sumOfCounts, actualSumOfCounts);
  }

  /**
   * Map-only variant (zero reducers): only the mapper's "myavro2" named
   * output is registered; it reuses (and first clears) the shared output dir.
   */
  @SuppressWarnings("deprecation")
  public void testJobNoreducer() throws Exception {
    JobConf job = new JobConf();
    job.setNumReduceTasks(0);
    Path outputPath = new Path(OUTPUT_DIR.getPath());
    outputPath.getFileSystem(job).delete(outputPath, true);
    WordCountUtil.writeLinesFile(new File(INPUT_DIR, "lines.avro"));
    job.setJobName("AvroMultipleOutputs_noreducer");
    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
    AvroJob.setOutputSchema(job, new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema());
    AvroJob.setMapperClass(job, MapImpl.class);
    FileInputFormat.setInputPaths(job, new Path(INPUT_DIR.toString()));
    FileOutputFormat.setOutputPath(job, outputPath);
    FileOutputFormat.setCompressOutput(job, false);
    AvroMultipleOutputs.addNamedOutput(job, "myavro2", AvroOutputFormat.class, Schema.create(Schema.Type.STRING));
    JobClient.runJob(job);
  }

  /**
   * Validates the map-only "myavro2" output: every record is an un-reduced
   * (word, 1) pair rendered as text, so each parsed count must be exactly 1.
   */
  public void testProjectionNoreducer(String inputDirectory) throws Exception {
    JobConf job = new JobConf();
    long onel = 1;
    Schema readerSchema = Schema.create(Schema.Type.STRING);
    AvroJob.setInputSchema(job, readerSchema);
    Path inputPath = new Path(inputDirectory + "/myavro2-m-00000.avro");
    FileStatus fileStatus = FileSystem.get(job).getFileStatus(inputPath);
    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), (String[]) null);
    AvroRecordReader<Utf8> recordReader = new AvroRecordReader<>(job, fileSplit);
    AvroWrapper<Utf8> inputPair = new AvroWrapper<>(null);
    NullWritable ignore = NullWritable.get();
    while (recordReader.next(inputPair, ignore)) {
      long testl = Long.parseLong(inputPair.datum().toString().split(":")[2].replace("}", "").trim());
      assertEquals(onel, testl);
    }
  }
}
6,979
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroMultipleInputs.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import java.io.IOException;
import java.io.File;
import java.io.InputStream;
import java.io.FileInputStream;
import java.io.BufferedInputStream;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.Reporter;

import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.reflect.ReflectDatumReader;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

/**
 * Exercises {@link AvroMultipleInputs}: two reflect-schema input directories
 * (names and balances), each with its own mapper, joined on id by a single
 * reducer into complete records.
 */
public class TestAvroMultipleInputs {

  @TempDir
  public File OUTPUT_DIR;

  @TempDir
  public File INPUT_DIR_1;

  @TempDir
  public File INPUT_DIR_2;

  /**
   * The input-1 record.
   */
  public static class NamesRecord {
    private int id = -1;
    private CharSequence name = "";

    public NamesRecord() {
    }

    public NamesRecord(int id, CharSequence name) {
      this.id = id;
      this.name = name;
    }

    @Override
    public String toString() {
      return id + "\t" + name;
    }
  }

  /**
   * The input-2 record.
   */
  public static class BalancesRecord {
    private int id = -1;
    private long balance = 0L;

    public BalancesRecord() {
    }

    public BalancesRecord(int id, long balance) {
      this.id = id;
      this.balance = balance;
    }

    @Override
    public String toString() {
      return id + "\t" + balance;
    }
  }

  /**
   * The map output key record.
   */
  public static class KeyRecord {
    private int id = -1;

    public KeyRecord() {
    }

    public KeyRecord(int id) {
      this.id = id;
    }

    @Override
    public String toString() {
      return ((Integer) id).toString();
    }
  }

  /**
   * The common map output value record. Carries a tag specifying what source
   * record type was.
   */
  public static class JoinableRecord {
    private int id = -1;
    private CharSequence name = "";
    private long balance = 0L;
    // Source-class name of the originating record; the reducer branches on it.
    private CharSequence recType = "";

    public JoinableRecord() {
    }

    public JoinableRecord(CharSequence recType, int id, CharSequence name, long balance) {
      this.id = id;
      this.recType = recType;
      this.name = name;
      this.balance = balance;
    }

    @Override
    public String toString() {
      return recType.toString();
    }
  }

  /**
   * The output, combined record.
   */
  public static class CompleteRecord {
    private int id = -1;
    private CharSequence name = "";
    private long balance = 0L;

    public CompleteRecord() {
    }

    public CompleteRecord(int id, CharSequence name, long balance) {
      this.name = name;
      this.id = id;
      this.balance = balance;
    }

    void setId(int id) {
      this.id = id;
    }

    void setName(CharSequence name) {
      this.name = name;
    }

    void setBalance(long balance) {
      this.balance = balance;
    }

    @Override
    public String toString() {
      return id + "\t" + name + "\t" + balance;
    }
  }

  /** Mapper for input 1: emits (id, joinable-with-name, balance=-1). */
  public static class NamesMapImpl extends AvroMapper<NamesRecord, Pair<KeyRecord, JoinableRecord>> {
    @Override
    public void map(NamesRecord nameRecord, AvroCollector<Pair<KeyRecord, JoinableRecord>> collector, Reporter reporter)
        throws IOException {
      collector.collect(new Pair<>(new KeyRecord(nameRecord.id),
          new JoinableRecord(nameRecord.getClass().getName(), nameRecord.id, nameRecord.name, -1L)));
    }
  }

  /** Mapper for input 2: emits (id, joinable-with-balance, name=""). */
  public static class BalancesMapImpl extends AvroMapper<BalancesRecord, Pair<KeyRecord, JoinableRecord>> {
    @Override
    public void map(BalancesRecord balanceRecord, AvroCollector<Pair<KeyRecord, JoinableRecord>> collector,
        Reporter reporter) throws IOException {
      collector.collect(new Pair<>(new KeyRecord(balanceRecord.id),
          new JoinableRecord(balanceRecord.getClass().getName(), balanceRecord.id, "", balanceRecord.balance)));
    }
  }

  /**
   * Join reducer: merges the name-tagged and balance-tagged records sharing an
   * id into one CompleteRecord, using recType to decide which field to take.
   */
  public static class ReduceImpl extends AvroReducer<KeyRecord, JoinableRecord, CompleteRecord> {
    @Override
    public void reduce(KeyRecord ID, Iterable<JoinableRecord> joinables, AvroCollector<CompleteRecord> collector,
        Reporter reporter) throws IOException {
      CompleteRecord rec = new CompleteRecord();
      for (JoinableRecord joinable : joinables) {
        rec.setId(joinable.id);
        if (joinable.recType.toString().contains("NamesRecord")) {
          rec.setName(joinable.name);
        } else {
          rec.setBalance(joinable.balance);
        }
      }
      collector.collect(rec);
    }
  }

  /** End-to-end: write both inputs, run the join job, validate the output. */
  @Test
  void job() throws Exception {
    JobConf job = new JobConf();
    Path inputPath1 = new Path(INPUT_DIR_1.getPath());
    Path inputPath2 = new Path(INPUT_DIR_2.getPath());
    Path outputPath = new Path(OUTPUT_DIR.getPath());

    outputPath.getFileSystem(job).delete(outputPath, true);

    writeNamesFiles(new File(inputPath1.toUri().getPath()));
    writeBalancesFiles(new File(inputPath2.toUri().getPath()));

    job.setJobName("multiple-inputs-join");
    // Each input directory gets its own mapper and reflect-derived schema.
    AvroMultipleInputs.addInputPath(job, inputPath1, NamesMapImpl.class, ReflectData.get().getSchema(NamesRecord.class));
    AvroMultipleInputs.addInputPath(job, inputPath2, BalancesMapImpl.class,
        ReflectData.get().getSchema(BalancesRecord.class));

    Schema keySchema = ReflectData.get().getSchema(KeyRecord.class);
    Schema valueSchema = ReflectData.get().getSchema(JoinableRecord.class);
    AvroJob.setMapOutputSchema(job, Pair.getPairSchema(keySchema, valueSchema));
    AvroJob.setOutputSchema(job, ReflectData.get().getSchema(CompleteRecord.class));
    AvroJob.setReducerClass(job, ReduceImpl.class);

    // Single reducer so all ids land in one deterministic output file.
    job.setNumReduceTasks(1);

    FileOutputFormat.setOutputPath(job, outputPath);
    AvroJob.setReflect(job);

    JobClient.runJob(job);

    validateCompleteFile(new File(OUTPUT_DIR, "part-00000.avro"));
  }

  /**
   * Writes a "names.avro" file with five sequential <id, name> pairs.
   */
  private void writeNamesFiles(File dir) throws IOException {
    DatumWriter<NamesRecord> writer = new ReflectDatumWriter<>();
    File namesFile = new File(dir + "/names.avro");
    try (DataFileWriter<NamesRecord> out = new DataFileWriter<>(writer)) {
      out.create(ReflectData.get().getSchema(NamesRecord.class), namesFile);
      for (int i = 0; i < 5; i++) {
        out.append(new NamesRecord(i, "record" + i));
      }
    }
  }

  /**
   * Writes a "balances.avro" file with five sequential <id, balance> pairs.
   */
  private void writeBalancesFiles(File dir) throws IOException {
    DatumWriter<BalancesRecord> writer = new ReflectDatumWriter<>();
    File namesFile = new File(dir + "/balances.avro");
    try (DataFileWriter<BalancesRecord> out = new DataFileWriter<>(writer)) {
      out.create(ReflectData.get().getSchema(BalancesRecord.class), namesFile);
      for (int i = 0; i < 5; i++) {
        out.append(new BalancesRecord(i, (long) i + 100));
      }
    }
  }

  /**
   * Checks the joined output: ids are sequential, balance = id + 100, and
   * name = "record" + id, for exactly five records.
   */
  private void validateCompleteFile(File file) throws Exception {
    DatumReader<CompleteRecord> reader = new ReflectDatumReader<>();
    int numRecs = 0;
    try (InputStream in = new BufferedInputStream(new FileInputStream(file))) {
      try (DataFileStream<CompleteRecord> records = new DataFileStream<>(in, reader)) {
        for (CompleteRecord rec : records) {
          assertEquals(rec.id, numRecs);
          assertEquals(rec.balance - 100, rec.id);
          assertEquals(rec.name, "record" + rec.id);
          numRecs++;
        }
      }
    }
    assertEquals(5, numRecs);
  }
}
6,980
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestReflectJob.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred; import java.io.IOException; import java.io.File; import java.io.InputStream; import java.io.FileInputStream; import java.io.BufferedInputStream; import static org.junit.jupiter.api.Assertions.assertEquals; import java.util.StringTokenizer; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.Reporter; import org.apache.avro.io.DatumReader; import org.apache.avro.io.DatumWriter; import org.apache.avro.file.DataFileWriter; import org.apache.avro.file.DataFileStream; import org.apache.avro.reflect.ReflectData; import org.apache.avro.reflect.ReflectDatumWriter; import org.apache.avro.reflect.ReflectDatumReader; import org.junit.jupiter.api.Test; public class TestReflectJob { /** The input class. */ public static class Text { private String text = ""; public Text() { } public Text(String text) { this.text = text; } public String toString() { return text; } } /** The intermediate data class. 
*/ public static class Count { private long count; public Count() { } public Count(long count) { this.count = count; } } /** The output class. */ public static class WordCount { private String word; private long count; public WordCount() { } public WordCount(String word, long count) { this.word = word; this.count = count; } } public static class MapImpl extends AvroMapper<Text, Pair<Text, Count>> { @Override public void map(Text text, AvroCollector<Pair<Text, Count>> collector, Reporter reporter) throws IOException { StringTokenizer tokens = new StringTokenizer(text.toString()); while (tokens.hasMoreTokens()) collector.collect(new Pair<>(new Text(tokens.nextToken()), new Count(1L))); } } public static class ReduceImpl extends AvroReducer<Text, Count, WordCount> { @Override public void reduce(Text word, Iterable<Count> counts, AvroCollector<WordCount> collector, Reporter reporter) throws IOException { long sum = 0; for (Count count : counts) sum += count.count; collector.collect(new WordCount(word.text, sum)); } } @Test @SuppressWarnings("deprecation") void job() throws Exception { JobConf job = new JobConf(); String dir = "target/testReflectJob"; Path inputPath = new Path(dir + "/in"); Path outputPath = new Path(dir + "/out"); outputPath.getFileSystem(job).delete(outputPath); inputPath.getFileSystem(job).delete(inputPath); writeLinesFile(new File(dir + "/in")); job.setJobName("reflect"); AvroJob.setInputSchema(job, ReflectData.get().getSchema(Text.class)); AvroJob.setMapOutputSchema(job, new Pair(new Text(""), new Count(0L)).getSchema()); AvroJob.setOutputSchema(job, ReflectData.get().getSchema(WordCount.class)); AvroJob.setMapperClass(job, MapImpl.class); // AvroJob.setCombinerClass(job, ReduceImpl.class); AvroJob.setReducerClass(job, ReduceImpl.class); FileInputFormat.setInputPaths(job, inputPath); FileOutputFormat.setOutputPath(job, outputPath); AvroJob.setReflect(job); // use reflection JobClient.runJob(job); validateCountsFile(new File(new File(dir, "out"), 
"part-00000.avro")); } private void writeLinesFile(File dir) throws IOException { DatumWriter<Text> writer = new ReflectDatumWriter<>(); DataFileWriter<Text> out = new DataFileWriter<>(writer); File linesFile = new File(dir + "/lines.avro"); dir.mkdirs(); out.create(ReflectData.get().getSchema(Text.class), linesFile); for (String line : WordCountUtil.LINES) out.append(new Text(line)); out.close(); } private void validateCountsFile(File file) throws Exception { DatumReader<WordCount> reader = new ReflectDatumReader<>(); InputStream in = new BufferedInputStream(new FileInputStream(file)); DataFileStream<WordCount> counts = new DataFileStream<>(in, reader); int numWords = 0; for (WordCount wc : counts) { assertEquals(WordCountUtil.COUNTS.get(wc.word), (Long) wc.count, wc.word); numWords++; } in.close(); assertEquals(WordCountUtil.COUNTS.size(), numWords); } }
6,981
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/TestPair.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred;

import java.util.ArrayList;

import org.apache.avro.AvroRuntimeException;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class TestPair {

  /**
   * Schema inference cannot determine an element type for an empty, untyped
   * collection, so constructing such a Pair must fail with an
   * AvroRuntimeException whose message starts with "Cannot infer schema".
   *
   * Uses JUnit 5's assertThrows instead of the try/catch/fail idiom: it
   * fails automatically when no exception is thrown and returns the caught
   * exception for further assertions.
   */
  @Test
  void collectionFailure() throws Exception {
    AvroRuntimeException e = assertThrows(AvroRuntimeException.class, () -> new Pair("foo", new ArrayList()));
    assertTrue(e.getMessage().startsWith("Cannot infer schema"));
  }
}
6,982
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTaskRunner.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.avro.mapred.tether;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.avro.ipc.SaslSocketServer;
import org.apache.avro.ipc.specific.SpecificResponder;
import org.apache.avro.ipc.Server;
import org.apache.avro.ipc.jetty.HttpServer;

/**
 * Java implementation of a tether executable. Useless except for testing, since
 * it's already possible to write Java MapReduce programs without tethering.
 * Also serves as an example of how a framework may be implemented.
 */
public class TetherTaskRunner implements InputProtocol {
  static final Logger LOG = LoggerFactory.getLogger(TetherTaskRunner.class);

  // RPC server over which the parent process pushes input to this task.
  private Server inputServer;

  // The user task whose map/reduce callbacks this runner drives.
  private TetherTask task;

  /**
   * Starts an input server (SASL socket or HTTP, chosen by the
   * AVRO_TETHER_PROTOCOL environment variable) and connects the task's
   * output channel back to the parent.
   *
   * @param task the tether task to run
   * @throws IOException if the server or the output connection cannot be
   *           established
   * @throws RuntimeException if AVRO_TETHER_PROTOCOL is unset or names an
   *           unsupported protocol
   */
  public TetherTaskRunner(TetherTask task) throws IOException {
    this.task = task;

    // determine what protocol we are using
    String protocol = System.getenv("AVRO_TETHER_PROTOCOL");
    if (protocol == null) {
      throw new RuntimeException("AVRO_TETHER_PROTOCOL env var is null");
    }

    protocol = protocol.trim().toLowerCase();

    TetheredProcess.Protocol proto;
    if (protocol.equals("http")) {
      LOG.info("Use HTTP protocol");
      proto = TetheredProcess.Protocol.HTTP;
    } else if (protocol.equals("sasl")) {
      LOG.info("Use SASL protocol");
      proto = TetheredProcess.Protocol.SASL;
    } else {
      throw new RuntimeException("AVRO_TETHER_PROTOCOL=" + protocol + " but this protocol is unsupported");
    }

    // Port 0: let the OS pick a free port; the actual port is reported to the
    // parent via task.open() below.
    InetSocketAddress iaddress = new InetSocketAddress(0);

    switch (proto) {
    case SASL:
      // start input server
      this.inputServer = new SaslSocketServer(new SpecificResponder(InputProtocol.class, this), iaddress);
      LOG.info("Started SaslSocketServer on port:" + iaddress.getPort());
      break;
    case HTTP:
      this.inputServer = new HttpServer(new SpecificResponder(InputProtocol.class, this), iaddress.getPort());
      LOG.info("Started HttpServer on port:" + iaddress.getPort());
      break;
    }

    inputServer.start();

    // open output to parent
    task.open(inputServer.getPort());
  }

  /** Forwards the task-type and schema configuration to the task. */
  @Override
  public void configure(TaskType taskType, String inSchema, String outSchema) {
    LOG.info("got configure");
    task.configure(taskType, inSchema, outSchema);
  }

  /** Delivers a batch of serialized input records to the task. */
  @Override
  public synchronized void input(ByteBuffer data, long count) {
    task.input(data, count);
  }

  /** Tells the task how many output partitions the job uses. */
  @Override
  public void partitions(int partitions) {
    task.partitions(partitions);
  }

  /** Abort request from the parent: shut the task and server down. */
  @Override
  public void abort() {
    LOG.info("got abort");
    close();
  }

  /** Parent signals that all input has been delivered. */
  @Override
  public synchronized void complete() {
    LOG.info("got input complete");
    task.complete();
  }

  /**
   * Wait for task to complete.
   */
  public void join() throws InterruptedException {
    LOG.info("TetherTaskRunner: Start join.");
    inputServer.join();
    LOG.info("TetherTaskRunner: Finish join.");
  }

  // Closes the task first, then the input server (null-checked because
  // construction may fail before the server is assigned).
  private void close() {
    LOG.info("Closing the task");
    task.close();
    LOG.info("Finished closing the task.");
    if (inputServer != null)
      inputServer.close();
  }
}
6,983
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/WordCountTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.mapred.tether; import java.io.IOException; import java.util.StringTokenizer; import org.apache.avro.mapred.Pair; import org.apache.avro.util.Utf8; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Example Java tethered mapreduce executable. Implements map and reduce * functions for word count. */ public class WordCountTask extends TetherTask<Utf8, Pair<Utf8, Long>, Pair<Utf8, Long>> { static final Logger LOG = LoggerFactory.getLogger(WordCountTask.class); @Override public void map(Utf8 text, Collector<Pair<Utf8, Long>> collector) throws IOException { StringTokenizer tokens = new StringTokenizer(text.toString()); while (tokens.hasMoreTokens()) collector.collect(new Pair<>(new Utf8(tokens.nextToken()), 1L)); } private long sum; @Override public void reduce(Pair<Utf8, Long> wc, Collector<Pair<Utf8, Long>> c) { sum += wc.value(); } @Override public void reduceFlush(Pair<Utf8, Long> wc, Collector<Pair<Utf8, Long>> c) throws IOException { wc.value(sum); c.collect(wc); sum = 0; } public static void main(String[] args) throws Exception { new TetherTaskRunner(new WordCountTask()).join(); LOG.info("WordCountTask finished"); } }
6,984
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TestWordCountTether.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mapred.tether;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import org.apache.avro.file.DataFileStream;
import org.apache.avro.io.DatumReader;
import org.apache.avro.mapred.AvroJob;
import org.apache.avro.mapred.WordCountUtil;
import org.apache.avro.mapred.Pair;
import org.apache.avro.Schema;
import org.apache.avro.util.Utf8;
import org.apache.avro.specific.SpecificDatumReader;

/**
 * End-to-end test of a tethered word-count job, launching the child task as a
 * separate JVM process over both supported transports.
 *
 * See also TestTetherTool for an example of how to submit jobs using the
 * tether tool.
 */
public class TestWordCountTether {

  // Fresh per-test directories for the Avro input file and the job output.
  @TempDir
  public File INPUT_DIR;

  @TempDir
  public File OUTPUT_DIR;

  /**
   * Run a word-count job using the given transport protocol and verify the
   * word counts in the job output against WordCountUtil's expected counts.
   *
   * @param proto transport protocol name ("sasl" or "http")
   */
  private void _runjob(String proto) throws Exception {
    String outputPathStr = OUTPUT_DIR.getPath();
    File inputPath = new File(INPUT_DIR, "lines.avro");
    JobConf job = new JobConf();
    Path outputPath = new Path(outputPathStr);
    // Clear any stale output from a previous run before the job starts.
    outputPath.getFileSystem(job).delete(outputPath, true);
    // create the input file
    WordCountUtil.writeLinesFile(inputPath);
    // The tethered child process is another JVM running WordCountTask,
    // launched with this test's own classpath.
    File exec = new File(System.getProperty("java.home") + "/bin/java");
    // create a string of the arguments
    List<String> execargs = new ArrayList<>();
    execargs.add("-classpath");
    execargs.add(System.getProperty("java.class.path"));
    execargs.add("org.apache.avro.mapred.tether.WordCountTask");
    FileInputFormat.addInputPaths(job, inputPath.toString());
    FileOutputFormat.setOutputPath(job, outputPath);
    TetherJob.setExecutable(job, exec, execargs, false);
    // Output schema is Pair<Utf8, Long>, derived from a throwaway instance.
    Schema outscheme = new Pair<Utf8, Long>(new Utf8(""), 0L).getSchema();
    AvroJob.setInputSchema(job, Schema.create(Schema.Type.STRING));
    job.set(AvroJob.OUTPUT_SCHEMA, outscheme.toString());
    TetherJob.setProtocol(job, proto);
    TetherJob.runJob(job);
    // validate the output
    DatumReader<Pair<Utf8, Long>> reader = new SpecificDatumReader<>();
    DataFileStream<Pair<Utf8, Long>> counts = new DataFileStream<>(
        new BufferedInputStream(new FileInputStream(outputPath + "/part-00000.avro")), reader);
    int numWords = 0;
    for (Pair<Utf8, Long> wc : counts) {
      assertEquals(WordCountUtil.COUNTS.get(wc.key().toString()), wc.value(), wc.key().toString());
      numWords++;
    }
    counts.close();
    // Every expected word must appear exactly once in the output.
    assertEquals(WordCountUtil.COUNTS.size(), numWords);
  }

  /**
   * Test the job using the sasl protocol
   *
   * @throws Exception
   */
  @Test
  @SuppressWarnings("deprecation")
  void job() throws Exception {
    _runjob("sasl");
  }

  /**
   * Test the job using the http protocol
   *
   * @throws Exception
   */
  @Test
  @SuppressWarnings("deprecation")
  void htp() throws Exception {
    _runjob("http");
  }
}
6,985
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapred/tether/TetherTask.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.avro.mapred.tether;

import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.net.InetSocketAddress;
import java.net.URL;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.avro.Schema;
import org.apache.avro.ipc.HttpTransceiver;
import org.apache.avro.ipc.Transceiver;
import org.apache.avro.ipc.SaslSocketTransceiver;
import org.apache.avro.ipc.specific.SpecificRequestor;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;

/**
 * Base class for Java tether mapreduce programs. Useless except for testing,
 * since it's already possible to write Java MapReduce programs without
 * tethering. Also serves as an example of how a framework may be implemented.
 *
 * @param <IN>  input record type (map input)
 * @param <MID> intermediate record type (map output / reduce input)
 * @param <OUT> output record type (reduce output)
 */
public abstract class TetherTask<IN, MID, OUT> {
  static final Logger LOG = LoggerFactory.getLogger(TetherTask.class);

  // RPC channel back to the parent (tethering) process.
  private Transceiver clientTransceiver;
  private OutputProtocol outputClient;

  private TaskType taskType;
  private int partitions;

  private DecoderFactory decoderFactory = DecoderFactory.get();
  private BinaryDecoder decoder;
  private SpecificDatumReader<IN> inReader;
  private SpecificDatumReader<MID> midReader;
  private IN inRecord;
  // midRecord / midRecordSpare are swapped on each read so that the previous
  // record stays valid long enough to detect key-run boundaries in input().
  private MID midRecord;
  private MID midRecordSpare;
  private Collector<MID> midCollector;
  private Collector<OUT> outCollector;

  /** Byte buffer that exposes its backing array without copying. */
  private static class Buffer extends ByteArrayOutputStream {
    /** Returns a view of the bytes written so far (no copy). */
    public ByteBuffer data() {
      return ByteBuffer.wrap(buf, 0, count);
    }
  }

  /** Collector for map and reduce output values. */
  public class Collector<T> {
    private SpecificDatumWriter<T> writer;
    private Buffer buffer = new Buffer();
    private BinaryEncoder encoder = new EncoderFactory().configureBlockSize(512).binaryEncoder(buffer, null);

    private Collector(Schema schema) {
      this.writer = new SpecificDatumWriter<>(schema);
    }

    /** Collect a map or reduce output value. */
    public void collect(T record) throws IOException {
      buffer.reset();
      writer.write(record, encoder);
      encoder.flush();
      outputClient.output(buffer.data());
    }

    /** Collect a pre-partitioned map output value. */
    public void collect(T record, int partition) throws IOException {
      buffer.reset();
      writer.write(record, encoder);
      encoder.flush();
      outputClient.outputPartitioned(partition, buffer.data());
    }
  }

  /**
   * Connects back to the parent process (whose port and protocol are supplied
   * via the AVRO_TETHER_OUTPUT_PORT / AVRO_TETHER_PROTOCOL environment
   * variables) and then reports this task's own input port to the parent.
   *
   * @param inputPort the port this task listens on, sent to the parent
   * @throws RuntimeException if either environment variable is missing or the
   *                          protocol is not "http" or "sasl"
   */
  void open(int inputPort) throws IOException {
    // open output client, connecting to parent
    String clientPortString = System.getenv("AVRO_TETHER_OUTPUT_PORT");
    String protocol = System.getenv("AVRO_TETHER_PROTOCOL");
    if (clientPortString == null)
      throw new RuntimeException("AVRO_TETHER_OUTPUT_PORT env var is null");
    int clientPort = Integer.parseInt(clientPortString);

    if (protocol == null) {
      throw new RuntimeException("AVRO_TETHER_PROTOCOL env var is null");
    }

    protocol = protocol.trim().toLowerCase();
    TetheredProcess.Protocol proto;
    if (protocol.equals("http")) {
      proto = TetheredProcess.Protocol.HTTP;
    } else if (protocol.equals("sasl")) {
      proto = TetheredProcess.Protocol.SASL;
    } else {
      // FIX: message previously said "AVROT_TETHER_PROTOCOL", misnaming the
      // actual environment variable.
      throw new RuntimeException("AVRO_TETHER_PROTOCOL=" + protocol + " but this protocol is unsupported");
    }

    switch (proto) {
    case SASL:
      this.clientTransceiver = new SaslSocketTransceiver(new InetSocketAddress(clientPort));
      this.outputClient = SpecificRequestor.getClient(OutputProtocol.class, clientTransceiver);
      break;
    case HTTP:
      this.clientTransceiver = new HttpTransceiver(new URL("http://127.0.0.1:" + clientPort));
      this.outputClient = SpecificRequestor.getClient(OutputProtocol.class, clientTransceiver);
      break;
    }

    // send inputPort to parent
    outputClient.configure(inputPort);
  }

  /**
   * Configures this task for its role. For MAP tasks the input schema is the
   * job input and the collector emits intermediate records; for REDUCE tasks
   * the input schema is the intermediate schema and the collector emits final
   * output. Any parse/setup failure is reported to the parent via fail().
   */
  void configure(TaskType taskType, CharSequence inSchemaText, CharSequence outSchemaText) {
    this.taskType = taskType;
    try {
      Schema inSchema = new Schema.Parser().parse(inSchemaText.toString());
      Schema outSchema = new Schema.Parser().parse(outSchemaText.toString());
      switch (taskType) {
      case MAP:
        this.inReader = new SpecificDatumReader<>(inSchema);
        this.midCollector = new Collector<>(outSchema);
        break;
      case REDUCE:
        this.midReader = new SpecificDatumReader<>(inSchema);
        this.outCollector = new Collector<>(outSchema);
        break;
      }
    } catch (Throwable e) {
      fail(e.toString());
    }
  }

  /** Records the number of map output partitions, as told by the parent. */
  void partitions(int partitions) {
    this.partitions = partitions;
  }

  /** Return the number of map output partitions of this job. */
  public int partitions() {
    return partitions;
  }

  /**
   * Decodes {@code count} records from {@code data} and feeds them to map() or
   * reduce(). For reduces, a change in record value marks the end of a run of
   * equal keys and triggers reduceFlush() for the previous record.
   *
   * NOTE(review): decoding uses data.array() directly and assumes records
   * start at offset 0 of the backing array — confirm callers never pass a
   * sliced/offset buffer.
   */
  void input(ByteBuffer data, long count) {
    try {
      decoder = decoderFactory.binaryDecoder(data.array(), decoder);
      for (long i = 0; i < count; i++) {
        switch (taskType) {
        case MAP:
          inRecord = inReader.read(inRecord, decoder);
          map(inRecord, midCollector);
          break;
        case REDUCE:
          MID prev = midRecord;
          midRecord = midReader.read(midRecordSpare, decoder);
          if (prev != null && !midRecord.equals(prev))
            reduceFlush(prev, outCollector);
          reduce(midRecord, outCollector);
          midRecordSpare = prev;
          break;
        }
      }
    } catch (Throwable e) {
      LOG.warn("failing: " + e, e);
      fail(e.toString());
    }
  }

  /**
   * Flushes the final run of reduce keys (if any) and notifies the parent
   * that this task is complete.
   */
  void complete() {
    if (taskType == TaskType.REDUCE && midRecord != null)
      try {
        reduceFlush(midRecord, outCollector);
      } catch (Throwable e) {
        LOG.warn("failing: " + e, e);
        fail(e.toString());
      }
    LOG.info("TetherTask: Sending complete to parent process.");
    outputClient.complete();
    LOG.info("TetherTask: Done sending complete to parent process.");
  }

  /** Called with input values to generate intermediate values. */
  public abstract void map(IN record, Collector<MID> collector) throws IOException;

  /** Called with sorted intermediate values. */
  public abstract void reduce(MID record, Collector<OUT> collector) throws IOException;

  /** Called with the last intermediate value in each equivalence run. */
  public abstract void reduceFlush(MID record, Collector<OUT> collector) throws IOException;

  /** Call to update task status. */
  public void status(String message) {
    outputClient.status(message);
  }

  /** Call to increment a counter. */
  public void count(String group, String name, long amount) {
    outputClient.count(group, name, amount);
  }

  /** Call to fail the task: reports the message to the parent, then closes. */
  public void fail(String message) {
    outputClient.fail(message);
    close();
  }

  /** Closes the connection to the parent; a failure to close is best-effort. */
  void close() {
    LOG.info("Closing the transceiver");
    if (clientTransceiver != null)
      try {
        clientTransceiver.close();
      } catch (IOException ignored) {
        // Best-effort close during shutdown; nothing useful can be done here.
      }
  }
}
6,986
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/util/TestAvroCharSequenceComparator.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.hadoop.util;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import org.apache.avro.util.Utf8;

/**
 * Tests that AvroCharSequenceComparator orders String and Utf8 operands
 * consistently — including when the two argument types are mixed.
 */
public class TestAvroCharSequenceComparator {
  private AvroCharSequenceComparator<CharSequence> mComparator;

  @BeforeEach
  public void setup() {
    mComparator = new AvroCharSequenceComparator<>();
  }

  /** Lexicographic ordering over plain String operands. */
  @Test
  void compareString() {
    assertEquals(0, mComparator.compare("", ""));
    assertThat(mComparator.compare("", "a"), lessThan(0));
    assertThat(mComparator.compare("a", ""), greaterThan(0));

    assertEquals(0, mComparator.compare("a", "a"));
    assertThat(mComparator.compare("a", "b"), lessThan(0));
    assertThat(mComparator.compare("b", "a"), greaterThan(0));

    assertEquals(0, mComparator.compare("ab", "ab"));
    // A strict prefix sorts before the longer string.
    assertThat(mComparator.compare("a", "aa"), lessThan(0));
    assertThat(mComparator.compare("aa", "a"), greaterThan(0));
    assertThat(mComparator.compare("abc", "abcdef"), lessThan(0));
    assertThat(mComparator.compare("abcdef", "abc"), greaterThan(0));
  }

  /** Same ordering expectations with both operands wrapped in Utf8. */
  @Test
  void compareUtf8() {
    assertEquals(0, mComparator.compare(new Utf8(""), new Utf8("")));
    assertThat(mComparator.compare(new Utf8(""), new Utf8("a")), lessThan(0));
    assertThat(mComparator.compare(new Utf8("a"), new Utf8("")), greaterThan(0));

    assertEquals(0, mComparator.compare(new Utf8("a"), new Utf8("a")));
    assertThat(mComparator.compare(new Utf8("a"), new Utf8("b")), lessThan(0));
    assertThat(mComparator.compare(new Utf8("b"), new Utf8("a")), greaterThan(0));

    assertEquals(0, mComparator.compare(new Utf8("ab"), new Utf8("ab")));
    // A strict prefix sorts before the longer string.
    assertThat(mComparator.compare(new Utf8("a"), new Utf8("aa")), lessThan(0));
    assertThat(mComparator.compare(new Utf8("aa"), new Utf8("a")), greaterThan(0));
    assertThat(mComparator.compare(new Utf8("abc"), new Utf8("abcdef")), lessThan(0));
    assertThat(mComparator.compare(new Utf8("abcdef"), new Utf8("abc")), greaterThan(0));
  }

  /** Mixed-type comparisons: a Utf8 operand against a String operand. */
  @Test
  void compareUtf8ToString() {
    assertEquals(0, mComparator.compare(new Utf8(""), ""));
    assertThat(mComparator.compare(new Utf8(""), "a"), lessThan(0));
    assertThat(mComparator.compare(new Utf8("a"), ""), greaterThan(0));

    assertEquals(0, mComparator.compare(new Utf8("a"), "a"));
    assertThat(mComparator.compare(new Utf8("a"), "b"), lessThan(0));
    assertThat(mComparator.compare(new Utf8("b"), "a"), greaterThan(0));

    assertEquals(0, mComparator.compare(new Utf8("ab"), "ab"));
    // A strict prefix sorts before the longer string.
    assertThat(mComparator.compare(new Utf8("a"), "aa"), lessThan(0));
    assertThat(mComparator.compare(new Utf8("aa"), "a"), greaterThan(0));
    assertThat(mComparator.compare(new Utf8("abc"), "abcdef"), lessThan(0));
    assertThat(mComparator.compare(new Utf8("abcdef"), "abc"), greaterThan(0));
  }
}
6,987
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestHadoopCodecFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avro.hadoop.file; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.avro.file.CodecFactory; import org.junit.jupiter.api.Test; public class TestHadoopCodecFactory { @Test void hadoopCodecFactoryDeflate() { CodecFactory hadoopDeflateCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.DeflateCodec"); CodecFactory avroDeflateCodec = CodecFactory.fromString("deflate"); assertEquals(hadoopDeflateCodec.getClass(), avroDeflateCodec.getClass()); } @Test void hadoopCodecFactorySnappy() { CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.SnappyCodec"); CodecFactory avroSnappyCodec = CodecFactory.fromString("snappy"); assertEquals(hadoopSnappyCodec.getClass(), avroSnappyCodec.getClass()); } @Test void hadoopCodecFactoryBZip2() { CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.BZip2Codec"); CodecFactory avroSnappyCodec = CodecFactory.fromString("bzip2"); assertEquals(hadoopSnappyCodec.getClass(), avroSnappyCodec.getClass()); } @Test void hadoopCodecFactoryGZip() { CodecFactory 
hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.GZipCodec"); CodecFactory avroSnappyCodec = CodecFactory.fromString("deflate"); assertEquals(hadoopSnappyCodec.getClass(), avroSnappyCodec.getClass()); } @Test void hadoopCodecFactoryFail() { CodecFactory hadoopSnappyCodec = HadoopCodecFactory.fromHadoopString("org.apache.hadoop.io.compress.FooCodec"); assertNull(hadoopSnappyCodec); } }
6,988
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/file/TestSortedKeyValueFile.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.hadoop.file;

import static org.junit.jupiter.api.Assertions.*;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.specific.SpecificData;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.mapred.FsInput;
import org.apache.avro.io.DatumReader;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests for SortedKeyValueFile: sort-order enforcement, codec selection, the
 * index/data file layout produced by the writer, lookups through the reader,
 * and use of an alternate (reflect) data model.
 */
public class TestSortedKeyValueFile {
  private static final Logger LOG = LoggerFactory.getLogger(TestSortedKeyValueFile.class);

  @TempDir
  public File mTempDir;

  /** Appending keys out of sorted order must be rejected. */
  @Test
  void writeOutOfSortedOrder() throws IOException {
    assertThrows(IllegalArgumentException.class, () -> {
      LOG.debug("Writing some records to a SortedKeyValueFile...");

      Configuration conf = new Configuration();
      SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
          .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING))
          .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile"))
          .withIndexInterval(2); // Index every other record.

      try (SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options)) {
        Utf8 key = new Utf8(); // re-use key, to test copied
        writer.append(key.set("banana"), "Banana");
        writer.append(key.set("apple"), "Apple"); // Ruh, roh!
      }
    });
  }

  /** Every codec selected by name must be recorded in the data file metadata. */
  @Test
  void namedCodecs() throws IOException {
    Configuration conf = new Configuration();
    Path myfile = new Path(mTempDir.getPath(), "myfile");
    Schema key = Schema.create(Schema.Type.STRING);
    Schema value = Schema.create(Schema.Type.STRING);
    Schema recordSchema = AvroKeyValue.getSchema(key, value);
    DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
    DataFileReader<GenericRecord> reader;

    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options().withKeySchema(key)
        .withValueSchema(value).withConfiguration(conf).withPath(myfile);

    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer;
    for (String codec : new String[] { "null", "deflate", "snappy", "bzip2" }) {
      // FIX: log message was missing the space before "codec".
      LOG.debug("Using " + codec + " codec for a SortedKeyValueFile...");
      options.withCodec(codec);
      // An empty file is enough: only the recorded codec metadata is checked.
      writer = new SortedKeyValueFile.Writer<>(options);
      writer.close();
      reader = new DataFileReader<>(new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf),
          datumReader);
      assertEquals(codec, reader.getMetaString("avro.codec"));
      reader.close();
    }
  }

  /** A codec supplied as a CodecFactory instance must also be recorded. */
  @Test
  void deflateClassCodec() throws IOException {
    Configuration conf = new Configuration();
    Path myfile = new Path(mTempDir.getPath(), "myfile");
    Schema key = Schema.create(Schema.Type.STRING);
    Schema value = Schema.create(Schema.Type.STRING);
    Schema recordSchema = AvroKeyValue.getSchema(key, value);
    DatumReader<GenericRecord> datumReader = SpecificData.get().createDatumReader(recordSchema);
    DataFileReader<GenericRecord> reader;

    LOG.debug("Using CodecFactory.deflateCodec() for a SortedKeyValueFile...");
    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options().withKeySchema(key)
        .withValueSchema(value).withConfiguration(conf).withPath(myfile).withCodec(CodecFactory.deflateCodec(9));

    SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options);
    writer.close();
    reader = new DataFileReader<>(new FsInput(new Path(myfile, SortedKeyValueFile.DATA_FILENAME), conf), datumReader);
    assertEquals("deflate", reader.getMetaString("avro.codec"));
    reader.close();
  }

  /** An unrecognized codec name must be rejected with a descriptive message. */
  @Test
  void badCodec() {
    LOG.debug("Using a bad codec for a SortedKeyValueFile...");
    // FIX: the original try/catch passed silently when NO exception was
    // thrown; assertThrows fails the test in that case.
    AvroRuntimeException e = assertThrows(AvroRuntimeException.class,
        () -> new SortedKeyValueFile.Writer.Options().withCodec("foobar"));
    assertEquals("Unrecognized codec: foobar", e.getMessage());
  }

  /** Writes four records and verifies the generated index and data files. */
  @Test
  void writer() throws IOException {
    LOG.debug("Writing some records to a SortedKeyValueFile...");

    Configuration conf = new Configuration();
    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
        .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING))
        .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile"))
        .withIndexInterval(2); // Index every other record.

    try (SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(options)) {
      writer.append("apple", "Apple"); // Will be indexed.
      writer.append("banana", "Banana");
      writer.append("carrot", "Carrot"); // Will be indexed.
      writer.append("durian", "Durian");
    }

    LOG.debug("Checking the generated directory...");
    File directory = new File(mTempDir.getPath(), "myfile");
    assertTrue(directory.exists());

    LOG.debug("Checking the generated index file...");
    File indexFile = new File(directory, SortedKeyValueFile.INDEX_FILENAME);
    DatumReader<GenericRecord> indexReader = new GenericDatumReader<>(
        AvroKeyValue.getSchema(options.getKeySchema(), Schema.create(Schema.Type.LONG)));
    List<AvroKeyValue<CharSequence, Long>> indexRecords = new ArrayList<>();
    try (FileReader<GenericRecord> indexFileReader = DataFileReader.openReader(indexFile, indexReader)) {
      for (GenericRecord indexRecord : indexFileReader) {
        indexRecords.add(new AvroKeyValue<>(indexRecord));
      }
    }

    // With an index interval of 2, only "apple" and "carrot" are indexed.
    assertEquals(2, indexRecords.size());
    assertEquals("apple", indexRecords.get(0).getKey().toString());
    LOG.debug("apple's position in the file: " + indexRecords.get(0).getValue());
    assertEquals("carrot", indexRecords.get(1).getKey().toString());
    LOG.debug("carrot's position in the file: " + indexRecords.get(1).getValue());

    LOG.debug("Checking the generated data file...");
    File dataFile = new File(directory, SortedKeyValueFile.DATA_FILENAME);
    DatumReader<GenericRecord> dataReader = new GenericDatumReader<>(
        AvroKeyValue.getSchema(options.getKeySchema(), options.getValueSchema()));
    try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(dataFile, dataReader)) {
      // Seeking to an indexed position must land on the indexed record.
      dataFileReader.seek(indexRecords.get(0).getValue());
      assertTrue(dataFileReader.hasNext());
      AvroKeyValue<CharSequence, CharSequence> appleRecord = new AvroKeyValue<>(dataFileReader.next());
      assertEquals("apple", appleRecord.getKey().toString());
      assertEquals("Apple", appleRecord.getValue().toString());

      dataFileReader.seek(indexRecords.get(1).getValue());
      assertTrue(dataFileReader.hasNext());
      AvroKeyValue<CharSequence, CharSequence> carrotRecord = new AvroKeyValue<>(dataFileReader.next());
      assertEquals("carrot", carrotRecord.getKey().toString());
      assertEquals("Carrot", carrotRecord.getValue().toString());

      // Sequential reads after a seek continue through subsequent records.
      assertTrue(dataFileReader.hasNext());
      AvroKeyValue<CharSequence, CharSequence> durianRecord = new AvroKeyValue<>(dataFileReader.next());
      assertEquals("durian", durianRecord.getKey().toString());
      assertEquals("Durian", durianRecord.getValue().toString());
    }
  }

  /** Point lookups through the reader, including misses before/between/after keys. */
  @Test
  void reader() throws IOException {
    Configuration conf = new Configuration();
    SortedKeyValueFile.Writer.Options writerOptions = new SortedKeyValueFile.Writer.Options()
        .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING))
        .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile"))
        .withIndexInterval(2); // Index every other record.

    try (
        SortedKeyValueFile.Writer<CharSequence, CharSequence> writer = new SortedKeyValueFile.Writer<>(writerOptions)) {
      writer.append("apple", "Apple"); // Will be indexed.
      writer.append("banana", "Banana");
      writer.append("carrot", "Carrot"); // Will be indexed.
      writer.append("durian", "Durian");
    }

    LOG.debug("Reading the file back using a reader...");
    SortedKeyValueFile.Reader.Options readerOptions = new SortedKeyValueFile.Reader.Options()
        .withKeySchema(Schema.create(Schema.Type.STRING)).withValueSchema(Schema.create(Schema.Type.STRING))
        .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "myfile"));

    try (
        SortedKeyValueFile.Reader<CharSequence, CharSequence> reader = new SortedKeyValueFile.Reader<>(readerOptions)) {
      assertEquals("Carrot", reader.get("carrot").toString());
      assertEquals("Banana", reader.get("banana").toString());
      // Misses before the first key, between keys, and after the last key.
      assertNull(reader.get("a-vegetable"));
      assertNull(reader.get("beet"));
      assertNull(reader.get("zzz"));
    }
  }

  /**
   * Minimal comparable type used to exercise the reflect data model. Equality
   * is defined by string form so it can compare against CharSequence-like
   * values.
   */
  public static class Stringy implements Comparable<Stringy> {
    private String s;

    public Stringy() {
    }

    public Stringy(String s) {
      this.s = s;
    }

    @Override
    public String toString() {
      return s;
    }

    @Override
    public int hashCode() {
      return s.hashCode();
    }

    @Override
    public boolean equals(Object that) {
      // FIX: guard against null so equals(null) returns false instead of
      // throwing NullPointerException (equals contract).
      return that != null && this.s.equals(that.toString());
    }

    @Override
    public int compareTo(Stringy that) {
      return this.s.compareTo(that.s);
    }
  }

  /** Round-trips Stringy keys/values through a reflect-based data model. */
  @Test
  void alternateModel() throws Exception {
    LOG.debug("Writing some reflect records...");

    ReflectData model = ReflectData.get();
    Configuration conf = new Configuration();
    SortedKeyValueFile.Writer.Options options = new SortedKeyValueFile.Writer.Options()
        .withKeySchema(model.getSchema(Stringy.class)).withValueSchema(model.getSchema(Stringy.class))
        .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "reflect")).withDataModel(model)
        .withIndexInterval(2);

    try (SortedKeyValueFile.Writer<Stringy, Stringy> writer = new SortedKeyValueFile.Writer<>(options)) {
      writer.append(new Stringy("apple"), new Stringy("Apple"));
      writer.append(new Stringy("banana"), new Stringy("Banana"));
      writer.append(new Stringy("carrot"), new Stringy("Carrot"));
      writer.append(new Stringy("durian"), new Stringy("Durian"));
    }

    LOG.debug("Reading the file back using a reader...");
    SortedKeyValueFile.Reader.Options readerOptions = new SortedKeyValueFile.Reader.Options()
        .withKeySchema(model.getSchema(Stringy.class)).withValueSchema(model.getSchema(Stringy.class))
        .withConfiguration(conf).withPath(new Path(mTempDir.getPath(), "reflect")).withDataModel(model);

    try (SortedKeyValueFile.Reader<Stringy, Stringy> reader = new SortedKeyValueFile.Reader<>(readerOptions)) {
      assertEquals(new Stringy("Carrot"), reader.get(new Stringy("carrot")));
      assertEquals(new Stringy("Banana"), reader.get(new Stringy("banana")));
      assertNull(reader.get(new Stringy("a-vegetable")));
      assertNull(reader.get(new Stringy("beet")));
      assertNull(reader.get(new Stringy("zzz")));
    }
  }
}
6,989
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerialization.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.*; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.reflect.ReflectData; import org.apache.avro.reflect.ReflectDatumReader; import org.apache.avro.util.Utf8; import org.apache.avro.generic.GenericData; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapred.AvroValue; import org.apache.avro.mapred.AvroWrapper; import org.apache.avro.mapreduce.AvroJob; import org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.Serializer; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; import org.junit.jupiter.api.Test; public class TestAvroSerialization { @Test void accept() { AvroSerialization<CharSequence> serialization = new AvroSerialization<>(); assertTrue(serialization.accept(AvroKey.class)); assertTrue(serialization.accept(AvroValue.class)); assertFalse(serialization.accept(AvroWrapper.class)); assertFalse(serialization.accept(String.class)); } @Test void getSerializerForKey() throws 
IOException { // Set the writer schema in the job configuration. Schema writerSchema = Schema.create(Schema.Type.STRING); Job job = Job.getInstance(); AvroJob.setMapOutputKeySchema(job, writerSchema); // Get a serializer from the configuration. AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); @SuppressWarnings("unchecked") Serializer<AvroWrapper> serializer = serialization.getSerializer(AvroKey.class); assertTrue(serializer instanceof AvroSerializer); AvroSerializer avroSerializer = (AvroSerializer) serializer; // Check that the writer schema is set correctly on the serializer. assertEquals(writerSchema, avroSerializer.getWriterSchema()); } @Test void getSerializerForValue() throws IOException { // Set the writer schema in the job configuration. Schema writerSchema = Schema.create(Schema.Type.STRING); Job job = Job.getInstance(); AvroJob.setMapOutputValueSchema(job, writerSchema); // Get a serializer from the configuration. AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); @SuppressWarnings("unchecked") Serializer<AvroWrapper> serializer = serialization.getSerializer(AvroValue.class); assertTrue(serializer instanceof AvroSerializer); AvroSerializer avroSerializer = (AvroSerializer) serializer; // Check that the writer schema is set correctly on the serializer. assertEquals(writerSchema, avroSerializer.getWriterSchema()); } @Test void getDeserializerForKey() throws IOException { // Set the reader schema in the job configuration. Schema readerSchema = Schema.create(Schema.Type.STRING); Job job = Job.getInstance(); AvroJob.setMapOutputKeySchema(job, readerSchema); // Get a deserializer from the configuration. 
AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); @SuppressWarnings("unchecked") Deserializer<AvroWrapper> deserializer = serialization.getDeserializer(AvroKey.class); assertTrue(deserializer instanceof AvroKeyDeserializer); AvroKeyDeserializer avroDeserializer = (AvroKeyDeserializer) deserializer; // Check that the reader schema is set correctly on the deserializer. assertEquals(readerSchema, avroDeserializer.getReaderSchema()); } @Test void getDeserializerForValue() throws IOException { // Set the reader schema in the job configuration. Schema readerSchema = Schema.create(Schema.Type.STRING); Job job = Job.getInstance(); AvroJob.setMapOutputValueSchema(job, readerSchema); // Get a deserializer from the configuration. AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); @SuppressWarnings("unchecked") Deserializer<AvroWrapper> deserializer = serialization.getDeserializer(AvroValue.class); assertTrue(deserializer instanceof AvroValueDeserializer); AvroValueDeserializer avroDeserializer = (AvroValueDeserializer) deserializer; // Check that the reader schema is set correctly on the deserializer. assertEquals(readerSchema, avroDeserializer.getReaderSchema()); } @Test void classPath() throws Exception { Configuration conf = new Configuration(); ClassLoader loader = conf.getClass().getClassLoader(); AvroSerialization serialization = new AvroSerialization(); serialization.setConf(conf); AvroDeserializer des = (AvroDeserializer) serialization.getDeserializer(AvroKey.class); ReflectData data = (ReflectData) ((ReflectDatumReader) des.mAvroDatumReader).getData(); assertEquals(loader, data.getClassLoader()); } private <T, O> O roundTrip(Schema schema, T data, Class<? 
extends GenericData> modelClass) throws IOException { Job job = Job.getInstance(); AvroJob.setMapOutputKeySchema(job, schema); if (modelClass != null) AvroJob.setDataModelClass(job, modelClass); AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration()); Serializer<AvroKey<T>> serializer = serialization.getSerializer(AvroKey.class); Deserializer<AvroKey<O>> deserializer = serialization.getDeserializer(AvroKey.class); ByteArrayOutputStream baos = new ByteArrayOutputStream(); serializer.open(baos); serializer.serialize(new AvroKey<>(data)); serializer.close(); ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); deserializer.open(bais); AvroKey<O> result = null; result = deserializer.deserialize(result); deserializer.close(); return result.datum(); } @Test void roundTrip() throws Exception { Schema schema = Schema.create(Schema.Type.STRING); assertTrue(roundTrip(schema, "record", null) instanceof String); assertTrue(roundTrip(schema, "record", GenericData.class) instanceof Utf8); } }
6,990
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSequenceFile.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.*; import java.io.File; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapred.AvroValue; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; public class TestAvroSequenceFile { // Disable checkstyle for this variable. It must be public to work with JUnit // @Rule. // CHECKSTYLE:OFF @TempDir public File mTempDir; // CHECKSTYLE:ON /** Tests that reading and writing avro data works. 
*/ @Test @SuppressWarnings("unchecked") void readAvro() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class, Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT), new AvroKey<CharSequence>("one"), new AvroValue<>(1), new AvroKey<CharSequence>("two"), new AvroValue<>(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withKeySchema(Schema.create(Schema.Type.STRING)) .withValueSchema(Schema.create(Schema.Type.INT)).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { AvroKey<CharSequence> key = new AvroKey<>(); AvroValue<Integer> value = new AvroValue<>(); // Read the first record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("one", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.datum().intValue()); // Read the second record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("two", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.datum().intValue()); assertNull(reader.next(key), "Should be no more records."); } } /** * Tests that reading and writing avro records without a reader schema works. 
*/ @Test @SuppressWarnings("unchecked") void readAvroWithoutReaderSchemas() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, AvroKey.class, AvroValue.class, Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT), new AvroKey<CharSequence>("one"), new AvroValue<>(1), new AvroKey<CharSequence>("two"), new AvroValue<>(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { AvroKey<CharSequence> key = new AvroKey<>(); AvroValue<Integer> value = new AvroValue<>(); // Read the first record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("one", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.datum().intValue()); // Read the second record. key = (AvroKey<CharSequence>) reader.next(key); assertNotNull(key); assertEquals("two", key.datum().toString()); value = (AvroValue<Integer>) reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.datum().intValue()); assertNull(reader.next(key), "Should be no more records."); } } /** Tests that reading and writing ordinary Writables still works. 
*/ @Test void readWritables() throws IOException { Path sequenceFilePath = new Path(new File(mTempDir, "output.seq").getPath()); writeSequenceFile(sequenceFilePath, Text.class, IntWritable.class, null, null, new Text("one"), new IntWritable(1), new Text("two"), new IntWritable(2)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Reader.Options options = new AvroSequenceFile.Reader.Options().withFileSystem(fs) .withInputPath(sequenceFilePath).withConfiguration(conf); try (SequenceFile.Reader reader = new AvroSequenceFile.Reader(options)) { Text key = new Text(); IntWritable value = new IntWritable(); // Read the first record. assertTrue(reader.next(key)); assertEquals("one", key.toString()); reader.getCurrentValue(value); assertNotNull(value); assertEquals(1, value.get()); // Read the second record. assertTrue(reader.next(key)); assertEquals("two", key.toString()); reader.getCurrentValue(value); assertNotNull(value); assertEquals(2, value.get()); assertFalse(reader.next(key), "Should be no more records."); } } /** * Writes a sequence file of records. * * @param file The target file path. * @param keySchema The schema of the key if using Avro, else null. * @param valueSchema The schema of the value if using Avro, else null. * @param records <i>key1</i>, <i>value1</i>, <i>key2</i>, <i>value2</i>, * ... */ private void writeSequenceFile(Path file, Class<?> keyClass, Class<?> valueClass, Schema keySchema, Schema valueSchema, Object... records) throws IOException { // Make sure the key/value records have an even size. if (0 != records.length % 2) { throw new IllegalArgumentException("Expected a value for each key record."); } // Open a AvroSequenceFile writer. 
Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(conf); AvroSequenceFile.Writer.Options options = new AvroSequenceFile.Writer.Options().withFileSystem(fs) .withConfiguration(conf).withOutputPath(file); if (null != keySchema) { options.withKeySchema(keySchema); } else { options.withKeyClass(keyClass); } if (null != valueSchema) { options.withValueSchema(valueSchema); } else { options.withValueClass(valueClass); } try (SequenceFile.Writer writer = new AvroSequenceFile.Writer(options)) { // Write some records. for (int i = 0; i < records.length; i += 2) { writer.append(records[i], records[i + 1]); } } } }
6,991
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroDatumConverterFactory.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import java.io.IOException; import java.nio.ByteBuffer; import org.apache.avro.Schema; import org.apache.avro.generic.GenericFixed; import org.apache.avro.mapred.AvroKey; import org.apache.avro.mapred.AvroValue; import org.apache.avro.mapreduce.AvroJob; import org.apache.hadoop.io.BooleanWritable; import org.apache.hadoop.io.ByteWritable; import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.DoubleWritable; import org.apache.hadoop.io.FloatWritable; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; public class TestAvroDatumConverterFactory { private Job mJob; private AvroDatumConverterFactory mFactory; @BeforeEach public void setup() throws IOException { mJob = Job.getInstance(); mFactory = new AvroDatumConverterFactory(mJob.getConfiguration()); } @Test void convertAvroKey() throws IOException { 
AvroJob.setOutputKeySchema(mJob, Schema.create(Schema.Type.STRING)); AvroKey<CharSequence> avroKey = new AvroKey<>("foo"); @SuppressWarnings("unchecked") AvroDatumConverter<AvroKey<CharSequence>, ?> converter = mFactory .create((Class<AvroKey<CharSequence>>) avroKey.getClass()); assertEquals("foo", converter.convert(avroKey).toString()); } @Test void convertAvroValue() throws IOException { AvroJob.setOutputValueSchema(mJob, Schema.create(Schema.Type.INT)); AvroValue<Integer> avroValue = new AvroValue<>(42); @SuppressWarnings("unchecked") AvroDatumConverter<AvroValue<Integer>, Integer> converter = mFactory .create((Class<AvroValue<Integer>>) avroValue.getClass()); assertEquals(42, converter.convert(avroValue).intValue()); } @Test void convertBooleanWritable() { AvroDatumConverter<BooleanWritable, Boolean> converter = mFactory.create(BooleanWritable.class); assertEquals(true, converter.convert(new BooleanWritable(true))); } @Test void convertBytesWritable() { AvroDatumConverter<BytesWritable, ByteBuffer> converter = mFactory.create(BytesWritable.class); ByteBuffer bytes = converter.convert(new BytesWritable(new byte[] { 1, 2, 3 })); assertEquals(1, bytes.get(0)); assertEquals(2, bytes.get(1)); assertEquals(3, bytes.get(2)); } @Test void convertByteWritable() { AvroDatumConverter<ByteWritable, GenericFixed> converter = mFactory.create(ByteWritable.class); assertEquals(42, converter.convert(new ByteWritable((byte) 42)).bytes()[0]); } @Test void convertDoubleWritable() { AvroDatumConverter<DoubleWritable, Double> converter = mFactory.create(DoubleWritable.class); assertEquals(2.0, converter.convert(new DoubleWritable(2.0)), 0.00001); } @Test void convertFloatWritable() { AvroDatumConverter<FloatWritable, Float> converter = mFactory.create(FloatWritable.class); assertEquals(2.2f, converter.convert(new FloatWritable(2.2f)), 0.00001); } @Test void convertIntWritable() { AvroDatumConverter<IntWritable, Integer> converter = mFactory.create(IntWritable.class); assertEquals(2, 
converter.convert(new IntWritable(2)).intValue()); } @Test void convertLongWritable() { AvroDatumConverter<LongWritable, Long> converter = mFactory.create(LongWritable.class); assertEquals(123L, converter.convert(new LongWritable(123L)).longValue()); } @Test void convertNullWritable() { AvroDatumConverter<NullWritable, Object> converter = mFactory.create(NullWritable.class); assertNull(converter.convert(NullWritable.get())); } @Test void convertText() { AvroDatumConverter<Text, CharSequence> converter = mFactory.create(Text.class); assertEquals("foo", converter.convert(new Text("foo")).toString()); } }
6,992
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroKeyDeserializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.Encoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.mapred.AvroWrapper; import org.junit.jupiter.api.Test; public class TestAvroKeyDeserializer { @Test void deserialize() throws IOException { // Create a deserializer. Schema writerSchema = Schema.create(Schema.Type.STRING); Schema readerSchema = Schema.create(Schema.Type.STRING); ClassLoader classLoader = this.getClass().getClassLoader(); AvroKeyDeserializer<CharSequence> deserializer = new AvroKeyDeserializer<>(writerSchema, readerSchema, classLoader); // Check the schemas. assertEquals(writerSchema, deserializer.getWriterSchema()); assertEquals(readerSchema, deserializer.getReaderSchema()); // Write some records to deserialize. 
DatumWriter<CharSequence> datumWriter = new GenericDatumWriter<>(writerSchema); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null); datumWriter.write("record1", encoder); datumWriter.write("record2", encoder); encoder.flush(); // Deserialize the records. ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); deserializer.open(inputStream); AvroWrapper<CharSequence> record = null; record = deserializer.deserialize(record); assertEquals("record1", record.datum().toString()); record = deserializer.deserialize(record); assertEquals("record2", record.datum().toString()); deserializer.close(); } }
6,993
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroValueDeserializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.Encoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.mapred.AvroWrapper; import org.junit.jupiter.api.Test; public class TestAvroValueDeserializer { @Test void deserialize() throws IOException { // Create a deserializer. Schema writerSchema = Schema.create(Schema.Type.STRING); Schema readerSchema = Schema.create(Schema.Type.STRING); ClassLoader classLoader = this.getClass().getClassLoader(); AvroValueDeserializer<CharSequence> deserializer = new AvroValueDeserializer<>(writerSchema, readerSchema, classLoader); // Check the schemas. assertEquals(writerSchema, deserializer.getWriterSchema()); assertEquals(readerSchema, deserializer.getReaderSchema()); // Write some records to deserialize. 
DatumWriter<CharSequence> datumWriter = new GenericDatumWriter<>(writerSchema); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); Encoder encoder = EncoderFactory.get().binaryEncoder(outputStream, null); datumWriter.write("record1", encoder); datumWriter.write("record2", encoder); encoder.flush(); // Deserialize the records. ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); deserializer.open(inputStream); AvroWrapper<CharSequence> record = null; record = deserializer.deserialize(record); assertEquals("record1", record.datum().toString()); record = deserializer.deserialize(record); assertEquals("record2", record.datum().toString()); deserializer.close(); } }
6,994
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/hadoop/io/TestAvroSerializer.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.hadoop.io; import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.io.DatumReader; import org.apache.avro.io.Decoder; import org.apache.avro.io.DecoderFactory; import org.apache.avro.generic.GenericDatumReader; import org.apache.avro.mapred.AvroKey; import org.junit.jupiter.api.Test; public class TestAvroSerializer { @Test void serialize() throws IOException { // Create a serializer. Schema writerSchema = Schema.create(Schema.Type.STRING); AvroSerializer<CharSequence> serializer = new AvroSerializer<>(writerSchema); // Check the writer schema. assertEquals(writerSchema, serializer.getWriterSchema()); // Serialize two records, 'record1' and 'record2'. ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); serializer.open(outputStream); serializer.serialize(new AvroKey<>("record1")); serializer.serialize(new AvroKey<>("record2")); serializer.close(); // Make sure the records were serialized correctly. 
ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); Schema readerSchema = Schema.create(Schema.Type.STRING); DatumReader<CharSequence> datumReader = new GenericDatumReader<>(readerSchema); Decoder decoder = DecoderFactory.get().binaryDecoder(inputStream, null); CharSequence record = null; record = datumReader.read(record, decoder); assertEquals("record1", record.toString()); record = datumReader.read(record, decoder); assertEquals("record2", record.toString()); inputStream.close(); } }
6,995
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyInputFormat.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package org.apache.avro.mapreduce; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.Mockito.*; import java.io.IOException; import org.apache.avro.Schema; import org.apache.avro.mapred.AvroKey; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.FileSplit; import org.junit.jupiter.api.Test; public class TestAvroKeyInputFormat { /** * Verifies that a non-null record reader can be created, and the key/value * types are as expected. */ @Test void createRecordReader() throws IOException, InterruptedException { // Set up the job configuration. 
Job job = Job.getInstance(); AvroJob.setInputKeySchema(job, Schema.create(Schema.Type.STRING)); Configuration conf = job.getConfiguration(); FileSplit inputSplit = mock(FileSplit.class); TaskAttemptContext context = mock(TaskAttemptContext.class); when(context.getConfiguration()).thenReturn(conf); AvroKeyInputFormat inputFormat = new AvroKeyInputFormat(); @SuppressWarnings("unchecked") RecordReader<AvroKey<Object>, NullWritable> recordReader = inputFormat.createRecordReader(inputSplit, context); assertNotNull(inputFormat); recordReader.close(); verify(context).getConfiguration(); } }
6,996
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestAvroKeyRecordReader.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.mapreduce;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.SeekableFileInput;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.mapred.AvroKey;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestAvroKeyRecordReader {
  /** A temporary directory for test data. */
  @TempDir
  public File mTempDir;

  /**
   * Verifies that avro records can be read and progress is reported correctly.
   */
  @Test
  void readRecords() throws IOException, InterruptedException {
    // Write a two-record ("first", "second") string container file and wrap it
    // in the SeekableInput the reader under test will consume.
    final SeekableInput fileInput = new SeekableFileInput(AvroFiles
        .createFile(new File(mTempDir, "myStringfile.avro"), Schema.create(Schema.Type.STRING), "first", "second"));

    // Build the reader, overriding the input factory so it reads our pre-built
    // file instead of resolving the (fake) split path.
    Schema schema = Schema.create(Schema.Type.STRING);
    RecordReader<AvroKey<CharSequence>, NullWritable> reader = new AvroKeyRecordReader<CharSequence>(schema) {
      @Override
      protected SeekableInput createSeekableInput(Configuration conf, Path path) throws IOException {
        return fileInput;
      }
    };

    // Mock out the input split and task attempt context used for initialization.
    Configuration conf = new Configuration();
    FileSplit split = mock(FileSplit.class);
    when(split.getPath()).thenReturn(new Path("/path/to/an/avro/file"));
    when(split.getStart()).thenReturn(0L);
    when(split.getLength()).thenReturn(fileInput.length());
    TaskAttemptContext taskContext = mock(TaskAttemptContext.class);
    when(taskContext.getConfiguration()).thenReturn(conf);

    reader.initialize(split, taskContext);

    assertEquals(0.0f, reader.getProgress(), 0.0f, "Progress should be zero before any records are read");

    // First record.
    assertTrue(reader.nextKeyValue(), "Expected at least one record");
    AvroKey<CharSequence> currentKey = reader.getCurrentKey();
    NullWritable currentValue = reader.getCurrentValue();
    assertNotNull(currentKey, "First record had null key");
    assertNotNull(currentValue, "First record had null value");
    assertEquals("first", currentKey.datum().toString());
    // The current key/value must be stable between nextKeyValue() calls.
    assertEquals(currentKey, reader.getCurrentKey());
    assertEquals(currentValue, reader.getCurrentValue());

    // Second record.
    assertTrue(reader.nextKeyValue(), "Expected to read a second record");
    currentKey = reader.getCurrentKey();
    currentValue = reader.getCurrentValue();
    assertNotNull(currentKey, "Second record had null key");
    assertNotNull(currentValue, "Second record had null value");
    assertEquals("second", currentKey.datum().toString());

    assertEquals(1.0f, reader.getProgress(), 0.0f, "Progress should be complete (2 out of 2 records processed)");

    // End of input.
    assertFalse(reader.nextKeyValue(), "Expected only 2 records");

    reader.close();

    // The reader should have consulted the split and context as expected.
    verify(split).getPath();
    verify(split, times(2)).getStart();
    verify(split).getLength();
    verify(taskContext, atLeastOnce()).getConfiguration();
  }
}
6,997
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/TestKeyValueWordCount.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.mapreduce;

import static org.junit.jupiter.api.Assertions.*;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.hadoop.io.AvroKeyValue;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestKeyValueWordCount {
  @TempDir
  public File mTempDir;

  /** Emits (line, 1) for every input line. */
  public static class LineCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private IntWritable one;

    @Override
    protected void setup(Context context) {
      one = new IntWritable(1);
    }

    @Override
    protected void map(LongWritable fileByteOffset, Text line, Context context)
        throws IOException, InterruptedException {
      context.write(line, one);
    }
  }

  /** Sums the per-word counts emitted by the mapper. */
  public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text word, Iterable<IntWritable> counts, Context context)
        throws IOException, InterruptedException {
      int total = 0;
      for (IntWritable partial : counts) {
        total += partial.get();
      }
      context.write(word, new IntWritable(total));
    }
  }

  @Test
  void keyValueMapReduce() throws ClassNotFoundException, IOException, InterruptedException, URISyntaxException {
    // Configure a word-count job that reads plain text and writes its results
    // as Avro key/value pairs.
    Job job = Job.getInstance();
    job.setInputFormatClass(TextInputFormat.class);
    FileInputFormat.setInputPaths(job,
        new Path(getClass().getResource("/org/apache/avro/mapreduce/mapreduce-test-input.txt").toURI().toString()));
    job.setMapperClass(LineCountMapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setOutputFormatClass(AvroKeyValueOutputFormat.class);
    Path outputDir = new Path(mTempDir.getPath() + "/out-wordcount");
    FileOutputFormat.setOutputPath(job, outputDir);

    // Run the job.
    assertTrue(job.waitForCompletion(true));

    // Read the reducer output back and check the expected (word, count) pairs,
    // which arrive in sorted key order.
    File resultFile = new File(outputDir.toString(), "part-r-00000.avro");
    DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(
        AvroKeyValue.getSchema(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.INT)));
    DataFileReader<GenericRecord> fileReader = new DataFileReader<>(resultFile, datumReader);

    assertTrue(fileReader.hasNext());
    AvroKeyValue<CharSequence, Integer> first = new AvroKeyValue<>(fileReader.next());
    assertNotNull(first.get());
    assertEquals("apple", first.getKey().toString());
    assertEquals(3, first.getValue().intValue());

    assertTrue(fileReader.hasNext());
    AvroKeyValue<CharSequence, Integer> second = new AvroKeyValue<>(fileReader.next());
    assertNotNull(second.get());
    assertEquals("banana", second.getKey().toString());
    assertEquals(2, second.getValue().intValue());

    assertTrue(fileReader.hasNext());
    AvroKeyValue<CharSequence, Integer> third = new AvroKeyValue<>(fileReader.next());
    assertEquals("carrot", third.getKey().toString());
    assertEquals(1, third.getValue().intValue());

    assertFalse(fileReader.hasNext());
    fileReader.close();
  }
}
6,998
0
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro
Create_ds/avro/lang/java/mapred/src/test/java/org/apache/avro/mapreduce/AvroFiles.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.  See the License for the specific language governing
 * permissions and limitations under the License.
 */
package org.apache.avro.mapreduce;

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumWriter;

/**
 * A utility class for working with Avro container files within tests.
 */
public final class AvroFiles {
  private AvroFiles() {
  }

  /**
   * Creates an avro container file.
   *
   * @param file    The file to create.
   * @param schema  The schema for the records the file should contain.
   * @param records The records to put in the file.
   * @param <T>     The (java) type of the avro records.
   * @return The created file.
   * @throws IOException If the file cannot be created or a record cannot be
   *                     written.
   */
  @SafeVarargs // records array is only read, never stored or exposed.
  public static <T> File createFile(File file, Schema schema, T... records) throws IOException {
    DatumWriter<T> datumWriter = new GenericDatumWriter<>(schema);
    // try-with-resources guarantees the writer (and its underlying stream) is
    // closed even when create/append throws, avoiding a file-handle leak.
    try (DataFileWriter<T> fileWriter = new DataFileWriter<>(datumWriter)) {
      fileWriter.create(schema, file);
      for (T record : records) {
        fileWriter.append(record);
      }
    }
    return file;
  }
}
6,999