index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/RowCountTaskLevelPolicyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyCheckResults;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyChecker;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyCheckerBuilderFactory;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicy;
/**
 * Tests for the task-level row-count policies ({@code RowCountPolicy} and
 * {@code RowCountRangePolicy}): each test wires a policy list into a {@link State},
 * runs the checker, and asserts on the per-policy results.
 */
@Test(groups = {"gobblin.qualitychecker"})
public class RowCountTaskLevelPolicyTest {

  /** Number of rows the extractor reports having read in every test. */
  public static final long EXTRACTOR_ROWS_READ = 1000;
  /** Number of rows the writer reports when the counts are expected to match. */
  public static final long WRITER_ROWS_WRITTEN = 1000;

  /** RowCountPolicy passes when extracted and written row counts are equal. */
  @Test
  public void testRowCountPolicyPassed()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.policies.count.RowCountPolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, WRITER_ROWS_WRITTEN);

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.PASSED);
    }
  }

  /** RowCountPolicy fails when the written row count does not match the extracted count. */
  @Test
  public void testRowCountPolicyFailed()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.policies.count.RowCountPolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, -1);

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.FAILED);
    }
  }

  /** RowCountRangePolicy passes when the counts match exactly (0% deviation, 5% allowed). */
  @Test
  public void testRowCountRangePolicyPassedExact()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.policies.count.RowCountRangePolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, WRITER_ROWS_WRITTEN);
    state.setProp(ConfigurationKeys.ROW_COUNT_RANGE, "0.05");

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.PASSED);
    }
  }

  /** RowCountRangePolicy passes when the written count deviates by 3%, inside the 5% range. */
  @Test
  public void testRowCountRangePolicyPassedRange()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.policies.count.RowCountRangePolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    // BUG FIX: the cast must wrap the whole product. The original expression
    // "(long) 0.03 * EXTRACTOR_ROWS_READ + EXTRACTOR_ROWS_READ" parsed as
    // "((long) 0.03) * EXTRACTOR_ROWS_READ + ..." == 0 + 1000, so the written count
    // equaled the extracted count and this test silently duplicated the exact-match
    // case instead of exercising a 3% deviation inside the 5% range.
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, (long) (0.03 * EXTRACTOR_ROWS_READ) + EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.ROW_COUNT_RANGE, "0.05");

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.PASSED);
    }
  }

  /** RowCountRangePolicy fails when the written count is far outside the allowed range. */
  @Test
  public void testRowCountRangePolicyFailed()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.policies.count.RowCountRangePolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, -1);
    state.setProp(ConfigurationKeys.ROW_COUNT_RANGE, "0.05");

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.FAILED);
    }
  }

  /** Both row-count policies can run together; each should pass on matching counts. */
  @Test
  public void testMultipleRowCountPolicies()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST,
        "org.apache.gobblin.policies.count.RowCountPolicy,org.apache.gobblin.policies.count.RowCountRangePolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL,FAIL");
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, EXTRACTOR_ROWS_READ);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, WRITER_ROWS_WRITTEN);
    state.setProp(ConfigurationKeys.ROW_COUNT_RANGE, "0.05");

    TaskLevelPolicyCheckResults results = getPolicyResults(state);
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.PASSED);
    }
  }

  /** Builds a task-level policy checker from {@code state} and executes all configured policies. */
  private TaskLevelPolicyCheckResults getPolicyResults(State state)
      throws Exception {
    TaskLevelPolicyChecker checker =
        new TaskLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    return checker.executePolicies();
  }
}
| 2,800 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/TestConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
/** Shared constants for the quality-checker tests in this package. */
public class TestConstants {

  // URI of the local file system used when tests need an explicit FS.
  public static final String TEST_FS_URI = "file://localhost/";
  // Staging directory name — presumably consumed by writer-side row-count tests; confirm against callers.
  public static final String TEST_STAGING_DIR = "test-rowcount-staging";
  // Final output directory name — presumably consumed by writer-side row-count tests; confirm against callers.
  public static final String TEST_OUTPUT_DIR = "test-rowcount-output";
  // Avro input file read by the row-level quality-checker tests.
  public static final String TEST_FILE_NAME = "test/resource/source/test.avro.0";
  // Base directory for error files written by ERR_FILE row-level policies.
  public static final String TEST_ERR_FILE = "test";
  // Writer id used when constructing test writers.
  public static final String TEST_WRITER_ID = "writer-rowcount-1";
}
| 2,801 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/TestRowLevelPolicyFail.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicy;
/**
 * A {@link RowLevelPolicy} test stub whose check always fails, used to drive
 * error-file and failure-path behavior in the row-level checker tests.
 */
public class TestRowLevelPolicyFail extends RowLevelPolicy {

  public TestRowLevelPolicyFail(State state, Type type) {
    super(state, type);
  }

  /** Rejects every record unconditionally. */
  @Override
  public Result executePolicy(Object record) {
    return Result.FAILED;
  }
}
| 2,802 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/TestTaskLevelPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicy;
public class TestTaskLevelPolicy extends TaskLevelPolicy {
public TestTaskLevelPolicy(State state, Type type) {
super(state, type);
}
@Override
public Result executePolicy() {
return Result.PASSED;
}
}
| 2,803 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/TaskLevelQualityCheckerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicy;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyCheckResults;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyChecker;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicyCheckerBuilderFactory;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
/** Tests that the task-level policy checker executes configured policies and reports results. */
@Test(groups = {"gobblin.qualitychecker"})
public class TaskLevelQualityCheckerTest {

  /** A single always-passing policy run in FAIL mode reports PASSED for every result entry. */
  @Test
  public void testPolicyChecker()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestTaskLevelPolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL");
    assertAllPassed(runPolicies(state));
  }

  /** Two always-passing policies run together still report PASSED for every result entry. */
  @Test
  public void testMultiplePolicies()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST,
        "org.apache.gobblin.qualitychecker.TestTaskLevelPolicy,org.apache.gobblin.qualitychecker.TestTaskLevelPolicy");
    state.setProp(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, "FAIL,FAIL");
    assertAllPassed(runPolicies(state));
  }

  /** Builds a checker from {@code state} and executes all configured policies. */
  private TaskLevelPolicyCheckResults runPolicies(State state)
      throws Exception {
    return new TaskLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build().executePolicies();
  }

  /** Asserts that every entry in the policy result map reported PASSED. */
  private static void assertAllPassed(TaskLevelPolicyCheckResults results) {
    for (Map.Entry<TaskLevelPolicy.Result, TaskLevelPolicy.Type> entry : results.getPolicyResults().entrySet()) {
      Assert.assertEquals(entry.getKey(), TaskLevelPolicy.Result.PASSED);
    }
  }
}
| 2,804 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/TestRowLevelPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.row.RowLevelPolicy;
/**
 * A {@link RowLevelPolicy} test stub whose check always passes, used by the
 * row-level checker tests to exercise the accepting path.
 */
public class TestRowLevelPolicy extends RowLevelPolicy {

  public TestRowLevelPolicy(State state, Type type) {
    super(state, type);
  }

  /** Accepts every record unconditionally. */
  @Override
  public Result executePolicy(Object record) {
    return Result.PASSED;
  }
}
| 2,805 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/row/RowLevelQualityCheckerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import java.io.File;
import java.io.Flushable;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.io.DatumReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.TestConstants;
import org.apache.gobblin.qualitychecker.TestRowLevelPolicy;
import org.apache.gobblin.records.ControlMessageHandler;
import org.apache.gobblin.records.FlushControlMessageHandler;
import org.apache.gobblin.stream.FlushControlMessage;
import static org.apache.gobblin.configuration.ConfigurationKeys.ROW_LEVEL_ERR_FILE;
import static org.apache.gobblin.qualitychecker.row.RowLevelPolicyChecker.ALLOW_SPECULATIVE_EXECUTION_WITH_ERR_FILE_POLICY;
import static org.apache.gobblin.qualitychecker.row.RowLevelPolicyCheckerBuilder.ROW_LEVEL_POLICY_CHECKER_TYPE;
/**
 * Tests for {@link RowLevelPolicyChecker}: per-record policy execution, error-file
 * lifecycle (close-on-flush, timestamped names), speculative-execution safety, and
 * pluggable checker types.
 */
@Test(groups = {"gobblin.qualitychecker"})
public class RowLevelQualityCheckerTest {

  /**
   * Runs a single always-passing row-level policy in FAIL mode over every record of
   * the test Avro file and verifies each record is accepted.
   */
  @Test(groups = {"ignore"})
  public void testRowLevelPolicy()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicy");
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "FAIL");
    RowLevelPolicyChecker checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();
    FileReader<GenericRecord> fileReader = openFile(state);
    for (GenericRecord datum : fileReader) {
      Assert.assertTrue(checker.executePolicies(datum, results));
    }
  }

  /**
   * Verify close-on-flush for errFile in quality checker.
   *
   * Rejects a record with an ERR_FILE policy (creating a zero-length error file),
   * flushes via a FlushControlMessage (file gains content), then rejects another
   * record (a second error file is created).
   */
  public void testErrFileCloseOnFlush() throws Exception {
    File dir = Files.createTempDir();
    dir.deleteOnExit();
    State state = new State();
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicyFail");
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, RowLevelPolicy.Type.ERR_FILE);
    state.setProp(ROW_LEVEL_ERR_FILE, dir.getAbsolutePath());
    RowLevelPolicyChecker checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();
    // Minimal one-int-field Avro schema/records to feed the failing policy.
    Schema intSchema = SchemaBuilder.record("test")
        .fields()
        .name("a").type().intType().noDefault()
        .endRecord();
    GenericRecord intRecord = new GenericRecordBuilder(intSchema)
        .set("a", 1)
        .build();
    GenericRecord intRecord_2 = new GenericRecordBuilder(intSchema)
        .set("a", 2)
        .build();
    Assert.assertFalse(checker.executePolicies(intRecord, results));
    // Inspect the folder: Should see files with zero byte
    FileSystem fs = FileSystem.getLocal(new Configuration());
    List<FileStatus> fileList = Arrays.asList(fs.listStatus(new Path(dir.getPath())));
    Assert.assertEquals(fileList.size(), 1);
    Assert.assertEquals(fileList.get(0).getLen(), 0);
    // This should trigger errFile flush so that file size should be larger than zero.
    checker.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    fileList = Arrays.asList(fs.listStatus(new Path(dir.getPath())));
    Assert.assertTrue(fileList.get(0).getLen() > 0);
    // This should trigger a new errFile created.
    Assert.assertFalse(checker.executePolicies(intRecord_2, results));
    fileList = Arrays.asList(fs.listStatus(new Path(dir.getPath())));
    Assert.assertEquals(fileList.size(), 2);
  }

  // Verify rowPolicyChecker is configurable: a custom checker class named in the state
  // is instantiated and supplies its own message handler.
  public void testRowPolicyCheckerBuilder() throws Exception {
    State state = new State();
    state.setProp(ROW_LEVEL_POLICY_CHECKER_TYPE,
        "org.apache.gobblin.qualitychecker.row.RowLevelQualityCheckerTest$TestRowLevelPolicyChecker");
    // NOTE(review): this call site invokes newPolicyCheckerBuilder statically, while the
    // other tests go through "new RowLevelPolicyCheckerBuilderFactory()" — confirm the
    // factory method is indeed static (otherwise this would not compile).
    RowLevelPolicyChecker checker = RowLevelPolicyCheckerBuilderFactory.newPolicyCheckerBuilder(state, 0).build();
    Assert.assertTrue(checker instanceof TestRowLevelPolicyChecker);
    Assert.assertTrue(checker.getMessageHandler() instanceof FlushControlMessageHandler);
  }

  /**
   * Error-file paths must embed a timestamp; also checks speculative-execution safety:
   * safe when no ERR_FILE policy is configured, unsafe when one is.
   */
  public void testFileNameWithTimestamp() throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicy");
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "ERR_FILE");
    state.setProp(ROW_LEVEL_ERR_FILE, TestConstants.TEST_ERR_FILE);
    RowLevelPolicyChecker checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    Path path = checker.getErrFilePath(new TestRowLevelPolicy(state, RowLevelPolicy.Type.ERR_FILE));
    // Verify that path follows the structure which contains timestamp.
    Pattern pattern = Pattern.compile("test\\/org.apache.gobblin.qualitychecker.TestRowLevelPolicy-\\d+\\.err");
    Matcher matcher = pattern.matcher(path.toString());
    Assert.assertTrue(matcher.matches());
    // Positive case with multiple non-err_file policy specified.
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicy,org.apache.gobblin.qualitychecker.TestRowLevelPolicy");
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "FAIL,OPTIONAL");
    state.setProp(ALLOW_SPECULATIVE_EXECUTION_WITH_ERR_FILE_POLICY, false);
    checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    Assert.assertTrue(checker.isSpeculativeAttemptSafe());
    // Negative case with multiple policy containing err_file
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "FAIL,ERR_FILE");
    checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    Assert.assertFalse(checker.isSpeculativeAttemptSafe());
  }

  /**
   * Runs an always-failing ERR_FILE policy over the test Avro file and verifies the
   * rejected records land in an error file named after the policy class; cleans up after.
   */
  @Test(groups = {"ignore"})
  public void testWriteToErrFile()
      throws Exception {
    State state = new State();
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicyFail");
    state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "ERR_FILE");
    state.setProp(ROW_LEVEL_ERR_FILE, TestConstants.TEST_ERR_FILE);
    state.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, TestConstants.TEST_FS_URI);
    RowLevelPolicyChecker checker =
        new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
    RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();
    FileReader<GenericRecord> fileReader = openFile(state);
    for (GenericRecord datum : fileReader) {
      Assert.assertFalse(checker.executePolicies(datum, results));
    }
    FileSystem fs = FileSystem.get(new URI(TestConstants.TEST_FS_URI), new Configuration());
    // Error-file name is the policy class name with dots replaced by dashes.
    Path outputPath = new Path(TestConstants.TEST_ERR_FILE,
        state.getProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST).replaceAll("\\.", "-") + ".err");
    Assert.assertTrue(fs.exists(outputPath));
    fs.delete(new Path(TestConstants.TEST_ERR_FILE), true);
  }

  /** Opens the shared test Avro file as a generic-record reader. */
  private FileReader<GenericRecord> openFile(State state)
      throws Exception {
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
    FileReader<GenericRecord> fileReader = DataFileReader.openReader(new File(TestConstants.TEST_FILE_NAME), reader);
    return fileReader;
  }

  /**
   * An extension of {@link RowLevelPolicyChecker} just for verifying class type when specifying derived class
   * from configuration.
   */
  public static class TestRowLevelPolicyChecker extends RowLevelPolicyChecker {
    // NOTE(review): raw List parameter mirrors the superclass constructor; its element
    // type is not visible here.
    public TestRowLevelPolicyChecker(List list, String stateId, FileSystem fs, State state) {
      super(list, stateId, fs, state);
    }

    /** Returns a flush handler backed by a no-op Flushable, so flushes have no side effects. */
    @Override
    protected ControlMessageHandler getMessageHandler() {
      return new FlushControlMessageHandler(new Flushable() {
        @Override
        public void flush()
            throws IOException {
          // do nothing
        }
      });
    }
  }
}
| 2,806 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/qualitychecker/row/FrontLoadedSamplerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests {@code RowLevelPolicyChecker.FrontLoadedSampler}: the sampler accepts the
 * first {@code targetSamples} elements and then decays acceptance according to the
 * decay factor.
 */
public class FrontLoadedSamplerTest {

  @Test
  public void test() {
    // BUG FIX: the original generator was "i -> i++", which post-increments the
    // lambda's local copy and returns the old value, so the stream produced
    // 0, 0, 0, ... and the subList assertions compared zeros against zeros
    // (vacuously true). "i -> i + 1" yields the intended 0, 1, 2, ... sequence.
    RowLevelPolicyChecker.FrontLoadedSampler sampler =
        new RowLevelPolicyChecker.FrontLoadedSampler(10, 2);
    List<Integer> sampled =
        Stream.iterate(0, i -> i + 1).limit(1000).filter(i -> sampler.acceptNext()).collect(Collectors.toList());
    Assert.assertTrue(sampled.size() >= 10);
    Assert.assertTrue(sampled.size() < 30);
    // check the first 10 accepted values are the integers 0 through 9
    // (assumes the sampler accepts the first 10 elements unconditionally — TODO confirm)
    Assert.assertEquals(sampled.subList(0, 10), Stream.iterate(0, i -> i + 1).limit(10).collect(Collectors.toList()));

    // with a very large decay factor, should have very few additional samples
    RowLevelPolicyChecker.FrontLoadedSampler sampler2 =
        new RowLevelPolicyChecker.FrontLoadedSampler(10, 10);
    sampled = Stream.iterate(0, i -> i + 1).limit(1000).filter(i -> sampler2.acceptNext()).collect(Collectors.toList());
    Assert.assertTrue(sampled.size() >= 10);
    Assert.assertTrue(sampled.size() < 15);
    // check the first 10 accepted values are the integers 0 through 9
    Assert.assertEquals(sampled.subList(0, 10), Stream.iterate(0, i -> i + 1).limit(10).collect(Collectors.toList()));

    // with a low decay factor, should have many additional samples
    RowLevelPolicyChecker.FrontLoadedSampler sampler3 =
        new RowLevelPolicyChecker.FrontLoadedSampler(10, 1.2);
    sampled = Stream.iterate(0, i -> i + 1).limit(1000).filter(i -> sampler3.acceptNext()).collect(Collectors.toList());
    Assert.assertTrue(sampled.size() >= 30);
    Assert.assertTrue(sampled.size() < 100);
    // check the first 10 accepted values are the integers 0 through 9
    Assert.assertEquals(sampled.subList(0, 10), Stream.iterate(0, i -> i + 1).limit(10).collect(Collectors.toList()));
  }
}
| 2,807 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/EmptyIterableTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter;
import java.util.Iterator;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Unit tests for {@link EmptyIterable}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.converter"})
public class EmptyIterableTest {

  /** An {@link EmptyIterable}'s iterator must report no elements. */
  @Test
  public void testIterator() {
    Iterator<Object> it = new EmptyIterable<>().iterator();
    Assert.assertFalse(it.hasNext());
  }
}
| 2,808 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/GobblinMetricsPinotFlattenerConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metrics.Metric;
import org.apache.gobblin.metrics.MetricReport;
import org.apache.gobblin.metrics.reporter.util.AvroBinarySerializer;
import org.apache.gobblin.metrics.reporter.util.AvroSerializer;
import org.apache.gobblin.metrics.reporter.util.NoopSchemaVersionWriter;
import org.apache.gobblin.util.AvroUtils;
/**
 * Tests {@link GobblinMetricsPinotFlattenerConverter}: a {@link MetricReport} with two
 * metrics is serialized, deserialized back to a generic record, flattened, and the
 * two resulting flat records are checked field by field.
 */
public class GobblinMetricsPinotFlattenerConverterTest {
  @Test
  public void test() throws Exception {
    // Build a report with two tags and two metrics at a fixed timestamp.
    MetricReport metricReport = new MetricReport();
    metricReport.setTags(ImmutableMap.of("tag", "value", "tag2", "value2"));
    metricReport.setTimestamp(10L);
    metricReport.setMetrics(Lists.newArrayList(new Metric("metric", 1.0), new Metric("metric2", 2.0)));
    AvroSerializer<MetricReport> serializer =
        new AvroBinarySerializer<>(MetricReport.SCHEMA$, new NoopSchemaVersionWriter());
    // NOTE(review): the return value of this first serializeRecord call is discarded and
    // the record is serialized again below — looks redundant; confirm whether it is
    // needed to initialize serializer state before the real call.
    serializer.serializeRecord(metricReport);
    // Re-parse the report schema from the classpath resource — presumably so string
    // fields deserialize as Utf8, matching the casts below; verify against AvroUtils.
    Schema metricReportUtf8 = new Schema.Parser().parse(this.getClass().getClassLoader().getResourceAsStream("MetricReport.avsc"));
    GenericRecord genericRecordMetric = AvroUtils.slowDeserializeGenericRecord(serializer.serializeRecord(metricReport), metricReportUtf8);
    GobblinMetricsPinotFlattenerConverter converter = new GobblinMetricsPinotFlattenerConverter();
    Schema outputSchema = converter.convertSchema(MetricReport.SCHEMA$, new WorkUnitState());
    Iterable<GenericRecord> converted = converter.convertRecord(outputSchema, genericRecordMetric, new WorkUnitState());
    List<GenericRecord> convertedList = Lists.newArrayList(converted);
    // One flat record per metric in the report.
    Assert.assertEquals(convertedList.size(), 2);
    Assert.assertEquals(Sets.newHashSet((List<Utf8>) convertedList.get(0).get("tags")),
        Sets.newHashSet("tag:value", "tag2:value2"));
    Assert.assertEquals(convertedList.get(0).get("timestamp"), 10L);
    Assert.assertEquals(convertedList.get(0).get("metricName").toString(), "metric");
    Assert.assertEquals(convertedList.get(0).get("metricValue"), 1.0);
    Assert.assertEquals(Sets.newHashSet((List<Utf8>) convertedList.get(1).get("tags")),
        Sets.newHashSet("tag:value", "tag2:value2"));
    Assert.assertEquals(convertedList.get(1).get("timestamp"), 10L);
    Assert.assertEquals(convertedList.get(1).get("metricName").toString(), "metric2");
    Assert.assertEquals(convertedList.get(1).get("metricValue"), 2.0);
  }
}
| 2,809 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/objectstore/ObjectStoreDeleteConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.objectstore;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Iterables;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import org.apache.gobblin.configuration.WorkUnitState;
public class ObjectStoreDeleteConverterTest {

  /**
   * Builds the JSON for a single-field record schema whose {@code objectId}
   * field carries the given Avro type. Shared by all four tests, which differ
   * only in that type.
   */
  private static String objectIdSchema(String avroType) {
    return "{ \"type\" : \"record\", \"name\" : \"test_schema\", \"namespace\" : \"com.gobblin.test\", "
        + "\"fields\" : [ { \"name\" : \"objectId\", \"type\" : \"" + avroType + "\"} ], \"doc:\" : \"\" }";
  }

  /** Creates a {@link WorkUnitState} that tells the converter to read ids from the "objectId" field. */
  private static WorkUnitState newWorkUnitState() {
    WorkUnitState wu = new WorkUnitState();
    wu.setProp(ObjectStoreDeleteConverter.OBJECT_ID_FIELD, "objectId");
    return wu;
  }

  @Test
  public void convertStringObjId() throws Exception {
    WorkUnitState wu = newWorkUnitState();
    ObjectStoreDeleteConverter converter = new ObjectStoreDeleteConverter();
    converter.init(wu);
    Schema schema = new Schema.Parser().parse(objectIdSchema("string"));
    GenericRecord datum = new GenericData.Record(schema);
    String objId = "abcd";
    datum.put("objectId", objId);
    // A string id is expected to come through as its raw bytes.
    Assert
        .assertEquals(Iterables.getFirst(converter.convertRecord(converter.convertSchema(schema, wu), datum, wu), null)
            .getObjectId(), objId.getBytes());
  }

  @Test
  public void convertLongObjId() throws Exception {
    WorkUnitState wu = newWorkUnitState();
    ObjectStoreDeleteConverter converter = new ObjectStoreDeleteConverter();
    converter.init(wu);
    Schema schema = new Schema.Parser().parse(objectIdSchema("long"));
    GenericRecord datum = new GenericData.Record(schema);
    long objId = 1234L; // was '1234l': a lowercase long suffix is easily misread as the digit 1
    datum.put("objectId", objId);
    // A long id is expected to be serialized big-endian, matching Longs.toByteArray().
    Assert
        .assertEquals(Iterables.getFirst(converter.convertRecord(converter.convertSchema(schema, wu), datum, wu), null)
            .getObjectId(), Longs.toByteArray(objId));
  }

  @Test
  public void convertBytesObjId() throws Exception {
    WorkUnitState wu = newWorkUnitState();
    ObjectStoreDeleteConverter converter = new ObjectStoreDeleteConverter();
    converter.init(wu);
    Schema schema = new Schema.Parser().parse(objectIdSchema("bytes"));
    GenericRecord datum = new GenericData.Record(schema);
    String objId = "abcd";
    datum.put("objectId", objId.getBytes());
    // A bytes id should pass through unchanged.
    Assert
        .assertEquals(Iterables.getFirst(converter.convertRecord(converter.convertSchema(schema, wu), datum, wu), null)
            .getObjectId(), objId.getBytes());
  }

  @Test
  public void convertIntObjId() throws Exception {
    WorkUnitState wu = newWorkUnitState();
    ObjectStoreDeleteConverter converter = new ObjectStoreDeleteConverter();
    converter.init(wu);
    Schema schema = new Schema.Parser().parse(objectIdSchema("int"));
    GenericRecord datum = new GenericData.Record(schema);
    int objId = 123;
    datum.put("objectId", objId);
    // An int id is expected to be serialized big-endian, matching Ints.toByteArray().
    Assert
        .assertEquals(Iterables.getFirst(converter.convertRecord(converter.convertSchema(schema, wu), datum, wu), null)
            .getObjectId(), Ints.toByteArray(objId));
  }
}
| 2,810 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/json/BytesToJsonConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.json;
import java.io.IOException;
import org.apache.commons.io.IOUtils;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.junit.Assert;
import org.testng.annotations.Test;
import com.google.gson.JsonObject;
/**
 * Unit test for {@link BytesToJsonConverter}: raw UTF-8 JSON bytes should parse
 * into a {@link JsonObject} with all primitive and nested fields intact.
 */
@Test(groups = {"gobblin.converter"})
public class BytesToJsonConverterTest {

  @Test
  public void testConverter() throws DataConversionException, IOException {
    BytesToJsonConverter converter = new BytesToJsonConverter();
    WorkUnitState state = new WorkUnitState();
    JsonObject record = converter.convertRecord("dummySchema",
        IOUtils.toByteArray(this.getClass().getResourceAsStream("/converter/jsonToAvroRecord.json")), state).iterator().next();
    // org.junit.Assert.assertEquals takes (expected, actual); the original call had the
    // arguments swapped, which garbles the failure message (though not pass/fail).
    Assert.assertEquals(1234L, record.get("longField").getAsLong());
    Assert.assertEquals("test2", record.get("nestedRecords").getAsJsonObject().get("nestedField2").getAsString());
  }
}
| 2,811 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/json/JsonStringToJsonIntermediateConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.json;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.util.Map;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import gobblin.configuration.WorkUnitState;
import static org.junit.Assert.assertEquals;
/**
* Unit test for {@link JsonStringToJsonIntermediateConverter}
*
* @author Tilak Patidar
*/
@Test(groups = {"gobblin.converter"})
public class JsonStringToJsonIntermediateConverterTest {
private static JsonStringToJsonIntermediateConverter converter;
private static JsonObject testJsonData;
@BeforeClass
public static void setUp()
throws SchemaConversionException {
converter = new JsonStringToJsonIntermediateConverter();
WorkUnitState workUnit = new WorkUnitState();
workUnit.getPropAsBoolean("gobblin.converter.jsonStringToJsonIntermediate.unpackComplexSchemas", true);
converter.convertSchema("[]", workUnit);
Type jsonType = new TypeToken<JsonObject>() {
}.getType();
Gson gson = new Gson();
testJsonData = gson.fromJson(new InputStreamReader(JsonStringToJsonIntermediateConverterTest.class
.getResourceAsStream("/converter/JsonStringToJsonIntermediateConverter.json")), jsonType);
}
private JsonObject parseJsonObject(JsonObject json, JsonArray record)
throws DataConversionException {
return converter.convertRecord(record, json.toString(), new WorkUnitState()).iterator().next();
}
@Test
public void testAllCases()
throws DataConversionException {
for (Map.Entry<String, JsonElement> keyset : testJsonData.entrySet()) {
JsonArray testData = keyset.getValue().getAsJsonArray();
JsonObject json = testData.get(0).getAsJsonObject();
JsonArray schema = testData.get(1).getAsJsonArray();
JsonObject expected = testData.get(2).getAsJsonObject();
JsonObject result = null;
try {
result = parseJsonObject(json, schema);
} catch (Exception e) {
e.printStackTrace();
assertEquals("Test case failed : " + keyset.getKey(), "No exception", e.getMessage());
}
assertEquals("Test case failed : " + keyset.getKey(), expected, result);
}
}
@Test(expectedExceptions = DataConversionException.class, expectedExceptionsMessageRegExp = "Invalid symbol.*")
public void jsonWithInvalidEnumEntry()
throws DataConversionException {
String jsonStr = "{\"a\":\"somename\", \"b\":\"TROLL\"}";
String schemaStr =
" [{\"columnName\":\"a\", \"dataType\":{\"type\":\"string\"}},{\"columnName\":\"b\", \"dataType\":{\"type\":\"enum\", \"symbols\":[\"HELL\",\"BELLS\"]}}]";
parseJsonObject(buildJsonObject(jsonStr), buildJsonArray(schemaStr));
}
@Test(expectedExceptions = UnsupportedOperationException.class, expectedExceptionsMessageRegExp = "Array items can only be defined using JsonObject or JsonPrimitive.")
public void jsonWithArrayOfMapContainingRecordWithWrongSchema()
throws DataConversionException {
String jsonStr = "{\"a\":\"somename\", \"b\":[{\"d\":{\"age\":\"10\"}},{\"d\":{\"age\":\"1\"}}]}";
String schemaStr =
"[{\"columnName\":\"a\", \"dataType\":{\"type\":\"string\"}},{\"columnName\":\"b\", \"dataType\":{\"type\":\"array\", \"items\":[{\"dataType\":{\"type\":\"map\", \"values\":{\"dataType\":{\"type\":\"record\",\"values\":[{\"columnName\":\"age\", \"dataType\":{\"type\":\"int\"}}]}}}}]}}]";
parseJsonObject(buildJsonObject(jsonStr), buildJsonArray(schemaStr));
}
private JsonObject buildJsonObject(String s) {
JsonParser parser = new JsonParser();
return (JsonObject) parser.parse(s);
}
private JsonArray buildJsonArray(String schemaStr) {
JsonParser parser = new JsonParser();
return parser.parse(schemaStr).getAsJsonArray();
}
} | 2,812 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/JsonRecordAvroSchemaToAvroConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
/**
* Unit test for {@link JsonRecordAvroSchemaToAvroConverter}
*/
@Test(groups = {"gobblin.converter"})
public class JsonRecordAvroSchemaToAvroConverterTest {
@Test
public void testConverter() throws Exception {
String avroSchemaString = readResource("/converter/jsonToAvroSchema.avsc");
WorkUnitState workUnitState = createWorkUnitState(avroSchemaString, "fieldToIgnore");
JsonObject jsonObject = new JsonParser().parse(readResource("/converter/jsonToAvroRecord.json")).getAsJsonObject();
JsonRecordAvroSchemaToAvroConverter<String> converter = new JsonRecordAvroSchemaToAvroConverter<>();
converter.init(workUnitState);
Schema avroSchema = converter.convertSchema(avroSchemaString, workUnitState);
GenericRecord record = converter.convertRecord(avroSchema, jsonObject, workUnitState).iterator().next();
Assert.assertNull(record.get("fieldToIgnore"));
Assert.assertNull(record.get("nullableField"));
Assert.assertEquals(record.get("longField"), 1234L);
Assert.assertTrue(record.get("arrayField") instanceof GenericArray);
Assert.assertTrue(record.get("mapField") instanceof Map);
Assert.assertEquals(((GenericRecord) record.get("nestedRecords")).get("nestedField").toString(), "test");
Assert.assertEquals(((GenericRecord) record.get("nestedRecords")).get("nestedField2").toString(), "test2");
Assert.assertTrue(((GenericArray) record.get("emptyArrayOfRecords")).isEmpty());
GenericRecord recordInArray = (GenericRecord) (((GenericArray) record.get("arrayOfRecords")).get(0));
Assert.assertEquals(recordInArray.get("field1").toString(), "test1");
Assert.assertEquals((record.get("enumField")).toString(), "ENUM2");
Assert.assertTrue(record.get("arrayFieldWithUnion") instanceof GenericArray);
GenericArray arrayWithUnion = (GenericArray) record.get("arrayFieldWithUnion");
Assert.assertEquals(arrayWithUnion.size(), 4);
Assert.assertEquals(arrayWithUnion.get(0).toString(), "arrU1");
Assert.assertEquals(arrayWithUnion.get(1).toString(), "arrU2");
Assert.assertEquals(arrayWithUnion.get(2).toString(), "arrU3");
Assert.assertEquals(arrayWithUnion.get(3), 123L);
Assert.assertTrue(record.get("nullArrayFieldWithUnion") instanceof GenericArray);
GenericArray nullArrayWithUnion = (GenericArray) record.get("nullArrayFieldWithUnion");
Assert.assertEquals(nullArrayWithUnion.size(), 1);
Assert.assertNull(nullArrayWithUnion.get(0));
Assert.assertTrue(record.get("arrayFieldWithUnion2") instanceof GenericArray);
GenericArray arrayWithUnion2 = (GenericArray) record.get("arrayFieldWithUnion2");
Assert.assertEquals(arrayWithUnion2.size(), 3);
Assert.assertEquals(arrayWithUnion2.get(0).toString(), "arrU1");
Assert.assertNull(arrayWithUnion2.get(1));
Assert.assertEquals(arrayWithUnion2.get(2).toString(), "arrU3"); }
@Test(expectedExceptions = DataConversionException.class)
public void testConverterThrowsOnUnrecognizedEnumSymbols() throws IOException, DataConversionException {
String avroSchemaString = "{\"name\": \"TestRecord\", "
+ "\"type\": \"record\","
+ "\"namespace\": \"org.apache.gobblin.test\", "
+ "\"fields\": [ "
+ " { "
+ " \"name\": \"color\", "
+ " \"type\": { "
+ " \"type\": \"enum\","
+ " \"name\": \"Colors\","
+ " \"symbols\" : [\"RED\", \"GREEN\", \"BLUE\"]"
+ " }"
+ " } "
+ " ]"
+ "}";
String jsonString = "{\"color\": \"PURPLE\"}"; // PURPLE isn't a member of the Colors enum
WorkUnitState workUnitState = createWorkUnitState(avroSchemaString, null);
JsonObject jsonObject = new JsonParser().parse(jsonString).getAsJsonObject();
JsonRecordAvroSchemaToAvroConverter<String> converter = new JsonRecordAvroSchemaToAvroConverter<>();
converter.init(workUnitState);
Schema avroSchema = converter.convertSchema(avroSchemaString, workUnitState);
converter.convertRecord(avroSchema, jsonObject, workUnitState);
}
private static WorkUnitState createWorkUnitState(String avroSchemaString, String fieldToIgnore) {
SourceState sourceState = new SourceState();
WorkUnitState workUnitState = new WorkUnitState(
sourceState.createWorkUnit(sourceState.createExtract(TableType.SNAPSHOT_ONLY, "test_table", "test_namespace")));
workUnitState.setProp(ConfigurationKeys.CONVERTER_AVRO_SCHEMA_KEY, avroSchemaString);
if (fieldToIgnore != null) {
workUnitState.setProp(ConfigurationKeys.CONVERTER_IGNORE_FIELDS, fieldToIgnore);
}
return workUnitState;
}
private static String readResource(String path) throws IOException {
return IOUtils.toString(JsonRecordAvroSchemaToAvroConverterTest.class.getResourceAsStream(path), StandardCharsets.UTF_8);
}
} | 2,813 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/JsonIntermediateToAvroConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.TimeZone;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.generic.GenericRecord;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
/**
 * Unit test for {@link JsonIntermediateToAvroConverter}
 * @author kgoodhop
 *
 */
@Test(groups = {"gobblin.converter"})
public class JsonIntermediateToAvroConverterTest {
  // Fixtures (re)populated by initResources() or directly by individual tests.
  private JsonArray jsonSchema;
  private JsonObject jsonRecord;
  private WorkUnitState state;

  /**
   * To test schema and record using the path to their resource file.
   * The resource is expected to contain "schema", "record", "expectedSchema"
   * and "expectedRecord" entries.
   * @param resourceFilePath
   * @throws SchemaConversionException
   * @throws DataConversionException
   */
  private void complexSchemaTest(String resourceFilePath)
      throws SchemaConversionException, DataConversionException {
    JsonObject testData = initResources(resourceFilePath);
    JsonIntermediateToAvroConverter converter = new JsonIntermediateToAvroConverter();
    Schema avroSchema = converter.convertSchema(jsonSchema, state);
    GenericRecord genericRecord = converter.convertRecord(avroSchema, jsonRecord, state).iterator().next();
    JsonParser parser = new JsonParser();
    // Compare as parsed JSON so formatting differences cannot cause false failures.
    Assert.assertEquals(parser.parse(avroSchema.toString()).getAsJsonObject(),
        testData.get("expectedSchema").getAsJsonObject());
    Assert.assertEquals(parser.parse(genericRecord.toString()), testData.get("expectedRecord").getAsJsonObject());
  }

  /**
   * Loads a test resource, populates the jsonSchema/jsonRecord/state fixture
   * fields, and returns the full test-data object for further assertions.
   */
  private JsonObject initResources(String resourceFilePath) {
    Type listType = new TypeToken<JsonObject>() {
    }.getType();
    Gson gson = new Gson();
    JsonObject testData =
        gson.fromJson(new InputStreamReader(this.getClass().getResourceAsStream(resourceFilePath)), listType);
    jsonRecord = testData.get("record").getAsJsonObject();
    jsonSchema = testData.get("schema").getAsJsonArray();
    WorkUnit workUnit = new WorkUnit(new SourceState(),
        new Extract(new SourceState(), Extract.TableType.SNAPSHOT_ONLY, "namespace", "dummy_table"));
    state = new WorkUnitState(workUnit);
    // Time format and timezone that the converter uses for temporal fields.
    state.setProp(ConfigurationKeys.CONVERTER_AVRO_TIME_FORMAT, "HH:mm:ss");
    state.setProp(ConfigurationKeys.CONVERTER_AVRO_DATE_TIMEZONE, "PST");
    return testData;
  }

  /**
   * Verifies type-by-type (string, boolean, array, map, enum, date/time, bytes,
   * numeric primitives) that the converted record matches the input JSON, and
   * that changing the configured timezone shifts the converted timestamps.
   */
  @Test
  public void testConverter()
      throws Exception {
    initResources("/converter/schema.json");
    JsonIntermediateToAvroConverter converter = new JsonIntermediateToAvroConverter();
    Schema avroSchema = converter.convertSchema(jsonSchema, state);
    GenericRecord record = converter.convertRecord(avroSchema, jsonRecord, state).iterator().next();
    //testing output values are expected types and values
    Assert.assertEquals(jsonRecord.get("Id").getAsString(), record.get("Id").toString());
    Assert.assertEquals(jsonRecord.get("IsDeleted").getAsBoolean(), record.get("IsDeleted"));
    if (!(record.get("Salutation") instanceof GenericArray)) {
      Assert.fail("expected array, found " + record.get("Salutation").getClass().getName());
    }
    if (!(record.get("MapAccount") instanceof Map)) {
      Assert.fail("expected map, found " + record.get("MapAccount").getClass().getName());
    }
    Assert.assertEquals(jsonRecord.get("Industry").getAsString(), record.get("Industry").toString());
    // Render converted temporal values back through the configured PST zone for comparison.
    DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")
        .withZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("PST")));
    Assert.assertEquals(jsonRecord.get("LastModifiedDate").getAsString(),
        new DateTime(record.get("LastModifiedDate")).toString(format));
    Assert.assertEquals(jsonRecord.get("date_type").getAsString(),
        new DateTime(record.get("date_type")).toString(format));
    format = DateTimeFormat.forPattern("HH:mm:ss").withZone(DateTimeZone.forTimeZone(TimeZone.getTimeZone("PST")));
    Assert.assertEquals(jsonRecord.get("time_type").getAsString(),
        new DateTime(record.get("time_type")).toString(format));
    Assert.assertEquals(jsonRecord.get("bytes_type").getAsString().getBytes(),
        ((ByteBuffer) record.get("bytes_type")).array());
    Assert.assertEquals(jsonRecord.get("int_type").getAsInt(), record.get("int_type"));
    Assert.assertEquals(jsonRecord.get("long_type").getAsLong(), record.get("long_type"));
    Assert.assertEquals(jsonRecord.get("float_type").getAsFloat(), record.get("float_type"));
    Assert.assertEquals(jsonRecord.get("double_type").getAsDouble(), record.get("double_type"));
    //Testing timezone
    // Re-converting with a different zone must produce a different timestamp value.
    state.setProp(ConfigurationKeys.CONVERTER_AVRO_DATE_TIMEZONE, "EST");
    avroSchema = converter.convertSchema(jsonSchema, state);
    GenericRecord record2 = converter.convertRecord(avroSchema, jsonRecord, state).iterator().next();
    Assert.assertNotEquals(record.get("LastModifiedDate"), record2.get("LastModifiedDate"));
  }

  @Test
  public void testComplexSchema1()
      throws Exception {
    complexSchemaTest("/converter/complex1.json");
  }

  @Test
  public void testComplexSchema2()
      throws Exception {
    complexSchemaTest("/converter/complex2.json");
  }

  @Test
  public void testComplexSchema3()
      throws Exception {
    complexSchemaTest("/converter/complex3.json");
  }

  /** Verifies that nested records and arrays survive conversion intact. */
  @Test
  public void testConverterWithNestJson() throws Exception {
    Gson gson = new Gson();
    jsonSchema = gson.fromJson(new InputStreamReader(
        this.getClass().getResourceAsStream("/converter/nested_schema.json")),
        JsonArray.class);
    jsonRecord = gson.fromJson(new InputStreamReader(
        this.getClass().getResourceAsStream("/converter/nested_json.json")),
        JsonObject.class);
    WorkUnit workUnit = new WorkUnit(new SourceState(),
        new Extract(new SourceState(), Extract.TableType.SNAPSHOT_ONLY, "namespace", "dummy_table"));
    state = new WorkUnitState(workUnit);
    state.setProp(ConfigurationKeys.CONVERTER_AVRO_TIME_FORMAT, "HH:mm:ss");
    state.setProp(ConfigurationKeys.CONVERTER_AVRO_DATE_TIMEZONE, "PST");
    JsonIntermediateToAvroConverter converter = new JsonIntermediateToAvroConverter();
    Schema avroSchema = converter.convertSchema(jsonSchema, state);
    GenericRecord record = converter.convertRecord(avroSchema,
        jsonRecord.getAsJsonObject(), state).iterator().next();
    // Nested record and array fields must round-trip byte-for-byte as JSON.
    Assert.assertEquals(jsonRecord.getAsJsonObject().get("metaData").getAsJsonObject(),
        gson.fromJson(record.get("metaData").toString(), JsonObject.class));
    Assert.assertEquals(jsonRecord.getAsJsonObject().get("context").getAsJsonArray(),
        gson.fromJson(record.get("context").toString(), JsonArray.class));
    Assert.assertEquals(jsonRecord.getAsJsonObject().get("metaData").getAsJsonObject().get("id").getAsString(),
        ((GenericRecord)(record.get("metaData"))).get("id").toString());
  }
}
| 2,814 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/AvroRecursionEliminatingConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.util.AvroUtils;
public class AvroRecursionEliminatingConverterTest {

  /**
   * Writes a single record conforming to the recursive schema in
   * {@code /converter/recursive.avsc} (where {@code address.previous_address}
   * refers back to the address type) into a temporary Avro data file.
   *
   * @return the temporary file holding the record; deleted on JVM exit
   */
  public File generateRecord()
      throws IOException {
    Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/recursive.avsc"));
    GenericRecord record = new GenericData.Record(inputSchema);
    record.put("name", "John");
    record.put("date_of_birth", 1234L);
    record.put("last_modified", 4567L);
    record.put("created", 6789L);
    GenericRecord addressRecord = new GenericData.Record(inputSchema.getField("address").schema());
    addressRecord.put("city", "Los Angeles");
    addressRecord.put("street_number", 1234);
    // Populate the recursive branch so the conversion test can verify its removal.
    GenericRecord innerAddressRecord = new GenericData.Record(inputSchema.getField("address").schema());
    innerAddressRecord.put("city", "San Francisco");
    innerAddressRecord.put("street_number", 3456);
    addressRecord.put("previous_address", innerAddressRecord);
    record.put("address", addressRecord);

    File recordFile = File.createTempFile(this.getClass().getSimpleName(), "avsc");
    recordFile.deleteOnExit();
    // try-with-resources guarantees the writer (and its file handle) is closed
    // even if create()/append() throws.
    try (DataFileWriter<GenericRecord> dataFileWriter =
        new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(inputSchema))) {
      dataFileWriter.create(inputSchema, recordFile);
      dataFileWriter.append(record);
    }
    return recordFile;
  }

  /**
   * Test schema and record conversion using a recursive schema: the recursive
   * field must be removed from both output schema and record, while all
   * non-recursive fields survive unchanged.
   */
  @Test
  public void testConversion()
      throws IOException {
    File inputFile = generateRecord();
    WorkUnitState workUnitState = new WorkUnitState();
    Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/recursive.avsc"));
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>(inputSchema);
    GenericRecord inputRecord;
    // Close the reader deterministically; the original version leaked the file handle.
    try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<>(inputFile, datumReader)) {
      inputRecord = dataFileReader.next();
    }
    AvroRecursionEliminatingConverter converter = new AvroRecursionEliminatingConverter();
    Schema outputSchema = null;
    String recursiveFieldPath = "address.previous_address";
    // test that the inner recursive field is present in input schema
    Assert.assertTrue(AvroUtils.getFieldSchema(inputSchema, recursiveFieldPath).isPresent());
    try {
      outputSchema = converter.convertSchema(inputSchema, workUnitState);
      // test that the inner recursive field is no longer in the schema
      Assert.assertTrue(!AvroUtils.getFieldSchema(outputSchema, recursiveFieldPath).isPresent(),
          "Inner recursive field " + recursiveFieldPath + " should not be in output schema");
    } catch (SchemaConversionException e) {
      Assert.fail(e.getMessage());
    }
    GenericRecord outputRecord = null;
    try {
      outputRecord = converter.convertRecord(outputSchema, inputRecord, workUnitState).iterator().next();
    } catch (DataConversionException e) {
      Assert.fail(e.getMessage());
    }
    checkEquality("address.street_number", inputRecord, 1234, "Different value in input");
    checkEquality("address.street_number", outputRecord, 1234, "Different value in output");
    checkEquality("name", inputRecord, new Utf8("John"), "Different value in input");
    checkEquality("name", outputRecord, new Utf8("John"), "Different value in output");
    // check that inner address record exists in input record
    checkEquality("address.previous_address.city", inputRecord, new Utf8("San Francisco"), "Different value in input");
    checkEquality("address.previous_address", outputRecord, null, "Failed to remove recursive field");
  }

  /**
   * Asserts that the field at {@code fieldPath} equals {@code expected}, or is
   * absent when {@code expected} is null.
   */
  private void checkEquality(String fieldPath, GenericRecord inputRecord, Object expected, String message) {
    // Parameterized instead of the original raw Optional to avoid unchecked warnings.
    Optional<Object> inputValue = AvroUtils.getFieldValue(inputRecord, fieldPath);
    if (expected != null) {
      Assert.assertEquals(inputValue.get(), expected, message);
    } else {
      Assert.assertTrue(!inputValue.isPresent(), message);
    }
  }
}
| 2,815 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/FlattenNestedKeyConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.File;
import java.io.IOException;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.FileUtils;
import org.testng.annotations.Test;
import junit.framework.Assert;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.util.AvroUtils;
@Test
public class FlattenNestedKeyConverterTest {
/**
* Test schema and record conversion
* 1. A successful schema and record conversion
* 2. Another successful conversion by reusing the converter
* 3. An expected failed conversion by reusing the converter
*/
public void testConversion()
throws IOException {
String key = FlattenNestedKeyConverter.class.getSimpleName() + "." + FlattenNestedKeyConverter.FIELDS_TO_FLATTEN;
Properties props = new Properties();
props.put(key, "name,address.street_number");
WorkUnitState workUnitState = new WorkUnitState();
workUnitState.addAll(props);
Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/nested.avsc"));
GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(inputSchema);
File tmp = File.createTempFile(this.getClass().getSimpleName(), null);
FileUtils.copyInputStreamToFile(getClass().getResourceAsStream("/converter/nested.avro"), tmp);
DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(tmp, datumReader);
GenericRecord inputRecord = dataFileReader.next();
FlattenNestedKeyConverter converter = new FlattenNestedKeyConverter();
Schema outputSchema = null;
try {
outputSchema = converter.convertSchema(inputSchema, workUnitState);
} catch (SchemaConversionException e) {
Assert.fail(e.getMessage());
}
Assert.assertTrue(outputSchema.getFields().size() == inputSchema.getFields().size() + 1);
Assert.assertTrue(outputSchema.getField("addressStreet_number") != null);
GenericRecord outputRecord = null;
try {
outputRecord = converter.convertRecord(outputSchema, inputRecord, workUnitState).iterator().next();
} catch (DataConversionException e) {
Assert.fail(e.getMessage());
}
Object expected = AvroUtils.getFieldValue(outputRecord, "address.street_number").get();
Assert.assertTrue(outputRecord.get("addressStreet_number") == expected);
// Reuse the converter to do another successful conversion
props.put(key, "name,address.city");
workUnitState.addAll(props);
try {
outputSchema = converter.convertSchema(inputSchema, workUnitState);
} catch (SchemaConversionException e) {
Assert.fail(e.getMessage());
}
Assert.assertTrue(outputSchema.getFields().size() == inputSchema.getFields().size() + 1);
Assert.assertTrue(outputSchema.getField("addressCity") != null);
try {
outputRecord = converter.convertRecord(outputSchema, inputRecord, workUnitState).iterator().next();
} catch (DataConversionException e) {
Assert.fail(e.getMessage());
}
expected = AvroUtils.getFieldValue(outputRecord, "address.city").get();
Assert.assertTrue(outputRecord.get("addressCity") == expected);
// Reuse the converter to do a failed conversion
props.put(key, "name,address.anInvalidField");
workUnitState.addAll(props);
boolean hasAnException = false;
try {
converter.convertSchema(inputSchema, workUnitState);
} catch (SchemaConversionException e) {
hasAnException = true;
}
Assert.assertTrue(hasAnException);
}
}
| 2,816 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/AvroToBytesConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.IOException;
import java.util.Iterator;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
public class AvroToBytesConverterTest {
  /**
   * Serializes an Avro record to bytes and compares the output against a pre-serialized
   * binary fixture. The record is written twice to verify the converter is safe to reuse
   * (i.e. nothing goes wrong with its internal caching between records).
   */
  @Test
  public void testSerialization()
      throws DataConversionException, IOException, SchemaConversionException {
    Schema inputSchema = new Schema.Parser()
        .parse(getClass().getClassLoader().getResourceAsStream("converter/bytes_to_avro/test_record_schema.avsc"));
    AvroToBytesConverter converter = new AvroToBytesConverter();
    WorkUnitState state = new WorkUnitState();
    converter.init(state);
    String outputSchema = converter.convertSchema(inputSchema, state);
    // Schema conversion is a pass-through to the schema's JSON form; this is loop-invariant,
    // so assert it once up front instead of on every iteration.
    Assert.assertEquals(outputSchema, inputSchema.toString());

    // Write a record twice to make sure nothing goes wrong with caching
    for (int i = 0; i < 2; i++) {
      GenericRecord testRecord = new GenericData.Record(inputSchema);
      testRecord.put("testStr", "testing12" + ((i == 0) ? "3" : "4"));
      testRecord.put("testInt", -2);

      Iterator<byte[]> records = converter.convertRecord(outputSchema, testRecord, state).iterator();
      byte[] record = records.next();
      Assert.assertFalse(records.hasNext());

      byte[] expectedRecord = IOUtils.toByteArray(
          getClass().getClassLoader().getResourceAsStream("converter/bytes_to_avro/test_record_binary.avro"));
      // the serialized record was serialized with testing123 as the string; if we write testing124 out
      // contents should be the same except for the 10th byte which will be '4' (ASCII 52) instead of '3'
      if (i == 1) {
        expectedRecord[10] = 52;
      }
      Assert.assertEquals(record, expectedRecord);
    }
  }
}
| 2,817 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/BytesToAvroConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
public class BytesToAvroConverterTest {
  private static final String RESOURCE_PATH_PREFIX = "converter/bytes_to_avro/";

  /**
   * Deserializes a binary Avro fixture through {@link BytesToAvroConverter} and checks
   * both the converted schema name and the decoded field values.
   */
  @Test
  public void testCanParseBinary() throws DataConversionException, SchemaConversionException, IOException {
    // try-with-resources ensures both classpath streams are closed even if an assertion fails;
    // close() is skipped automatically for a null resource, so the null-checks below stay valid
    try (InputStream schemaIn = getClass().getClassLoader()
            .getResourceAsStream(RESOURCE_PATH_PREFIX + "test_record_schema.avsc");
        InputStream recordIn = getClass().getClassLoader()
            .getResourceAsStream(RESOURCE_PATH_PREFIX + "test_record_binary.avro")) {
      Assert.assertNotNull("Could not load test schema from resources", schemaIn);
      Assert.assertNotNull("Could not load test record from resources", recordIn);

      BytesToAvroConverter converter = new BytesToAvroConverter();
      WorkUnitState state = new WorkUnitState();
      converter.init(state);
      Schema schema = converter.convertSchema(IOUtils.toString(schemaIn, StandardCharsets.UTF_8), state);
      Assert.assertEquals(schema.getName(), "testRecord");

      Iterator<GenericRecord> records = converter.convertRecord(schema, IOUtils.toByteArray(recordIn), state).iterator();
      GenericRecord record = records.next();
      Assert.assertFalse("Expected only 1 record", records.hasNext());
      Assert.assertEquals(record.get("testStr").toString(), "testing123");
      Assert.assertEquals(record.get("testInt"), -2);
    }
  }
}
| 2,818 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/avro/JsonElementConversionFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.avro;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.converter.avro.JsonElementConversionFactory.EnumConverter;
import org.apache.gobblin.converter.avro.JsonElementConversionFactory.MapConverter;
import org.apache.gobblin.converter.avro.JsonElementConversionFactory.NullConverter;
import org.apache.gobblin.converter.avro.JsonElementConversionFactory.RecordConverter;
import org.apache.gobblin.converter.avro.JsonElementConversionFactory.StringConverter;
import org.apache.gobblin.converter.json.JsonSchema;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.reflect.TypeToken;
import static org.apache.gobblin.converter.avro.JsonElementConversionFactory.ArrayConverter;
import static org.apache.gobblin.converter.avro.JsonElementConversionFactory.JsonElementConverter;
import static org.apache.gobblin.converter.avro.JsonElementConversionFactory.JsonElementConverter.buildNamespace;
import static org.apache.gobblin.converter.avro.JsonElementConversionFactory.Type.NULL;
import static org.apache.gobblin.converter.avro.JsonElementConversionFactory.UnionConverter;
/**
* Unit test for {@link JsonElementConversionFactory}
*
* @author Tilak Patidar
*/
@Test(groups = {"gobblin.converter"})
public class JsonElementConversionFactoryTest {
  // Shared state whose Extract namespace ("namespace") seeds the generated Avro namespaces
  private static WorkUnitState state;
  // Fixture: maps each test-method name to a two-element array [inputJsonSchema, expectedAvroSchema]
  private static JsonObject testData;
  private static JsonParser jsonParser = new JsonParser();

  /**
   * Builds the shared {@link WorkUnitState} (Extract namespace "namespace", table "dummy_table")
   * and loads the JSON fixture file that drives every test in this class.
   */
  @BeforeClass
  public static void setUp() {
    WorkUnit workUnit = new WorkUnit(new SourceState(),
        new Extract(new SourceState(), Extract.TableType.SNAPSHOT_ONLY, "namespace", "dummy_table"));
    state = new WorkUnitState(workUnit);
    Type listType = new TypeToken<JsonObject>() {
    }.getType();
    Gson gson = new Gson();
    testData = gson.fromJson(new InputStreamReader(
        JsonElementConversionFactoryTest.class.getResourceAsStream("/converter/JsonElementConversionFactoryTest.json")),
        listType);
  }

  @Test
  public void schemaWithArrayOfMaps()
      throws Exception {
    String testName = "schemaWithArrayOfMaps";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    JsonSchema jsonSchema = new JsonSchema(schema);
    jsonSchema.setColumnName("dummy");
    ArrayConverter converter = new ArrayConverter(jsonSchema, state, null);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithArrayOfRecords()
      throws Exception {
    String testName = "schemaWithArrayOfRecords";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    JsonSchema jsonSchema = new JsonSchema(schema);
    jsonSchema.setColumnName("dummy1");
    ArrayConverter converter = new ArrayConverter(jsonSchema, state, null);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecord()
      throws DataConversionException, SchemaConversionException, UnsupportedDateTypeException {
    String testName = "schemaWithRecord";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    JsonSchema jsonSchema = new JsonSchema(schema);
    jsonSchema.setColumnName("dummy1");
    RecordConverter converter =
        new RecordConverter(jsonSchema, state, buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithArrayOfInts()
      throws Exception {
    String testName = "schemaWithArrayOfInts";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    ArrayConverter converter = new ArrayConverter(new JsonSchema(schema), state, null);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithNullType() {
    // NULL is the one case built inline rather than from the fixture file
    NullConverter nullConverter = new NullConverter(JsonSchema.buildBaseSchema(NULL));
    JsonObject expected = new JsonObject();
    expected.addProperty("type", "null");
    expected.addProperty("source.type", "null");
    Assert.assertEquals(avroSchemaToJsonElement(nullConverter), expected);
  }

  @Test
  public void schemaWithArrayOfEnums()
      throws Exception {
    String testName = "schemaWithArrayOfEnums";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    ArrayConverter converter = new ArrayConverter(new JsonSchema(schema), state, null);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithMap()
      throws Exception {
    String testName = "schemaWithMap";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    MapConverter converter = new MapConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithMapOfRecords()
      throws Exception {
    String testName = "schemaWithMapOfRecords";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    MapConverter converter = new MapConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithMapOfArrays()
      throws Exception {
    String testName = "schemaWithMapOfArrays";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    MapConverter converter = new MapConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithMapOfEnum()
      throws Exception {
    String testName = "schemaWithMapOfEnum";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    MapConverter converter = new MapConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecordOfMap()
      throws Exception {
    String testName = "schemaWithRecordOfMap";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecordOfArray()
      throws Exception {
    String testName = "schemaWithRecordOfArray";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecordOfEnum()
      throws Exception {
    String testName = "schemaWithRecordOfEnum";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  // Malformed input: a map whose "values" entry is a JSON array must be rejected
  @Test(expectedExceptions = IllegalStateException.class)
  public void schemaWithMapValuesAsJsonArray()
      throws Exception {
    String testName = "schemaWithMapValuesAsJsonArray";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    new RecordConverter(new JsonSchema(schema), state, buildNamespace(state.getExtract().getNamespace(), "something"));
  }

  // Malformed input: a map whose "values" entry is JSON null must be rejected
  @Test(expectedExceptions = UnsupportedOperationException.class)
  public void schemaWithMapValuesAsJsonNull()
      throws Exception {
    String testName = "schemaWithMapValuesAsJsonNull";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    new RecordConverter(new JsonSchema(schema), state, buildNamespace(state.getExtract().getNamespace(), "something"));
  }

  @Test
  public void schemaWithRecordOfRecord()
      throws Exception {
    String testName = "schemaWithRecordOfRecord";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecordOfRecordCheckNamespace()
      throws Exception {
    String testName = "schemaWithRecordOfRecordCheckNamespace";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter =
        new RecordConverter(new JsonSchema(schema), state, buildNamespace(state.getExtract().getNamespace(), "person"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
    // Nested record namespaces are derived by appending the record name to the parent namespace
    Assert.assertEquals(converter.schema().getField("someperson").schema().getNamespace(), "namespace.person.myrecord");
    Assert.assertEquals(converter.schema().getNamespace(), "namespace.person");
  }

  @Test
  public void schemaWithRecordOfEnumCheckNamespace()
      throws Exception {
    String testName = "schemaWithRecordOfEnumCheckNamespace";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonObject expected = getExpectedSchema(testName).getAsJsonObject();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
    Assert.assertEquals(converter.schema().getField("someperson").schema().getNamespace(),
        "namespace.something.myrecord");
    Assert.assertEquals(converter.schema().getNamespace(), "namespace.something");
  }

  @Test
  public void schemaWithUnion()
      throws Exception {
    String testName = "schemaWithUnion";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    // Unions (and nullable types below) serialize to a JSON array of member schemas
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    UnionConverter converter = new UnionConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithComplexUnion()
      throws Exception {
    String testName = "schemaWithComplexUnion";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    UnionConverter converter = new UnionConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithIsNullable()
      throws Exception {
    String testName = "schemaWithIsNullable";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    StringConverter converter = new StringConverter(new JsonSchema(schema));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithRecordIsNullable()
      throws Exception {
    String testName = "schemaWithRecordIsNullable";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    RecordConverter converter = new RecordConverter(new JsonSchema(schema), state,
        buildNamespace(state.getExtract().getNamespace(), "something"));
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithMapIsNullable()
      throws Exception {
    String testName = "schemaWithMapIsNullable";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    MapConverter converter = new MapConverter(new JsonSchema(schema), state);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithEnumIsNullable()
      throws Exception {
    String testName = "schemaWithEnumIsNullable";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    EnumConverter converter = new EnumConverter(new JsonSchema(schema), "something");
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  @Test
  public void schemaWithArrayIsNullable()
      throws Exception {
    String testName = "schemaWithArrayIsNullable";
    JsonObject schema = getSchemaData(testName).getAsJsonObject();
    JsonArray expected = getExpectedSchema(testName).getAsJsonArray();
    ArrayConverter converter = new ArrayConverter(new JsonSchema(schema), state, null);
    Assert.assertEquals(avroSchemaToJsonElement(converter), expected);
  }

  /** Renders the converter's generated Avro schema back into a comparable {@link JsonElement}. */
  private JsonElement avroSchemaToJsonElement(JsonElementConverter converter) {
    return jsonParser.parse(converter.schema().toString());
  }

  /** Second element of the fixture entry: the expected Avro schema for the given test. */
  private JsonElement getExpectedSchema(String methodName) {
    return testData.get(methodName).getAsJsonArray().get(1);
  }

  /** First element of the fixture entry: the input JSON schema for the given test. */
  private JsonElement getSchemaData(String methodName) {
    return testData.get(methodName).getAsJsonArray().get(0);
  }
}
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/http/AvroToRestJsonEntryConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.http;
import java.io.File;
import java.io.IOException;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.FileUtils;
import org.json.JSONException;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.skyscreamer.jsonassert.JSONAssert;
import org.testng.Assert;
import org.testng.annotations.Test;
@Test(groups = {"gobblin.converter"})
public class AvroToRestJsonEntryConverterTest {
public void testConversionWithJsonTemplate() throws DataConversionException, IOException, JSONException {
JsonParser parser = new JsonParser();
String expectedResourceKey = "/sobject/user/John";
String expectedJsonStr = "{ \"name\" : \"John\", \"favoriteNumber\" : 9, \"city\" : \"Mountain view\" }";
RestEntry<JsonObject> expected = new RestEntry<JsonObject>(expectedResourceKey, parser.parse(expectedJsonStr).getAsJsonObject());
WorkUnitState workUnitState = new WorkUnitState();
workUnitState.setProp(AvroToRestJsonEntryConverter.CONVERTER_AVRO_REST_ENTRY_RESOURCE_KEY, "/sobject/user/${name}");
String template = "name=${name},favoriteNumber=${favorite_number},city=${address.city}";
workUnitState.setProp(AvroToRestJsonEntryConverter.CONVERTER_AVRO_REST_JSON_ENTRY_TEMPLATE, template);
testConversion(expected, workUnitState);
}
public void testConversionWithJsonNestedTemplate() throws DataConversionException, IOException, JSONException {
JsonParser parser = new JsonParser();
String expectedResourceKey = "/sobject/user/John";
String expectedJsonStr = "{ \"name\" : \"John\", \"favoriteNumber\" : 9, \"address\" : { \"city\" : \"Mountain view\"} }";
RestEntry<JsonObject> expected = new RestEntry<JsonObject>(expectedResourceKey, parser.parse(expectedJsonStr).getAsJsonObject());
WorkUnitState workUnitState = new WorkUnitState();
workUnitState.setProp(AvroToRestJsonEntryConverter.CONVERTER_AVRO_REST_ENTRY_RESOURCE_KEY, "/sobject/user/${name}");
String template = "name=${name},favoriteNumber=${favorite_number},address.city=${address.city}";
workUnitState.setProp(AvroToRestJsonEntryConverter.CONVERTER_AVRO_REST_JSON_ENTRY_TEMPLATE, template);
testConversion(expected, workUnitState);
}
public void testEqualConversion() throws DataConversionException, IOException, JSONException {
JsonParser parser = new JsonParser();
String expectedResourceKey = "/sobject/user/John";
String expectedJsonStr = "{ \"name\" : \"John\", \"favorite_number\" : 9, \"favorite_color\" : \"blue\", \"date_of_birth\" : 1462387756716, \"last_modified\" : 0, \"created\" : 1462387756716, \"address\" : {\"city\" : \"Mountain view\", \"street_number\" : 2029 } }";
RestEntry<JsonObject> expected = new RestEntry<JsonObject>(expectedResourceKey, parser.parse(expectedJsonStr).getAsJsonObject());
WorkUnitState workUnitState = new WorkUnitState();
workUnitState.setProp(AvroToRestJsonEntryConverter.CONVERTER_AVRO_REST_ENTRY_RESOURCE_KEY, "/sobject/user/${name}");
testConversion(expected, workUnitState);
}
private void testConversion(RestEntry<JsonObject> expected, WorkUnitState actualWorkUnitState) throws DataConversionException, IOException, JSONException {
Schema schema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/nested.avsc"));
GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
File tmp = File.createTempFile(this.getClass().getSimpleName(), null);
tmp.deleteOnExit();
try {
FileUtils.copyInputStreamToFile(getClass().getResourceAsStream("/converter/nested.avro"), tmp);
DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(tmp, datumReader);
GenericRecord avroRecord = dataFileReader.next();
AvroToRestJsonEntryConverter converter = new AvroToRestJsonEntryConverter();
RestEntry<JsonObject> actual = converter.convertRecord(null, avroRecord, actualWorkUnitState).iterator().next();
Assert.assertEquals(actual.getResourcePath(), expected.getResourcePath());
JSONAssert.assertEquals(expected.getRestEntryVal().toString(), actual.getRestEntryVal().toString(), false);
converter.close();
dataFileReader.close();
} finally {
if (tmp != null) {
tmp.delete();
}
}
}
} | 2,820 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/csv/CsvToJsonConverterV2Test.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.csv;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import java.io.IOException;
import java.io.InputStreamReader;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.opencsv.CSVParser;
@Test(groups = {"gobblin.converter"})
public class CsvToJsonConverterV2Test {
  // 11-column sample row; column index 2 ("Dynamic Segment") is the extra field dropped/nulled below
  private String row11Cols = "20160924,desktop,Dynamic Segment,42935,0.0446255968324211,1590.4702457202748,348380,8.1141260044252945,232467,206.98603475430664,33028";
  private String row10Cols = "20160924,desktop,42935,0.0446255968324211,1590.4702457202748,348380,8.1141260044252945,232467,206.98603475430664,33028";

  /** Straight positional conversion: 10 CSV columns map onto a 10-field JSON schema. */
  public void convertOutput() throws IOException {
    JsonParser parser = new JsonParser();
    JsonElement jsonElement = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/schema_with_10_fields.json")));
    JsonArray outputSchema = jsonElement.getAsJsonArray();
    CSVParser csvParser = new CSVParser();
    String[] inputRecord = csvParser.parseLine(row10Cols);

    CsvToJsonConverterV2 converter = new CsvToJsonConverterV2();
    converter.init(new WorkUnitState());
    JsonObject actual = converter.createOutput(outputSchema, inputRecord);
    JsonObject expected = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/10_fields.json")))
        .getAsJsonObject();
    // TestNG argument order is (actual, expected)
    Assert.assertEquals(actual, expected);
    converter.close();
  }

  /** Custom ordering drops column index 2, so an 11-column row fits the 10-field schema. */
  public void convertOutputSkippingField() throws IOException, DataConversionException {
    JsonParser parser = new JsonParser();
    JsonElement jsonElement = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/schema_with_10_fields.json")));
    JsonArray outputSchema = jsonElement.getAsJsonArray();
    CSVParser csvParser = new CSVParser();
    String[] inputRecord = csvParser.parseLine(row11Cols);

    CsvToJsonConverterV2 converter = new CsvToJsonConverterV2();
    WorkUnitState wuState = new WorkUnitState();
    wuState.setProp(CsvToJsonConverterV2.CUSTOM_ORDERING, "0,1,3,4,5,6,7,8,9,10");
    converter.init(wuState);
    JsonObject actual = converter.convertRecord(outputSchema, inputRecord, wuState).iterator().next();
    JsonObject expected = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/10_fields.json")))
        .getAsJsonObject();
    Assert.assertEquals(actual, expected);
    converter.close();
  }

  /** createOutput must reject a record whose column count differs from the schema's field count. */
  public void convertOutputMismatchFields() throws IOException {
    JsonParser parser = new JsonParser();
    JsonElement jsonElement = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/schema_with_10_fields.json")));
    JsonArray outputSchema = jsonElement.getAsJsonArray();
    CSVParser csvParser = new CSVParser();
    String[] inputRecord = csvParser.parseLine(row11Cols);

    CsvToJsonConverterV2 converter = new CsvToJsonConverterV2();
    try {
      converter.createOutput(outputSchema, inputRecord);
      Assert.fail();
    } catch (Exception expectedMismatch) {
      // Expected: 11 columns cannot be mapped onto a 10-field schema
    }
    converter.close();
  }

  /** A -1 entry in the custom ordering injects a JSON null for the corresponding schema field. */
  public void convertOutputAddingNull() throws IOException, DataConversionException {
    JsonParser parser = new JsonParser();
    JsonElement jsonElement = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/schema_with_11_fields.json")));
    JsonArray outputSchema = jsonElement.getAsJsonArray();
    CSVParser csvParser = new CSVParser();
    String[] inputRecord = csvParser.parseLine(row11Cols);

    CsvToJsonConverterV2 converter = new CsvToJsonConverterV2();
    WorkUnitState wuState = new WorkUnitState();
    wuState.setProp(CsvToJsonConverterV2.CUSTOM_ORDERING, "0,1,-1,3,4,5,6,7,8,9,10");
    converter.init(wuState);
    JsonObject actual = converter.convertRecord(outputSchema, inputRecord, wuState).iterator().next();
    JsonObject expected = parser.parse(new InputStreamReader(getClass().getResourceAsStream("/converter/csv/11_fields_with_null.json")))
        .getAsJsonObject();
    Assert.assertEquals(actual, expected);
    converter.close();
  }
}
| 2,821 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/string/StringFilterConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.string;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import java.util.Iterator;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link StringFilterConverter}.
 */
@Test(groups = { "gobblin.converter.string" })
public class StringFilterConverterTest {

  /**
   * With no {@link ConfigurationKeys#CONVERTER_STRING_FILTER_PATTERN} configured,
   * no record passes the filter.
   */
  @Test
  public void testConvertRecordWithNoRegex() throws DataConversionException {
    WorkUnitState state = new WorkUnitState();
    StringFilterConverter filter = new StringFilterConverter();
    filter.init(state);

    Assert.assertFalse(filter.convertRecord(String.class, "HelloWorld", state).iterator().hasNext());
  }

  /**
   * A literal (letters-only) pattern passes only records that match it exactly.
   */
  @Test
  public void testConvertRecordWithSimpleRegex() throws DataConversionException {
    WorkUnitState state = new WorkUnitState();
    state.setProp(ConfigurationKeys.CONVERTER_STRING_FILTER_PATTERN, "HelloWorld");
    StringFilterConverter filter = new StringFilterConverter();
    filter.init(state);

    // "HelloWorld" matches the pattern and is emitted unchanged, exactly once.
    Iterator<String> matched = filter.convertRecord(String.class, "HelloWorld", state).iterator();
    Assert.assertTrue(matched.hasNext());
    Assert.assertEquals(matched.next(), "HelloWorld");
    Assert.assertFalse(matched.hasNext());

    // "Hello" is only a prefix of the pattern, so it is filtered out.
    Iterator<String> filtered = filter.convertRecord(String.class, "Hello", state).iterator();
    Assert.assertFalse(filtered.hasNext());
  }

  /**
   * A wildcard pattern (".*") passes every record through.
   */
  @Test
  public void testConvertRecordWithComplexRegex() throws DataConversionException {
    WorkUnitState state = new WorkUnitState();
    state.setProp(ConfigurationKeys.CONVERTER_STRING_FILTER_PATTERN, ".*");
    StringFilterConverter filter = new StringFilterConverter();
    filter.init(state);

    // Both records match ".*" and are emitted unchanged, exactly once each.
    for (String record : new String[] { "HelloWorld", "Java" }) {
      Iterator<String> results = filter.convertRecord(String.class, record, state).iterator();
      Assert.assertTrue(results.hasNext());
      Assert.assertEquals(results.next(), record);
      Assert.assertFalse(results.hasNext());
    }
  }
}
| 2,822 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/string/TextToStringConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.string;
import java.util.Iterator;
import org.apache.hadoop.io.Text;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
/**
 * Unit tests for {@link TextToStringConverter}.
 *
 * @author Yinan Li
 */
@Test(groups = { "gobblin.converter.string" })
public class TextToStringConverterTest {

  /** A {@link Text} record converts to exactly one String with identical content. */
  @Test
  public void testConvertRecord() throws DataConversionException {
    TextToStringConverter converter =
        (TextToStringConverter) new TextToStringConverter().init(new WorkUnitState());
    Text record = new Text("test");

    Iterator<String> results = converter.convertRecord(null, record, new WorkUnitState()).iterator();

    Assert.assertTrue(results.hasNext());
    Assert.assertEquals(results.next(), record.toString());
    Assert.assertFalse(results.hasNext());
  }
}
| 2,823 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/string/StringSplitterConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.string;
import java.util.Iterator;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link StringSplitterConverter}.
 */
@Test(groups = { "gobblin.converter.string" })
public class StringSplitterConverterTest {

  /**
   * {@link StringSplitterConverter#init(WorkUnitState)} must throw an
   * {@link IllegalArgumentException} when
   * {@link ConfigurationKeys#CONVERTER_STRING_SPLITTER_DELIMITER} is absent from the config.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testInit() {
    new StringSplitterConverter().init(new WorkUnitState());
  }

  /**
   * {@link StringSplitterConverter#convertRecord(Class, String, WorkUnitState)} splits a
   * record on the configured delimiter, discarding empty tokens.
   */
  @Test
  public void testConvertRecord() throws DataConversionException {
    final String delimiter = "\t";
    WorkUnitState state = new WorkUnitState();
    state.setProp(ConfigurationKeys.CONVERTER_STRING_SPLITTER_DELIMITER, delimiter);
    StringSplitterConverter splitter = new StringSplitterConverter();
    splitter.init(state);

    // A record without the delimiter passes through as a single element.
    assertYields(splitter.convertRecord(String.class, "HelloWorld", state).iterator(), "HelloWorld");

    // One delimiter between two strings yields exactly those two elements.
    assertYields(splitter.convertRecord(String.class, "Hello" + delimiter + "World", state).iterator(),
        "Hello", "World");

    // Repeated, leading, and trailing delimiters contribute no empty elements.
    assertYields(
        splitter.convertRecord(String.class,
            delimiter + "Hello" + delimiter + delimiter + "World" + delimiter, state).iterator(),
        "Hello", "World");
  }

  /** Asserts the iterator yields exactly the given elements, in order, and nothing more. */
  private static void assertYields(Iterator<String> actual, String... expected) {
    for (String element : expected) {
      Assert.assertTrue(actual.hasNext());
      Assert.assertEquals(actual.next(), element);
    }
    Assert.assertFalse(actual.hasNext());
  }
}
| 2,824 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/string/StringSplitterToListConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.string;
import java.util.Iterator;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
/**
 * Tests for {@link StringSplitterToListConverter}.
 */
@Test(groups = {"gobblin.converter.string"})
public class StringSplitterToListConverterTest {

  /**
   * Verifies splitting a record into a {@code List<String>} with trimming toggled on
   * and off, for both a multi-character delimiter ("sep") and a whitespace
   * delimiter ("\n\t").
   */
  @Test
  public void testConvertRecord()
      throws DataConversionException {
    StringSplitterToListConverter converter = new StringSplitterToListConverter();
    String delimiter1 = "sep";
    WorkUnitState workUnitState1 = new WorkUnitState();
    workUnitState1.setProp(ConfigurationKeys.CONVERTER_STRING_SPLITTER_DELIMITER, delimiter1);
    workUnitState1.setProp(ConfigurationKeys.CONVERTER_STRING_SPLITTER_SHOULD_TRIM_RESULTS, true);
    converter.init(workUnitState1);
    // "1sep2sepsep": with trimming enabled, the two trailing empty tokens are dropped.
    String inputRecord1 = "1sep2sepsep";
    Iterator<List<String>> recordIterator = converter.convertRecord("", inputRecord1, workUnitState1).iterator();
    Assert.assertTrue(recordIterator.hasNext());
    List<String> record1 = recordIterator.next();
    Assert.assertEquals(record1, Lists.newArrayList("1", "2"));
    Assert.assertFalse(recordIterator.hasNext());
    // Re-init the same converter with trimming disabled: the same input now keeps
    // the empty tokens. Order matters here -- init() is called again deliberately.
    workUnitState1.setProp(ConfigurationKeys.CONVERTER_STRING_SPLITTER_SHOULD_TRIM_RESULTS, false);
    converter.init(workUnitState1);
    recordIterator = converter.convertRecord("", inputRecord1, workUnitState1).iterator();
    Assert.assertTrue(recordIterator.hasNext());
    record1 = recordIterator.next();
    Assert.assertEquals(record1, Lists.newArrayList("1", "2", "", ""));
    Assert.assertFalse(recordIterator.hasNext());
    // Whitespace delimiter with trimming left at its default: the trailing single
    // space survives as its own element, so the default evidently does not trim.
    String delimiter2 = "\n\t";
    String inputRecord2 = "1" + delimiter2 + "2" + delimiter2 + " ";
    WorkUnitState workUnitState2 = new WorkUnitState();
    workUnitState2.setProp(ConfigurationKeys.CONVERTER_STRING_SPLITTER_DELIMITER, delimiter2);
    converter.init(workUnitState2);
    recordIterator = converter.convertRecord("", inputRecord2, workUnitState2).iterator();
    Assert.assertTrue(recordIterator.hasNext());
    List<String> record2 = recordIterator.next();
    Assert.assertEquals(record2, Lists.newArrayList("1", "2", " "));
    Assert.assertFalse(recordIterator.hasNext());
  }
}
| 2,825 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/string/ObjectToStringConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.string;
import java.util.Iterator;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link ObjectToStringConverter}.
 */
@Test(groups = { "gobblin.converter.string" })
public class ObjectToStringConverterTest {

  /**
   * Test for {@link ObjectToStringConverter#convertSchema(Object, WorkUnitState)}. Checks that the
   * convertSchema method always returns {@link String}.class.
   */
  @Test
  public void testConvertSchema() throws SchemaConversionException {
    WorkUnitState workUnitState = new WorkUnitState();
    ObjectToStringConverter converter = new ObjectToStringConverter();
    converter.init(workUnitState);

    Assert.assertEquals(converter.convertSchema(Object.class, workUnitState), String.class);
  }

  /**
   * Test for {@link ObjectToStringConverter#convertRecord(Class, Object, WorkUnitState)}. Checks that
   * the convertRecord method properly converts an {@link Object} to its String equivalent.
   */
  @Test
  public void testConvertRecord() throws DataConversionException {
    WorkUnitState workUnitState = new WorkUnitState();
    ObjectToStringConverter converter = new ObjectToStringConverter();
    converter.init(workUnitState);

    // Test that an Integer can properly be converted to a String. Integer.valueOf
    // replaces the deprecated-for-removal new Integer(int) boxing constructor.
    Integer integerValue = Integer.valueOf(1);
    Iterator<String> itr = converter.convertRecord(String.class, integerValue, workUnitState).iterator();
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals(itr.next(), "1");
    Assert.assertFalse(itr.hasNext());

    // Test that a Long can properly be converted to a String.
    Long longValue = Long.valueOf(2);
    itr = converter.convertRecord(String.class, longValue, workUnitState).iterator();
    Assert.assertTrue(itr.hasNext());
    Assert.assertEquals(itr.next(), "2");
    Assert.assertFalse(itr.hasNext());
  }
}
| 2,826 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/converter/filter/AvroFieldsPickConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.filter;
import java.io.File;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.skyscreamer.jsonassert.JSONAssert;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.SchemaConversionException;
@Test(groups = { "gobblin.converter.filter" })
public class AvroFieldsPickConverterTest {

  /**
   * Picking a subset of top-level fields should produce the expected pruned schema.
   */
  @Test
  public void testFieldsPick() throws Exception {
    Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/fieldPickInput.avsc"));
    WorkUnitState workUnitState = new WorkUnitState();
    workUnitState.setProp(ConfigurationKeys.CONVERTER_AVRO_FIELD_PICK_FIELDS, "name,favorite_number,favorite_color");
    try (AvroFieldsPickConverter converter = new AvroFieldsPickConverter()) {
      Schema converted = converter.convertSchema(inputSchema, workUnitState);
      Schema expected = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/fieldPickExpected.avsc"));
      // Lenient JSON comparison (strict=false): element-order differences are tolerated.
      JSONAssert.assertEquals(expected.toString(), converted.toString(), false);
    }
  }

  /**
   * Requesting a field absent from the input schema ("favorite_food") must fail
   * schema conversion with {@link SchemaConversionException}.
   */
  @Test (expectedExceptions=SchemaConversionException.class)
  public void testFieldsPickWrongFieldFailure() throws Exception {
    Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/fieldPickInput.avsc"));
    WorkUnitState workUnitState = new WorkUnitState();
    workUnitState.setProp(ConfigurationKeys.CONVERTER_AVRO_FIELD_PICK_FIELDS, "name,favorite_number,favorite_food");
    try (AvroFieldsPickConverter converter = new AvroFieldsPickConverter()) {
      // convertSchema is expected to throw here; the assertions below are never reached.
      Schema converted = converter.convertSchema(inputSchema, workUnitState);
      Schema expected = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/fieldPickExpected.avsc"));
      JSONAssert.assertEquals(expected.toString(), converted.toString(), false);
    }
  }

  /**
   * Picking nested fields (dotted paths through records and unions) should prune both
   * the schema and every converted record to match pre-recorded expected Avro files.
   */
  @Test
  public void testFieldsPickWithNestedRecord() throws Exception {
    Schema inputSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/pickfields_nested_with_union.avsc"));
    WorkUnitState workUnitState = new WorkUnitState();
    workUnitState.setProp(ConfigurationKeys.CONVERTER_AVRO_FIELD_PICK_FIELDS, "name,favorite_number,nested1.nested1_string,nested1.nested2_union.nested2_string");
    try (AvroFieldsPickConverter converter = new AvroFieldsPickConverter()) {
      Schema convertedSchema = converter.convertSchema(inputSchema, workUnitState);
      Schema expectedSchema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/converted_pickfields_nested_with_union.avsc"));
      JSONAssert.assertEquals(expectedSchema.toString(), convertedSchema.toString(), false);
      // Read source and expected records in lockstep; both readers are closed by
      // this nested try-with-resources.
      try (DataFileReader<GenericRecord> srcDataFileReader = new DataFileReader<GenericRecord>(
          new File(getClass().getResource("/converter/pickfields_nested_with_union.avro").toURI()),
          new GenericDatumReader<GenericRecord>(inputSchema));
          DataFileReader<GenericRecord> expectedDataFileReader = new DataFileReader<GenericRecord>(
          new File(getClass().getResource("/converter/converted_pickfields_nested_with_union.avro").toURI()),
          new GenericDatumReader<GenericRecord>(expectedSchema));) {
        while (expectedDataFileReader.hasNext()) {
          GenericRecord expected = expectedDataFileReader.next();
          GenericRecord actual = converter.convertRecord(convertedSchema, srcDataFileReader.next(), workUnitState).iterator().next();
          Assert.assertEquals(actual.toString(), expected.toString());
        }
        // Both files must contain the same number of records.
        Assert.assertTrue(!srcDataFileReader.hasNext());
      }
    }
  }
} | 2,827 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/security | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/security/ssl/SSLContextFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.security.ssl;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
@Test
public class SSLContextFactoryTest {
  /**
   * Exercises {@link SSLContextFactory#createInstance} configuration validation:
   * each required path missing in turn, an illegal key store type, and a
   * structurally valid configuration (which may still fail later when the store
   * files are actually opened -- that failure is tolerated).
   */
  public void testCreateSSLContext()
      throws IOException {
    Map<String, String> values = new HashMap<>();
    boolean hasException = false;
    // KEY_STORE_FILE_PATH is required
    try {
      SSLContextFactory.createInstance(ConfigFactory.parseMap(values));
    } catch (ConfigException e) {
      hasException = true;
    }
    Assert.assertTrue(hasException);
    hasException = false;
    // TRUST_STORE_FILE_PATH is required. Supply the key store path first so this
    // check actually exercises the trust-store requirement; the original left the
    // map unchanged, making this an exact duplicate of the previous assertion.
    values.put(SSLContextFactory.KEY_STORE_FILE_PATH, "identity.p12");
    try {
      SSLContextFactory.createInstance(ConfigFactory.parseMap(values));
    } catch (ConfigException e) {
      hasException = true;
    }
    Assert.assertTrue(hasException);
    values.put(SSLContextFactory.TRUST_STORE_FILE_PATH, "certs");
    values.put(SSLContextFactory.KEY_STORE_PASSWORD, "keyStorePassword");
    values.put(SSLContextFactory.TRUST_STORE_PASSWORD, "trustStorePassword");
    values.put(SSLContextFactory.KEY_STORE_TYPE, "XX");
    hasException = false;
    // KEY_STORE_TYPE not legal
    try {
      SSLContextFactory.createInstance(ConfigFactory.parseMap(values));
    } catch (IllegalArgumentException e) {
      hasException = true;
    }
    Assert.assertTrue(hasException);
    values.put(SSLContextFactory.KEY_STORE_TYPE, "PKCS12");
    try {
      SSLContextFactory.createInstance(ConfigFactory.parseMap(values));
    } catch (ConfigException | IllegalArgumentException e) {
      // Config validation must pass with a fully-populated, legal configuration.
      Assert.fail();
    } catch (Exception e) {
      // OK: the referenced store files don't exist in the test environment, so
      // failures past config validation (e.g. IO) are acceptable here.
    }
  }
}
| 2,828 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/publisher/BaseDataPublisherTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.publisher;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Files;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import com.typesafe.config.ConfigFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Type;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.TaskScopeInstance;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.dataset.DatasetDescriptor;
import org.apache.gobblin.dataset.Descriptor;
import org.apache.gobblin.dataset.PartitionDescriptor;
import org.apache.gobblin.metadata.MetadataMerger;
import org.apache.gobblin.metadata.types.GlobalMetadata;
import org.apache.gobblin.metrics.event.lineage.LineageEventBuilder;
import org.apache.gobblin.metrics.event.lineage.LineageInfo;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.ForkOperatorUtils;
import org.apache.gobblin.util.io.GsonInterfaceAdapter;
import org.apache.gobblin.writer.FsDataWriter;
import org.apache.gobblin.writer.FsWriterMetrics;
import org.apache.gobblin.writer.PartitionIdentifier;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Tests for BaseDataPublisher
*/
public class BaseDataPublisherTest {
// Gson Type describing ArrayList<PartitionDescriptor> -- presumably used by tests
// later in this file to deserialize published partition lists (usage not visible
// in this chunk; confirm).
private static final Type PARTITION_LIST_TYPE = new TypeToken<ArrayList<PartitionDescriptor>>(){}.getType();
// Gson instance able to round-trip Descriptor subtypes via GsonInterfaceAdapter.
private static final Gson GSON =
    new GsonBuilder().registerTypeAdapterFactory(new GsonInterfaceAdapter(Descriptor.class)).create();
/**
 * A user-supplied {@link ConfigurationKeys#DATA_PUBLISHER_METADATA_STR} should be
 * written out verbatim as the published metadata.
 */
@Test
public void testMetadataStrOneBranch()
    throws IOException {
  State state = buildDefaultState(1);

  WorkUnitState workUnit = new WorkUnitState();
  workUnit.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_STR, "foobar");
  addStateToWorkunit(state, workUnit);

  new BaseDataPublisher(state).publishMetadata(workUnit);

  try (InputStream metadataIn = new FileInputStream(openMetadataFile(state, 1, 0))) {
    Assert.assertEquals(IOUtils.toString(metadataIn, StandardCharsets.UTF_8), "foobar",
        "Expected to read back metadata from string");
  }
}
/**
 * {@link ConfigurationKeys#DATA_PUBLISHER_METADATA_STR} should be written out for
 * every branch when there are multiple work units and branches.
 */
@Test
public void testMetadataStrMultipleWorkUnitsAndBranches()
    throws IOException {
  final int numBranches = 3;
  State state = buildDefaultState(numBranches);

  // One work unit per branch, each carrying the same metadata string.
  List<WorkUnitState> workUnits = new ArrayList<>();
  for (int i = 0; i < numBranches; i++) {
    WorkUnitState workUnit = new WorkUnitState();
    workUnit.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_STR, "foobar");
    addStateToWorkunit(state, workUnit);
    workUnits.add(workUnit);
  }

  new BaseDataPublisher(state).publishMetadata(workUnits);

  // Every branch's metadata file must contain the verbatim string.
  for (int branch = 0; branch < numBranches; branch++) {
    try (InputStream metadataIn = new FileInputStream(openMetadataFile(state, numBranches, branch))) {
      Assert.assertEquals(IOUtils.toString(metadataIn, StandardCharsets.UTF_8), "foobar",
          "Expected to read back metadata from string");
    }
  }
}
/**
 * Test that an {@link IllegalArgumentException} is properly thrown if we configure a
 * merger class that doesn't actually implement MetadataMerger.
 */
@Test(expectedExceptions = IllegalArgumentException.class)
public void testBogusMetadataMerger()
    throws IOException {
  State s = buildDefaultState(1);
  s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, "true");
  // java.lang.String is a real, loadable class but not a MetadataMerger,
  // so instantiating the merger must fail.
  s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_MERGER_NAME_KEY, "java.lang.String");
  s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_STR, "foobar");
  WorkUnitState wuState = new WorkUnitState();
  addStateToWorkunit(s, wuState);
  BaseDataPublisher publisher = new BaseDataPublisher(s);
  publisher.publishMetadata(Collections.singletonList(wuState));
}
/**
 * This test is testing several things at once:
 * 1. That a merger is called properly for all workunits in a branch
 * 2. That different mergers can be instantiated per branch
 */
@Test
public void testMergedMetadata()
    throws IOException {
  final int numBranches = 2;
  final int numWorkUnits = 10;
  State s = buildDefaultState(numBranches);
  // Even-numbered branches merge by addition, odd-numbered by multiplication.
  for (int i = 0; i < numBranches; i++) {
    String mdKeyName = ForkOperatorUtils
        .getPropertyNameForBranch(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, numBranches, i);
    String mdMergerKeyName = ForkOperatorUtils
        .getPropertyNameForBranch(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_MERGER_NAME_KEY, numBranches, i);
    s.setProp(mdKeyName, "true");
    s.setProp(mdMergerKeyName,
        (i % 2) == 0 ? TestAdditionMerger.class.getName() : TestMultiplicationMerger.class.getName());
  }
  // For each branch, metadata is (branchId+1)*(workUnitNumber+1) - adding 1 so we don't ever multiply by 0
  List<WorkUnitState> workUnits = new ArrayList<>();
  for (int workUnitId = 0; workUnitId < numWorkUnits; workUnitId++) {
    WorkUnitState wuState = new WorkUnitState();
    addStateToWorkunit(s, wuState);
    for (int branchId = 0; branchId < numBranches; branchId++) {
      String mdForBranchName =
          ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_METADATA_KEY, numBranches, branchId);
      wuState.setProp(mdForBranchName, String.valueOf((branchId + 1) * (workUnitId + 1)));
    }
    workUnits.add(wuState);
  }
  BaseDataPublisher publisher = new BaseDataPublisher(s);
  publisher.publishMetadata(workUnits);
  for (int branch = 0; branch < numBranches; branch++) {
    // Recompute the expected merge result independently: identity element is 0 for
    // the addition merger and 1 for the multiplication merger.
    int expectedSum = (branch % 2 == 0) ? 0 : 1;
    for (int i = 0; i < numWorkUnits; i++) {
      if (branch % 2 == 0) {
        expectedSum += (branch + 1) * (i + 1);
      } else {
        expectedSum *= (branch + 1) * (i + 1);
      }
    }
    // The published metadata file for this branch must contain exactly the merged value.
    try (InputStream mdStream = new FileInputStream(openMetadataFile(s, numBranches, branch))) {
      String mdBytes = IOUtils.toString(mdStream, StandardCharsets.UTF_8);
      Assert.assertEquals(mdBytes, String.valueOf(expectedSum), "Expected to read back correctly merged metadata from string");
    }
  }
}
  /**
   * Verifies that metadata reported by the writer is NOT published when no
   * metadata merger is enabled for the branch in the job config.
   */
  @Test
  public void testNoOutputWhenDisabled()
      throws IOException {
    State s = buildDefaultState(1);
    WorkUnitState wuState = new WorkUnitState();
    addStateToWorkunit(s, wuState);
    // The writer reports metadata, but DATA_PUBLISH_WRITER_METADATA_KEY is never set.
    wuState.setProp(ConfigurationKeys.WRITER_METADATA_KEY, "abcdefg");
    BaseDataPublisher publisher = new BaseDataPublisher(s);
    publisher.publishMetadata(Collections.singletonList(wuState));
    // The metadata file must not have been created.
    File mdFile = openMetadataFile(s, 1, 0);
    Assert.assertFalse(mdFile.exists(), "Internal metadata from writer should not be written out if no merger is set in config");
  }
@Test
public void testNoOutputWhenDisabledWithPartitions()
throws IOException {
File publishPath = Files.createTempDir();
State s = buildDefaultState(1);
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE);
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishPath.getAbsolutePath());
WorkUnitState wuState = new WorkUnitState();
addStateToWorkunit(s, wuState);
wuState.setProp(ConfigurationKeys.WRITER_METADATA_KEY, "abcdefg");
FsWriterMetrics metrics1 = buildWriterMetrics("foo1.json", "1-2-3-4", 0, 10);
FsWriterMetrics metrics2 = buildWriterMetrics("foo1.json", "5-6-7-8",10, 20);
wuState.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "1-2-3-4");
wuState.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics1.toJson());
wuState.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + "_0", "1-2-3-4");
wuState.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + " _0", metrics2.toJson());
wuState.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + "_1", "5-6-7-8");
wuState.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + " _1", metrics2.toJson());
BaseDataPublisher publisher = new BaseDataPublisher(s);
publisher.publishMetadata(Collections.singletonList(wuState));
String[] filesInPublishDir = publishPath.list();
Assert.assertEquals(0, filesInPublishDir.length, "Expected 0 files to be output to publish path");
}
@Test
public void testMergesExistingMetadata() throws IOException {
File publishPath = Files.createTempDir();
try {
// Copy the metadata file from resources into the publish path
InputStream mdStream = this.getClass().getClassLoader().getResourceAsStream("publisher/sample_metadata.json");
try (FileOutputStream fOs = new FileOutputStream(new File(publishPath, "metadata.json"))) {
IOUtils.copy(mdStream, fOs);
}
State s = buildDefaultState(1);
String md = new GlobalMetadata().toJson();
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, "true");
s.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishPath.getAbsolutePath());
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR, "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, "metadata.json");
WorkUnitState wuState1 = new WorkUnitState();
FsWriterMetrics metrics1 = buildWriterMetrics("newfile.json", null, 0, 90);
wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics1.toJson());
wuState1.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
addStateToWorkunit(s, wuState1);
BaseDataPublisher publisher = new BaseDataPublisher(s);
publisher.publishMetadata(ImmutableList.of(wuState1));
checkMetadata(new File(publishPath.getAbsolutePath(), "metadata.json"), 4,185,
new FsWriterMetrics.FileInfo("foo3.json", 30),
new FsWriterMetrics.FileInfo("foo1.json", 10),
new FsWriterMetrics.FileInfo("foo4.json", 55),
new FsWriterMetrics.FileInfo("newfile.json", 90));
} finally {
FileUtils.deleteDirectory(publishPath);
}
}
@Test
public void testWithFsMetricsNoPartitions() throws IOException {
File publishPath = Files.createTempDir();
try {
State s = buildDefaultState(1);
String md = new GlobalMetadata().toJson();
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, "true");
s.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishPath.getAbsolutePath());
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR, "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, "metadata.json");
WorkUnitState wuState1 = new WorkUnitState();
FsWriterMetrics metrics1 = buildWriterMetrics("foo1.json", null, 0, 10);
wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics1.toJson());
wuState1.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
addStateToWorkunit(s, wuState1);
WorkUnitState wuState2 = new WorkUnitState();
FsWriterMetrics metrics3 = buildWriterMetrics("foo3.json", null, 1, 30);
wuState2.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
wuState2.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics3.toJson());
addStateToWorkunit(s, wuState2);
WorkUnitState wuState3 = new WorkUnitState();
FsWriterMetrics metrics4 = buildWriterMetrics("foo4.json", null, 2, 55);
wuState3.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
wuState3.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics4.toJson());
addStateToWorkunit(s, wuState3);
BaseDataPublisher publisher = new BaseDataPublisher(s);
publisher.publishMetadata(ImmutableList.of(wuState1, wuState2, wuState3));
checkMetadata(new File(publishPath.getAbsolutePath(), "metadata.json"), 3, 95,
new FsWriterMetrics.FileInfo("foo3.json", 30),
new FsWriterMetrics.FileInfo("foo1.json", 10),
new FsWriterMetrics.FileInfo("foo4.json", 55));
} finally {
FileUtils.deleteDirectory(publishPath);
}
}
  /**
   * Verifies that writer metrics are grouped by partition: each partition
   * directory under the publish path receives its own metadata.json containing
   * only the files written to that partition.
   */
  @Test
  public void testWithFsMetricsAndPartitions() throws IOException {
    File publishPath = Files.createTempDir();
    try {
      File part1 = new File(publishPath, "1-2-3-4");
      part1.mkdir();
      File part2 = new File(publishPath, "5-6-7-8");
      part2.mkdir();
      State s = buildDefaultState(1);
      String md = new GlobalMetadata().toJson();
      s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
      s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, "true");
      s.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
      s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishPath.getAbsolutePath());
      s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR, "false");
      s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, "metadata.json");
      // Work unit 1: writers covering both partitions (foo1.json in each).
      WorkUnitState wuState1 = new WorkUnitState();
      FsWriterMetrics metrics1 = buildWriterMetrics("foo1.json", "1-2-3-4", 0, 10);
      FsWriterMetrics metrics2 = buildWriterMetrics("foo1.json", "5-6-7-8",10, 20);
      wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "1-2-3-4");
      wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics1.toJson());
      // NOTE(review): the " _0"/" _1" metrics keys contain a space before the underscore,
      // unlike the partition-path keys. The assertions below pass, which suggests the
      // publisher matches these keys by prefix — confirm whether the space is intentional.
      wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + "_0", "1-2-3-4");
      wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + " _0", metrics2.toJson());
      wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + "_1", "5-6-7-8");
      wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + " _1", metrics2.toJson());
      wuState1.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
      addStateToWorkunit(s, wuState1);
      // Work unit 2: foo3.json written to partition 1-2-3-4.
      WorkUnitState wuState2 = new WorkUnitState();
      FsWriterMetrics metrics3 = buildWriterMetrics("foo3.json", "1-2-3-4", 1, 30);
      wuState2.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "1-2-3-4");
      wuState2.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
      wuState2.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics3.toJson());
      addStateToWorkunit(s, wuState2);
      // Work unit 3: foo4.json written to partition 5-6-7-8.
      WorkUnitState wuState3 = new WorkUnitState();
      FsWriterMetrics metrics4 = buildWriterMetrics("foo4.json", "5-6-7-8", 2, 55);
      wuState3.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "5-6-7-8");
      wuState3.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
      wuState3.setProp(FsDataWriter.FS_WRITER_METRICS_KEY, metrics4.toJson());
      addStateToWorkunit(s, wuState3);
      BaseDataPublisher publisher = new BaseDataPublisher(s);
      publisher.publishMetadata(ImmutableList.of(wuState1, wuState2, wuState3));
      // Partition 1-2-3-4: foo1 (10) + foo3 (30); partition 5-6-7-8: foo1 (20) + foo4 (55).
      checkMetadata(new File(part1, "metadata.json"), 2, 40,
          new FsWriterMetrics.FileInfo("foo3.json", 30),
          new FsWriterMetrics.FileInfo("foo1.json", 10));
      checkMetadata(new File(part2, "metadata.json"), 2, 75,
          new FsWriterMetrics.FileInfo("foo1.json", 20),
          new FsWriterMetrics.FileInfo("foo4.json", 55));
    } finally {
      FileUtils.deleteDirectory(publishPath);
    }
  }
@Test
public void testWithFsMetricsBranchesAndPartitions() throws IOException {
File publishPaths[] = new File[] {
Files.createTempDir(), // branch 0
Files.createTempDir(), // branch 1
};
try {
List<File[]> branchPaths = Arrays.stream(publishPaths).map(branchPath -> new File[] {
new File(branchPath, "1-2-3-4"),
new File(branchPath, "5-6-7-8")
}).collect(Collectors.toList());
branchPaths.forEach(partitionPaths -> Arrays.stream(partitionPaths).forEach(File::mkdir));
State s = buildDefaultState(2);
String md = new GlobalMetadata().toJson();
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY + ".0", "true");
s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY + ".1", "true");
s.setProp(ConfigurationKeys.WRITER_METADATA_KEY + ".0", md);
s.setProp(ConfigurationKeys.WRITER_METADATA_KEY + ".1", md);
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + ".0", publishPaths[0].getAbsolutePath());
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + ".1", publishPaths[1].getAbsolutePath());
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR, "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR + ".0", "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR + ".1", "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, "metadata.json");
WorkUnitState wuState1 = new WorkUnitState();
FsWriterMetrics metrics1 = buildWriterMetrics("foo1.json", "1-2-3-4", 0, 10);
FsWriterMetrics metrics2 = buildWriterMetrics("foo1.json", "5-6-7-8",10, 20);
wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + ".0", "1-2-3-4");
wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + ".0", metrics1.toJson());
wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + ".0_0", "1-2-3-4");
wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + ".0_0", metrics2.toJson());
wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + ".0" + "_1", "5-6-7-8");
wuState1.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + ".0_1", metrics2.toJson());
wuState1.setProp(ConfigurationKeys.WRITER_METADATA_KEY + ".0", md);
addStateToWorkunit(s, wuState1);
WorkUnitState wuState2 = new WorkUnitState();
FsWriterMetrics metrics3 = buildWriterMetrics("foo3.json", "1-2-3-4", 1, 1, 30);
wuState2.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + ".1", "1-2-3-4");
wuState2.setProp(ConfigurationKeys.WRITER_METADATA_KEY + ".1", md);
wuState2.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + ".1", metrics3.toJson());
addStateToWorkunit(s, wuState2);
WorkUnitState wuState3 = new WorkUnitState();
FsWriterMetrics metrics4 = buildWriterMetrics("foo4.json", "5-6-7-8", 2, 55);
wuState3.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY + ".0", "5-6-7-8");
wuState3.setProp(ConfigurationKeys.WRITER_METADATA_KEY + ".0", md);
wuState3.setProp(FsDataWriter.FS_WRITER_METRICS_KEY + ".0", metrics4.toJson());
addStateToWorkunit(s, wuState3);
BaseDataPublisher publisher = new BaseDataPublisher(s);
publisher.publishMetadata(ImmutableList.of(wuState1, wuState2, wuState3));
checkMetadata(new File(branchPaths.get(0)[0], "metadata.json.0"), 1, 10,
new FsWriterMetrics.FileInfo("foo1.json", 10));
checkMetadata(new File(branchPaths.get(0)[1], "metadata.json.0"), 2, 75,
new FsWriterMetrics.FileInfo("foo1.json", 20),
new FsWriterMetrics.FileInfo("foo4.json", 55));
checkMetadata(new File(branchPaths.get(1)[0], "metadata.json.1"), 1, 30,
new FsWriterMetrics.FileInfo("foo3.json", 30));
} finally {
Arrays.stream(publishPaths).forEach(dir -> {
try {
FileUtils.deleteDirectory(dir);
} catch (IOException e) {
throw new RuntimeException("IOError");
}
});
}
}
private void checkMetadata(File file, int expectedNumFiles, int expectedNumRecords,
FsWriterMetrics.FileInfo... expectedFileInfo)
throws IOException {
Assert.assertTrue(file.exists(), "Expected file " + file.getAbsolutePath() + " to exist");
String contents = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8);
GlobalMetadata metadata = GlobalMetadata.fromJson(contents);
Assert.assertEquals(metadata.getNumFiles(), expectedNumFiles, "# of files do not match");
Assert.assertEquals(metadata.getNumRecords(), expectedNumRecords, "# of records do not match");
for (FsWriterMetrics.FileInfo fileInfo : expectedFileInfo) {
long recordsInMetadata =
((Number) metadata.getFileMetadata(fileInfo.getFileName(), GlobalMetadata.NUM_RECORDS_KEY)).longValue();
Assert.assertEquals(recordsInMetadata, fileInfo.getNumRecords(),
"# of records in file-level metadata do not match");
}
}
  /**
   * Convenience overload of
   * {@link #buildWriterMetrics(String, String, int, int, int)} with branchId 0.
   */
  private FsWriterMetrics buildWriterMetrics(String fileName, String partitionKey, int writerId, int numRecords) {
    return buildWriterMetrics(fileName, partitionKey, writerId, 0, numRecords);
  }
  /**
   * Builds an {@link FsWriterMetrics} describing a single file written by writer
   * "writer&lt;writerId&gt;" into the given partition/branch with the given record count.
   */
  private FsWriterMetrics buildWriterMetrics(String fileName, String partitionKey, int writerId, int branchId, int numRecords) {
    return new FsWriterMetrics(
        String.format("writer%d", writerId),
        new PartitionIdentifier(partitionKey, branchId),
        ImmutableList.of(new FsWriterMetrics.FileInfo(fileName, numRecords))
    );
  }
@Test
public void testWithPartitionKey() throws IOException {
File publishPath = Files.createTempDir();
try {
File part1 = new File(publishPath, "1-2-3-4");
part1.mkdir();
File part2 = new File(publishPath, "5-6-7-8");
part2.mkdir();
State s = buildDefaultState(1);
String md = new GlobalMetadata().toJson();
s.removeProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
s.setProp(ConfigurationKeys.DATA_PUBLISH_WRITER_METADATA_KEY, "true");
s.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
s.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishPath.getAbsolutePath());
s.setProp(ConfigurationKeys.DATA_PUBLISHER_APPEND_EXTRACT_TO_FINAL_DIR, "false");
s.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, "metadata.json");
WorkUnitState wuState1 = new WorkUnitState();
wuState1.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "1-2-3-4");
wuState1.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
addStateToWorkunit(s, wuState1);
WorkUnitState wuState2 = new WorkUnitState();
wuState2.setProp(ConfigurationKeys.WRITER_PARTITION_PATH_KEY, "5-6-7-8");
wuState2.setProp(ConfigurationKeys.WRITER_METADATA_KEY, md);
addStateToWorkunit(s, wuState2);
BaseDataPublisher publisher = new BaseDataPublisher(s);
publisher.publishMetadata(ImmutableList.of(wuState1, wuState2));
Assert.assertTrue(new File(part1, "metadata.json").exists());
Assert.assertTrue(new File(part2, "metadata.json").exists());
} finally {
FileUtils.deleteDirectory(publishPath);
}
}
/**
* Test lineage info is set on publishing single task
*/
@Test
public void testPublishSingleTask()
throws IOException {
WorkUnitState state = buildTaskState(1);
LineageInfo lineageInfo = LineageInfo.getLineageInfo(state.getTaskBroker()).get();
DatasetDescriptor source = new DatasetDescriptor("kafka", "testTopic");
lineageInfo.setSource(source, state);
BaseDataPublisher publisher = new BaseDataPublisher(state);
publisher.publishData(state);
Assert.assertTrue(state.contains("gobblin.event.lineage.branch.0.destination"));
Assert.assertFalse(state.contains("gobblin.event.lineage.branch.1.destination"));
}
/**
* Test lineage info is set on publishing multiple tasks
*/
@Test
public void testPublishMultiTasks()
throws IOException {
WorkUnitState state1 = buildTaskState(2);
WorkUnitState state2 = buildTaskState(2);
LineageInfo lineageInfo = LineageInfo.getLineageInfo(state1.getTaskBroker()).get();
DatasetDescriptor source = new DatasetDescriptor("kafka", "testTopic");
lineageInfo.setSource(source, state1);
lineageInfo.setSource(source, state2);
BaseDataPublisher publisher = new BaseDataPublisher(state1);
publisher.publishData(ImmutableList.of(state1, state2));
Assert.assertTrue(state1.contains("gobblin.event.lineage.branch.0.destination"));
Assert.assertTrue(state1.contains("gobblin.event.lineage.branch.1.destination"));
Assert.assertTrue(state2.contains("gobblin.event.lineage.branch.0.destination"));
Assert.assertTrue(state2.contains("gobblin.event.lineage.branch.1.destination"));
}
/**
* Test partition level lineages are set
*/
@Test
public void testPublishedPartitionsLineage()
throws IOException {
int numBranches = 2;
int numPartitionsPerBranch = 2;
WorkUnitState state = buildTaskState(numBranches);
LineageInfo lineageInfo = LineageInfo.getLineageInfo(state.getTaskBroker()).get();
DatasetDescriptor source = new DatasetDescriptor("kafka", "testTopic");
lineageInfo.setSource(source, state);
BaseDataPublisher publisher = new BaseDataPublisher(state);
// Set up writer partition descriptors
DatasetDescriptor datasetAtWriter = new DatasetDescriptor("dummy", "dummy");
for (int i = 0; i < numBranches; i++) {
List<PartitionDescriptor> partitions = new ArrayList<>();
for (int j = 0; j < numPartitionsPerBranch; j++) {
// Dummy dataset descriptor will be discarded by publisher
partitions.add(new PartitionDescriptor("partition" + i + j, datasetAtWriter));
}
String partitionsKey = "writer." + i + ".partitions";
state.setProp(partitionsKey, GSON.toJson(partitions, PARTITION_LIST_TYPE));
}
publisher.publish(ImmutableList.of(state));
Assert.assertTrue(state.contains("gobblin.event.lineage.branch.0.destination"));
Assert.assertTrue(state.contains("gobblin.event.lineage.branch.1.destination"));
Collection<LineageEventBuilder> events = LineageInfo.load(ImmutableList.of(state));
Assert.assertTrue(events.size() == 4);
// Find the partition lineage and assert
for (int i = 0; i < numBranches; i++) {
String outputPath = String.format("/data/output/branch%d/namespace/table", i);
DatasetDescriptor destinationDataset = new DatasetDescriptor("file", URI.create("file:///"), outputPath);
destinationDataset.addMetadata("fsUri", "file:///");
destinationDataset.addMetadata("branch", "" + i);
for (int j = 0; j < numPartitionsPerBranch; j++) {
LineageEventBuilder event = find(events, "partition" + i + j);
Assert.assertTrue(null != event);
Assert.assertEquals(event.getSource(), source);
Assert.assertEquals(event.getDestination(),
// Dataset written by the writer is discarded
new PartitionDescriptor("partition" + i + j, destinationDataset));
}
}
}
private static LineageEventBuilder find(Collection<LineageEventBuilder> events, String partitionName) {
for (LineageEventBuilder event : events) {
if (event.getDestination().getName().equals(partitionName)) {
return event;
}
}
return null;
}
public static class TestAdditionMerger implements MetadataMerger<String> {
private int sum = 0;
@Override
public void update(String metadata) {
sum += Integer.valueOf(metadata);
}
@Override
public void update(FsWriterMetrics metrics) {
}
@Override
public String getMergedMetadata() {
return String.valueOf(sum);
}
}
public static class TestMultiplicationMerger implements MetadataMerger<String> {
private int product = 1;
public TestMultiplicationMerger(Properties config) {
// testing ctor call
}
@Override
public void update(String metadata) {
product *= Integer.valueOf(metadata);
}
@Override
public String getMergedMetadata() {
return String.valueOf(product);
}
@Override
public void update(FsWriterMetrics metrics) {
}
}
private void addStateToWorkunit(State s, WorkUnitState wuState) {
for (Map.Entry<Object, Object> prop : s.getProperties().entrySet()) {
wuState.setProp((String) prop.getKey(), prop.getValue());
}
}
private File openMetadataFile(State state, int numBranches, int branchId) {
String dir = state.getProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR);
String fileName = state.getProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE);
if (numBranches > 1) {
fileName += "." + String.valueOf(branchId);
}
return new File(dir, fileName);
}
  /**
   * Builds a State configured with the given fork-branch count and a unique
   * metadata output dir/file pair under the system temp directory.
   */
  private State buildDefaultState(int numBranches)
      throws IOException {
    State state = new State();
    state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, numBranches);
    // Create-then-delete reserves a unique file name; the publisher recreates the file later.
    File tmpLocation = File.createTempFile("metadata", "");
    tmpLocation.delete();
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_DIR, tmpLocation.getParent());
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_METADATA_OUTPUT_FILE, tmpLocation.getName());
    return state;
  }
  /**
   * Builds a WorkUnitState wired to a task-scoped shared-resources broker
   * (GLOBAL -> JOB -> TASK), configured for the given number of fork branches
   * with per-branch output/working directories when numBranches > 1.
   */
  private WorkUnitState buildTaskState(int numBranches) {
    SharedResourcesBroker<GobblinScopeTypes> instanceBroker = SharedResourcesBrokerFactory
        .createDefaultTopLevelBroker(ConfigFactory.empty(), GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBroker<GobblinScopeTypes> jobBroker = instanceBroker
        .newSubscopedBuilder(new JobScopeInstance("LineageEventTest", String.valueOf(System.currentTimeMillis())))
        .build();
    SharedResourcesBroker<GobblinScopeTypes> taskBroker = jobBroker
        .newSubscopedBuilder(new TaskScopeInstance("LineageEventTestTask" + String.valueOf(System.currentTimeMillis())))
        .build();
    WorkUnitState state = new WorkUnitState(WorkUnit.createEmpty(), new State(), taskBroker);
    state.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "namespace");
    state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "table");
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH_TYPE, "namespace_table");
    state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, numBranches);
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, "/data/output");
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, "/data/working");
    // With multiple branches, each branch gets its own output/working subdirectory.
    if (numBranches > 1) {
      for (int i = 0; i < numBranches; i++) {
        state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + "." + i, "/data/output" + "/branch" + i);
        state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR + "." + i, "/data/working" + "/branch" + i);
      }
    }
    return state;
  }
}
| 2,829 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/publisher/DataPublisherFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.publisher;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SimpleScope;
import org.apache.gobblin.broker.SimpleScopeType;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.capability.Capability;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import lombok.Getter;
/**
* Tests for DataPublisherFactory
*/
public class DataPublisherFactoryTest {

  @Test
  public void testGetNonThreadSafePublisher()
      throws IOException {
    // Typed broker instead of the raw SharedResourcesBroker the original used.
    SharedResourcesBroker<SimpleScopeType> broker =
        SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(ConfigFactory.empty(),
            SimpleScopeType.GLOBAL.defaultScopeInstance());
    DataPublisher publisher1 = DataPublisherFactory.get(TestNonThreadsafeDataPublisher.class.getName(), null, broker);
    DataPublisher publisher2 = DataPublisherFactory.get(TestNonThreadsafeDataPublisher.class.getName(), null, broker);
    // Non-threadsafe publishers cannot be shared: each lookup yields a new instance.
    Assert.assertNotEquals(publisher1, publisher2);
    // Check capabilities
    Assert.assertTrue(publisher1.supportsCapability(DataPublisher.REUSABLE, Collections.EMPTY_MAP));
    Assert.assertFalse(publisher1.supportsCapability(Capability.THREADSAFE, Collections.EMPTY_MAP));
  }

  @Test
  public void testGetThreadSafePublisher()
      throws IOException, NotConfiguredException, NoSuchScopeException {
    SharedResourcesBroker<SimpleScopeType> broker =
        SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(ConfigFactory.empty(),
            SimpleScopeType.GLOBAL.defaultScopeInstance());
    SharedResourcesBroker<SimpleScopeType> localBroker1 =
        broker.newSubscopedBuilder(new SimpleScope<>(SimpleScopeType.LOCAL, "local1")).build();
    TestThreadsafeDataPublisher publisher1 = (TestThreadsafeDataPublisher)DataPublisherFactory.get(TestThreadsafeDataPublisher.class.getName(), null, broker);
    TestThreadsafeDataPublisher publisher2 = (TestThreadsafeDataPublisher)DataPublisherFactory.get(TestThreadsafeDataPublisher.class.getName(), null, broker);
    // Threadsafe publishers are shared within a scope.
    Assert.assertEquals(publisher1, publisher2);
    TestThreadsafeDataPublisher publisher3 =
        (TestThreadsafeDataPublisher)localBroker1.getSharedResource(new DataPublisherFactory<>(),
            new DataPublisherKey(TestThreadsafeDataPublisher.class.getName(), null));
    // A different (child) scope gets its own instance.
    Assert.assertNotEquals(publisher2, publisher3);
    TestThreadsafeDataPublisher publisher4 =
        (TestThreadsafeDataPublisher)localBroker1.getSharedResourceAtScope(new DataPublisherFactory<>(),
            new DataPublisherKey(TestThreadsafeDataPublisher.class.getName(), null), SimpleScopeType.LOCAL);
    // Same scope again: same shared instance.
    Assert.assertEquals(publisher3, publisher4);
    // Check capabilities
    Assert.assertTrue(publisher1.supportsCapability(DataPublisher.REUSABLE, Collections.EMPTY_MAP));
    Assert.assertTrue(publisher1.supportsCapability(Capability.THREADSAFE, Collections.EMPTY_MAP));
    // Check data publisher is not closed
    Assert.assertFalse(publisher1.isClosed());
    Assert.assertFalse(publisher2.isClosed());
    Assert.assertFalse(publisher3.isClosed());
    Assert.assertFalse(publisher4.isClosed());
    broker.close();
    // Closing the broker closes all shared publishers.
    Assert.assertTrue(publisher1.isClosed());
    Assert.assertTrue(publisher2.isClosed());
    Assert.assertTrue(publisher3.isClosed());
    Assert.assertTrue(publisher4.isClosed());
  }

  @Test
  public void testMultiThreadedGetNonThreadSafePublisher()
      throws InterruptedException, ExecutionException, IOException {
    SharedResourcesBroker<SimpleScopeType> broker =
        SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(ConfigFactory.empty(),
            SimpleScopeType.GLOBAL.defaultScopeInstance());
    ExecutorService service = Executors.newFixedThreadPool(40);
    List<Future<?>> futures = new ArrayList<>();
    for (int i = 0; i < 100000; i++) {
      futures.add(service.submit(new GetNonThreadSafePublisher(broker)));
    }
    // Propagate any failure from the worker tasks.
    for (Future<?> f : futures) {
      f.get();
    }
    service.shutdown();
    service.awaitTermination(100, TimeUnit.SECONDS);
  }

  /** Task that fetches a non-threadsafe publisher from the shared broker. */
  private static class GetNonThreadSafePublisher implements Runnable {
    private final SharedResourcesBroker<SimpleScopeType> broker;
    // The unused static "count" field from the original has been removed.

    GetNonThreadSafePublisher(SharedResourcesBroker<SimpleScopeType> broker) {
      this.broker = broker;
    }

    @Override
    public void run() {
      try {
        DataPublisher publisher1 = DataPublisherFactory.get(TestNonThreadsafeDataPublisher.class.getName(), null, this.broker);
        Assert.assertNotNull(publisher1);
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }

  /** Publisher stub that is reusable but NOT threadsafe; records whether it was closed. */
  private static class TestNonThreadsafeDataPublisher extends DataPublisher {
    @Getter
    private boolean isClosed = false;

    public TestNonThreadsafeDataPublisher(State state) {
      super(state);
    }

    @Override
    public void initialize() throws IOException {
    }

    @Override
    public void publishData(Collection<? extends WorkUnitState> states) throws IOException {
    }

    @Override
    public void publishMetadata(Collection<? extends WorkUnitState> states) throws IOException {
    }

    @Override
    public void close() throws IOException {
      isClosed = true;
    }

    @Override
    public boolean supportsCapability(Capability c, Map<String, Object> properties) {
      return c == DataPublisher.REUSABLE;
    }
  }

  /** Publisher stub that is both reusable and threadsafe. */
  private static class TestThreadsafeDataPublisher extends TestNonThreadsafeDataPublisher {
    public TestThreadsafeDataPublisher(State state) {
      super(state);
    }

    @Override
    public boolean supportsCapability(Capability c, Map<String, Object> properties) {
      return (c == Capability.THREADSAFE || c == DataPublisher.REUSABLE);
    }
  }
}
| 2,830 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/publisher/TimePartitionedStreamingDataPublisherTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.publisher;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.workunit.WorkUnit;
@Test
public class TimePartitionedStreamingDataPublisherTest {
  // Shared temp working dir and its publish subdir.
  // NOTE(review): these directories are never deleted after the test — consider cleanup.
  File tmpDir = Files.createTempDir();
  File publishDir = new File(tmpDir, "/publish");
  /**
   * Test when publish output dir does not exist,
   * it can still record the PublishOutputDirs in right format
   * @throws IOException
   */
  public void testPublishMultiTasks()
      throws IOException {
    WorkUnitState state1 = buildTaskState(2);
    WorkUnitState state2 = buildTaskState(2);
    TimePartitionedStreamingDataPublisher publisher = new TimePartitionedStreamingDataPublisher(state1);
    // The publish dir must not exist yet; publishing should still record output dirs.
    Assert.assertFalse(publishDir.exists());
    publisher.publishData(ImmutableList.of(state1, state2));
    // Each branch's hourly partition path should be recorded exactly once.
    Assert.assertTrue(publisher.getPublishOutputDirs().contains(new Path(
        state1.getProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + "." + 1)
            + "/namespace/table/hourly/2020/04/01/12")));
    Assert.assertTrue(publisher.getPublishOutputDirs().contains(new Path(
        state1.getProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + "." + 0)
            + "/namespace/table/hourly/2020/04/01/12")));
    Assert.assertEquals(publisher.getPublishOutputDirs().size(), 2);
  }
  /**
   * Builds a task state with the given number of fork branches; for each branch,
   * creates a writer output dir containing one data.avro file in an hourly
   * partition path so the publisher has something to publish.
   */
  private WorkUnitState buildTaskState(int numBranches) throws IOException{
    WorkUnitState state = new WorkUnitState(WorkUnit.createEmpty(), new State());
    state.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "namespace");
    state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "table");
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH_TYPE, "namespace_table");
    state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, numBranches);
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, publishDir.toString());
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, tmpDir.toString());
    if (numBranches > 1) {
      for (int i = 0; i < numBranches; i++) {
        state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + "." + i, publishDir.toString() + "/branch" + i);
        state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR + "." + i, tmpDir.toString() + "/branch" + i);
        Files.createParentDirs(new File(state.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR + "." + i)
            + "/namespace/table/hourly/2020/04/01/12/data.avro"));
        new File(state.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR + "." + i)
            + "/namespace/table/hourly/2020/04/01/12/data.avro").createNewFile();
      }
    }
    return state;
  }
}
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/RegexBasedPartitionedRetrieverTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.List;
import org.joda.time.DateTime;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
/**
 * Unit tests for {@link RegexBasedPartitionedRetriever}: checks that partition directories whose
 * names match a regex are discovered, ordered, and filtered by watermark and lead time.
 */
public class RegexBasedPartitionedRetrieverTest {
  // First capture group is the partition's epoch-millis timestamp.
  private static final String SNAPSHOT_REGEX = "(\\d+)-PT-\\d+";

  private Path tempDir;

  /** Fixed partition timestamps used by the tests, expressed as epoch millis. */
  private enum DateToUse {
    APR_1_2017(1491004800000L), APR_3_2017(1491177600000L), MAY_1_2017(1493596800000L),
    TWENTY_THREE_HOURS_AGO(new DateTime().minusHours(23).getMillis());

    private final long value;

    DateToUse(long val) {
      this.value = val;
    }

    public long getValue() {
      return value;
    }
  }

  @BeforeClass
  public void setupDirectories()
      throws IOException {
    // One "<epochMillis>-PT-123456" directory per DateToUse, each containing a single .txt file.
    tempDir = Files.createTempDirectory("regexTest");
    for (DateToUse d : DateToUse.values()) {
      Path subdir = tempDir.resolve(String.format("%d-PT-123456", d.getValue()));
      Files.createDirectory(subdir);
      Files.createFile(subdir.resolve("foo.txt"));
    }
  }

  @AfterClass
  public void cleanup() throws IOException {
    // Recursively remove the temp tree: delete files first, then the emptied directories.
    Files.walkFileTree(tempDir, new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
          throws IOException {
        Files.delete(file);
        return FileVisitResult.CONTINUE;
      }

      @Override
      public FileVisitResult postVisitDirectory(Path dir, IOException exc)
          throws IOException {
        Files.delete(dir);
        return FileVisitResult.CONTINUE;
      }
    });
  }

  @Test
  public void testSnapshotRegex() throws IOException {
    RegexBasedPartitionedRetriever r = new RegexBasedPartitionedRetriever("txt");
    r.init(buildSourceState());

    // Low watermark just below APR_3 excludes only the APR_1 partition.
    List<PartitionAwareFileRetriever.FileInfo> files =
        r.getFilesToProcess(DateToUse.APR_3_2017.getValue() - 1, 9999);
    Assert.assertEquals(files.size(), 3);
    verifyFile(files.get(0), DateToUse.APR_3_2017.getValue());
    verifyFile(files.get(1), DateToUse.MAY_1_2017.getValue());
    verifyFile(files.get(2), DateToUse.TWENTY_THREE_HOURS_AGO.getValue());
  }

  @Test
  public void testLeadtime() throws IOException {
    RegexBasedPartitionedRetriever r = new RegexBasedPartitionedRetriever("txt");
    SourceState state = buildSourceState();
    // A one-day lead time should additionally exclude the 23-hours-old partition.
    state.setProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_LEAD_TIME_GRANULARITY, "DAY");
    state.setProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_LEAD_TIME, "1");
    r.init(state);

    List<PartitionAwareFileRetriever.FileInfo> files =
        r.getFilesToProcess(DateToUse.APR_3_2017.getValue() - 1, 9999);
    Assert.assertEquals(files.size(), 2);
    verifyFile(files.get(0), DateToUse.APR_3_2017.getValue());
    verifyFile(files.get(1), DateToUse.MAY_1_2017.getValue());
  }

  /** Builds the source configuration shared by the tests, pointing at {@link #tempDir}. */
  private SourceState buildSourceState() {
    SourceState state = new SourceState();
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, "file:///");
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, tempDir.toString());
    state.setProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_PATTERN,
        SNAPSHOT_REGEX);
    return state;
  }

  /**
   * Asserts that {@code fileInfo} describes the foo.txt file of the partition whose timestamp
   * is {@code value}: watermark, path prefix/suffix, size and partition name must all match.
   */
  private void verifyFile(PartitionAwareFileRetriever.FileInfo fileInfo, long value) {
    org.apache.hadoop.fs.Path expectedStart = new org.apache.hadoop.fs.Path(tempDir.toUri());
    String expectedEnd = String.format("%d-PT-123456/foo.txt", value);

    Assert.assertEquals(fileInfo.getWatermarkMsSinceEpoch(), value);
    Assert.assertTrue(fileInfo.getFilePath().startsWith(expectedStart.toString()));
    Assert.assertTrue(fileInfo.getFilePath().endsWith(expectedEnd));
    Assert.assertEquals(fileInfo.getFileSize(), 0);
    Assert.assertEquals(fileInfo.getPartitionName(), String.format("%d-PT-123456", value));
  }
}
| 2,832 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/workunit/MultiWorkUnitWeightedQueueTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.workunit;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link MultiWorkUnitWeightedQueue}.
 */
@Test(groups = {"gobblin.source.workunit"})
public class MultiWorkUnitWeightedQueueTest {

  /**
   * Test for {@link MultiWorkUnitWeightedQueue#MultiWorkUnitWeightedQueue()}. Adds a series of
   * WorkUnits to an unbounded queue and checks, via
   * {@link MultiWorkUnitWeightedQueue#getQueueAsList()}, that each WorkUnit landed in its own
   * MultiWorkUnit.
   */
  @Test
  public void testDefaultConstructor() {
    final int numWorkUnits = 10;
    final int weight = 1;

    MultiWorkUnitWeightedQueue queue = new MultiWorkUnitWeightedQueue();
    WorkUnit emptyWorkUnit = WorkUnit.createEmpty();
    for (int i = 0; i < numWorkUnits; i++) {
      queue.addWorkUnit(emptyWorkUnit, weight);
    }

    List<WorkUnit> queueContents = queue.getQueueAsList();
    Assert.assertEquals(queueContents.size(), numWorkUnits);

    // With no size limit, every added WorkUnit gets a MultiWorkUnit of its own.
    for (WorkUnit element : queueContents) {
      Assert.assertEquals(((MultiWorkUnit) element).getWorkUnits().size(), 1);
    }
  }

  /**
   * Test for {@link MultiWorkUnitWeightedQueue#MultiWorkUnitWeightedQueue(int)}. Caps the number
   * of MultiWorkUnits, adds a series of WorkUnits, and checks via
   * {@link MultiWorkUnitWeightedQueue#getQueueAsList()} that the WorkUnits were spread evenly
   * over the capped number of MultiWorkUnits.
   */
  @Test
  public void testWithQueueSizeLimit() {
    final int maxMultiWorkUnits = 10;
    final int numWorkUnits = 100;
    final int weight = 1;

    MultiWorkUnitWeightedQueue queue = new MultiWorkUnitWeightedQueue(maxMultiWorkUnits);
    WorkUnit emptyWorkUnit = WorkUnit.createEmpty();
    for (int i = 0; i < numWorkUnits; i++) {
      queue.addWorkUnit(emptyWorkUnit, weight);
    }

    // Equal weights => each of the maxMultiWorkUnits buckets holds the same share.
    for (WorkUnit element : queue.getQueueAsList()) {
      Assert.assertEquals(((MultiWorkUnit) element).getWorkUnits().size(), numWorkUnits / maxMultiWorkUnits);
    }
  }
}
| 2,833 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/DatePartitionedAvroFileExtractorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.junit.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.DatePartitionedAvroFileSource;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.writer.AvroDataWriterBuilder;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.PartitionedDataWriter;
import org.apache.gobblin.writer.WriterOutputFormat;
import org.apache.gobblin.writer.partitioner.TimeBasedAvroWriterPartitioner;
import org.apache.gobblin.writer.partitioner.TimeBasedWriterPartitioner;
/**
 * Unit tests for {@link DatePartitionedAvroFileExtractor}.
 *
 * @author Lorand Bendig
 */
@Test(groups = { "gobblin.source.extractor." })
public class DatePartitionedAvroFileExtractorTest {

  private static final String SIMPLE_CLASS_NAME = DatePartitionedAvroFileExtractorTest.class.getSimpleName();

  private static final String TEST_ROOT_DIR = "/tmp/" + SIMPLE_CLASS_NAME + "-test";
  private static final String STAGING_DIR = TEST_ROOT_DIR + Path.SEPARATOR + "staging";
  private static final String OUTPUT_DIR = TEST_ROOT_DIR + Path.SEPARATOR + "job-output";
  private static final String FILE_NAME = SIMPLE_CLASS_NAME + "-name.avro";
  private static final String PARTITION_COLUMN_NAME = "timestamp";
  private static final String PREFIX = "minutes";
  private static final String SUFFIX = "test";
  private static final String SOURCE_ENTITY = "testsource";
  private static final String DATE_PATTERN = "yyyy/MM/dd/HH_mm";

  private static final int RECORD_SIZE = 4;

  // Minimal Avro schema: a single long field carrying the partition timestamp.
  private static final String AVRO_SCHEMA =
      "{" +
          "\"type\" : \"record\"," +
          "\"name\" : \"User\"," +
          "\"namespace\" : \"example.avro\"," +
          "\"fields\" : [" +
          "{" +
          "\"name\" : \"" + PARTITION_COLUMN_NAME + "\"," +
          "\"type\" : \"long\"" +
          "}" +
          "]" +
          "}";

  private Schema schema;
  private DataWriter<GenericRecord> writer;
  private DateTime startDateTime;

  // One timestamp per record; each record lands in its own minute-level partition directory.
  private long[] recordTimestamps = new long[RECORD_SIZE];

  private static final DateTimeZone TZ = DateTimeZone.forID(ConfigurationKeys.PST_TIMEZONE_NAME);

  /**
   * Writes {@value #RECORD_SIZE} records through a time-partitioned Avro writer so the tests
   * have minute-partitioned data under {@link #OUTPUT_DIR} to extract from.
   */
  @BeforeClass
  public void setUp() throws IOException {
    this.schema = new Schema.Parser().parse(AVRO_SCHEMA);

    // Set up datetime objects: anchor the first record 6 hours in the past, on the half hour.
    DateTime now = new DateTime(TZ).minusHours(6);
    this.startDateTime =
        new DateTime(now.getYear(), now.getMonthOfYear(), now.getDayOfMonth(), now.getHourOfDay(), 30, 0, TZ);

    // Create records: the first at startDateTime, the rest 4 hours later, shifted by 1 minute each.
    DateTime recordDt = startDateTime;
    recordTimestamps[0] = recordDt.getMillis();
    recordDt = recordDt.plusHours(4);
    for (int i = 1; i < RECORD_SIZE; i++) {
      recordDt = recordDt.plusMinutes(1);
      recordTimestamps[i] = recordDt.getMillis();
    }

    // Create dummy data partitioned by minutes.
    State state = new State();
    state.setProp(TimeBasedAvroWriterPartitioner.WRITER_PARTITION_COLUMNS, PARTITION_COLUMN_NAME);
    state.setProp(ConfigurationKeys.WRITER_BUFFER_SIZE, ConfigurationKeys.DEFAULT_BUFFER_SIZE);
    state.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, ConfigurationKeys.LOCAL_FS_URI);
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR, STAGING_DIR);
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, OUTPUT_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, SOURCE_ENTITY);
    state.setProp(ConfigurationKeys.WRITER_FILE_NAME, FILE_NAME);
    state.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_PATTERN, DATE_PATTERN);
    state.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_PREFIX, PREFIX);
    state.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_SUFFIX, SUFFIX);
    state.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TimeBasedAvroWriterPartitioner.class.getName());

    DataWriterBuilder<Schema, GenericRecord> builder = new AvroDataWriterBuilder()
        .writeTo(Destination.of(Destination.DestinationType.HDFS, state))
        .writeInFormat(WriterOutputFormat.AVRO)
        .withWriterId("writer-1")
        .withSchema(this.schema)
        .withBranches(1).forBranch(0);

    this.writer = new PartitionedDataWriter<Schema, GenericRecord>(builder, state);

    GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(this.schema);
    for (int i = 0; i < RECORD_SIZE; i++) {
      genericRecordBuilder.set(PARTITION_COLUMN_NAME, recordTimestamps[i]);
      this.writer.writeEnvelope(new RecordEnvelope<>(genericRecordBuilder.build()));
    }
    this.writer.close();
    this.writer.commit();
  }

  /**
   * Builds the {@link SourceState} configuration shared by all tests in this class. The returned
   * state has no partition prefix set; callers layer test-specific properties on top.
   *
   * @param dataDirectory value for {@link ConfigurationKeys#SOURCE_FILEBASED_DATA_DIRECTORY}
   */
  private SourceState buildCommonSourceState(String dataDirectory) {
    SourceState state = new SourceState();
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, ConfigurationKeys.LOCAL_FS_URI);
    state.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, SOURCE_ENTITY);
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, dataDirectory);
    state.setProp(ConfigurationKeys.SOURCE_ENTITY, SOURCE_ENTITY);
    state.setProp(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 2);
    state.setProp("date.partitioned.source.partition.pattern", DATE_PATTERN);
    // Min watermark one minute before the first record so every partition is eligible.
    state.setProp("date.partitioned.source.min.watermark.value", DateTimeFormat.forPattern(DATE_PATTERN).print(
        this.startDateTime.minusMinutes(1)));
    state.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, TableType.SNAPSHOT_ONLY);
    state.setProp("date.partitioned.source.partition.suffix", SUFFIX);
    return state;
  }

  @Test
  public void testJobStateNotCopiedToWorkUnit() {
    DatePartitionedAvroFileSource source = new DatePartitionedAvroFileSource();

    SourceState state = buildCommonSourceState(OUTPUT_DIR + Path.SEPARATOR + SOURCE_ENTITY);
    state.setProp("date.partitioned.source.partition.prefix", PREFIX);

    String dummyKey = "dummy.job.config";
    state.setProp(dummyKey, "dummy");

    List<WorkUnit> workunits = source.getWorkunits(state);
    Assert.assertEquals(workunits.size(), 4);

    // Job-level-only properties must not leak into the generated work units.
    for (WorkUnit wu : workunits) {
      if (wu instanceof MultiWorkUnit) {
        for (WorkUnit workUnit : ((MultiWorkUnit) wu).getWorkUnits()) {
          Assert.assertFalse(workUnit.contains(dummyKey));
        }
      } else {
        Assert.assertFalse(wu.contains(dummyKey));
      }
    }
  }

  @Test
  public void testReadPartitionsByMinute() throws IOException, DataRecordException {
    DatePartitionedAvroFileSource source = new DatePartitionedAvroFileSource();

    SourceState state = buildCommonSourceState(OUTPUT_DIR + Path.SEPARATOR + SOURCE_ENTITY);
    state.setProp("date.partitioned.source.partition.prefix", PREFIX);

    // Read data partitioned by minutes, i.e. each workunit is assigned records under the same
    // YYYY/MM/dd/HH_mm directory.
    List<WorkUnit> workunits = source.getWorkunits(state);
    Assert.assertEquals(workunits.size(), 4);
    verifyWorkUnits(workunits);
  }

  @Test
  public void testReadPartitionsByMinuteWithLeadtime() throws IOException, DataRecordException {
    DatePartitionedAvroFileSource source = new DatePartitionedAvroFileSource();

    SourceState state = buildCommonSourceState(OUTPUT_DIR + Path.SEPARATOR + SOURCE_ENTITY);
    state.setProp("date.partitioned.source.partition.prefix", PREFIX);
    state.setProp("date.partitioned.source.partition.lead_time.size", "3");
    state.setProp("date.partitioned.source.partition.lead_time.granularity", "HOUR");

    /*
     * Since lead time is 3 hours, only the first WorkUnit (which is 6 hours old, rest are 2hrs) should get
     * picked up
     */
    List<WorkUnit> workunits = source.getWorkunits(state);
    Assert.assertEquals(workunits.size(), 1);
    verifyWorkUnits(workunits, workunits.size());
  }

  @Test
  public void testWorksNoPrefix() throws IOException, DataRecordException {
    DatePartitionedAvroFileSource source = new DatePartitionedAvroFileSource();

    // Point the data directory straight at the prefix subfolder and set no partition prefix.
    SourceState state =
        buildCommonSourceState(OUTPUT_DIR + Path.SEPARATOR + SOURCE_ENTITY + Path.SEPARATOR + PREFIX);

    // Read data partitioned by minutes, i.e. each workunit is assigned records under the same
    // YYYY/MM/dd/HH_mm directory.
    List<WorkUnit> workunits = source.getWorkunits(state);
    Assert.assertEquals(workunits.size(), 4);
    verifyWorkUnits(workunits);
  }

  private void verifyWorkUnits(List<WorkUnit> workunits)
      throws IOException, DataRecordException {
    verifyWorkUnits(workunits, RECORD_SIZE);
  }

  /**
   * Reads one record from each of the first {@code expectedSize} work units and checks that it
   * carries the expected timestamp, both in the record payload and in the work unit's
   * date-partition property.
   */
  private void verifyWorkUnits(List<WorkUnit> workunits, int expectedSize) throws DataRecordException, IOException {
    for (int i = 0; i < expectedSize; i++) {
      WorkUnit workUnit = ((MultiWorkUnit) workunits.get(i)).getWorkUnits().get(0);
      WorkUnitState wuState = new WorkUnitState(workunits.get(i), new State());
      wuState.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, ConfigurationKeys.LOCAL_FS_URI);
      wuState.setProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL,
          workUnit.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL));
      try (DatePartitionedAvroFileExtractor extractor = new DatePartitionedAvroFileExtractor(wuState)) {
        GenericRecord record = extractor.readRecord(null);
        Assert.assertEquals(recordTimestamps[i], record.get(PARTITION_COLUMN_NAME));
        Assert.assertEquals(recordTimestamps[i], workUnit.getPropAsLong(ConfigurationKeys.WORK_UNIT_DATE_PARTITION_KEY));
      }
    }
  }

  @AfterClass
  public void tearDown() throws IOException {
    this.writer.close();
    FileUtils.deleteDirectory(new File(TEST_ROOT_DIR));
  }
}
| 2,834 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/filebased/FileBasedSourceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.filebased;
import com.google.common.collect.Sets;
import com.typesafe.config.ConfigFactory;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Set;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.dataset.DatasetConstants;
import org.apache.gobblin.dataset.DatasetDescriptor;
import org.apache.gobblin.dataset.Descriptor;
import org.apache.gobblin.dataset.PartitionDescriptor;
import org.apache.gobblin.source.DatePartitionedJsonFileSource;
import org.apache.gobblin.source.PartitionedFileSourceBase;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.extractor.hadoop.AvroFileSource;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.hadoop.fs.Path;
import org.junit.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.testng.collections.Lists;
@Test
public class FileBasedSourceTest {
  // Work-unit property key under which the source's lineage descriptor is recorded.
  private static final String SOURCE_LINEAGE_KEY = "gobblin.event.lineage.source";

  // Top-level broker and the job-scoped sub-broker handed to SourceState instances.
  SharedResourcesBroker<GobblinScopeTypes> instanceBroker;
  SharedResourcesBroker<GobblinScopeTypes> jobBroker;
  // Test data directory, resolved from the "/source" classpath resource.
  Path sourceDir;

  /** Creates the instance- and job-scoped brokers and resolves the test data directory. */
  @BeforeClass
  public void setup() {
    instanceBroker = SharedResourcesBrokerFactory
        .createDefaultTopLevelBroker(ConfigFactory.empty(), GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    jobBroker = instanceBroker
        .newSubscopedBuilder(new JobScopeInstance("LineageEventTest", String.valueOf(System.currentTimeMillis())))
        .build();
    sourceDir = new Path(getClass().getResource("/source").toString());
  }

  /**
   * When a prior job state exists but carries no filesystem snapshot, and a prior snapshot is
   * required, work-unit generation must fail with a descriptive RuntimeException.
   */
  @Test
  public void testFailJobWhenPreviousStateExistsButDoesNotHaveSnapshot() {
    try {
      DummyFileBasedSource source = new DummyFileBasedSource();

      // A prior work-unit state that deliberately lacks the fs snapshot property.
      WorkUnitState workUnitState = new WorkUnitState();
      workUnitState.setId("priorState");
      List<WorkUnitState> workUnitStates = Lists.newArrayList(workUnitState);

      State state = new State();
      state.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, Extract.TableType.SNAPSHOT_ONLY.toString());
      state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_PRIOR_SNAPSHOT_REQUIRED, true);

      SourceState sourceState = new SourceState(state, workUnitStates);

      source.getWorkunits(sourceState);

      Assert.fail("Expected RuntimeException, but no exceptions were thrown.");
    } catch (RuntimeException e) {
      // JUnit-style assertEquals: first argument is the expected message.
      Assert.assertEquals("No 'source.filebased.fs.snapshot' found on state of prior job", e.getMessage());
    }
  }

  /** The date-partitioned source should emit one work unit per matching partition (3 here). */
  @Test
  void numberOfWorkUnits()
      throws IOException {
    SourceState sourceState = new SourceState();
    sourceState.setBroker(jobBroker);
    DatePartitionedJsonFileSource source = new DatePartitionedJsonFileSource();

    initState(sourceState);

    List<WorkUnit> workUnits = source.getWorkunits(sourceState);
    Assert.assertEquals(3, workUnits.size());
  }

  /** Pointing the source at a non-existent data directory must raise a RuntimeException. */
  @Test(expectedExceptions = RuntimeException.class)
  public void testFailOnInvalidSourceDirectory() {
    SourceState sourceState = new SourceState();
    sourceState.setBroker(jobBroker);
    AvroFileSource source = new AvroFileSource();

    initState(sourceState);

    // Subdirectory that does not exist under the test data root.
    Path path = new Path(sourceDir, "testFailOnInvalidSourceDirectory");
    sourceState.setProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, path.toString());

    source.getWorkunits(sourceState);
  }

  /**
   * Checks the lineage descriptor recorded on each work unit: a dataset-level descriptor for the
   * plain file-based source, and per-partition descriptors for the partitioned source. Also
   * covers overriding the platform via SOURCE_FILEBASED_PLATFORM ("hdfs" default vs "file").
   */
  @Test
  public void testSourceLineage() {
    String dataset = Path.getPathWithoutSchemeAndAuthority(sourceDir).toString();

    SourceState sourceState = new SourceState();
    sourceState.setBroker(jobBroker);
    initState(sourceState);

    // Avro file based source
    AvroFileSource fileSource = new AvroFileSource();
    List<WorkUnit> workUnits = fileSource.getWorkunits(sourceState);
    DatasetDescriptor datasetDescriptor = new DatasetDescriptor("hdfs", URI.create("file:///"), dataset);
    for (WorkUnit workUnit : workUnits) {
      Assert.assertEquals(workUnit.getProp(SOURCE_LINEAGE_KEY), Descriptor.toJson(datasetDescriptor));
    }

    // Partitioned file based source
    // Test platform configuration
    sourceState.setProp(ConfigurationKeys.SOURCE_FILEBASED_PLATFORM, DatasetConstants.PLATFORM_FILE);
    DatePartitionedJsonFileSource partitionedFileSource = new DatePartitionedJsonFileSource();
    workUnits = partitionedFileSource.getWorkunits(sourceState);
    datasetDescriptor = new DatasetDescriptor("file", URI.create("file:///"), dataset);
    // Partitions above the 2017-11 min watermark present in the test data.
    Set<String> partitions = Sets.newHashSet("2017-12", "2018-01");
    for (WorkUnit workUnit : workUnits) {
      if (workUnit instanceof MultiWorkUnit) {
        DatasetDescriptor finalDatasetDescriptor = datasetDescriptor;
        ((MultiWorkUnit) workUnit).getWorkUnits().forEach( wu -> verifyPartitionSourceLineage(wu, partitions,
            finalDatasetDescriptor));
      } else {
        verifyPartitionSourceLineage(workUnit, partitions, datasetDescriptor);
      }
    }
  }

  /** Asserts the work unit's lineage is a PartitionDescriptor for an expected partition/dataset. */
  private void verifyPartitionSourceLineage(WorkUnit wu, Set<String> partitions, DatasetDescriptor datasetDescriptor) {
    PartitionDescriptor descriptor = (PartitionDescriptor) Descriptor.fromJson(wu.getProp(SOURCE_LINEAGE_KEY));
    Assert.assertTrue(partitions.contains(descriptor.getName()));
    Assert.assertEquals(descriptor.getDataset(), datasetDescriptor);
  }

  /** Applies the source configuration common to all tests (data dir, pattern, watermark, schema). */
  private void initState(State state) {
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, sourceDir.toString());
    state.setProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_PARTITION_PATTERN, "yyyy-MM");
    state.setProp(PartitionedFileSourceBase.DATE_PARTITIONED_SOURCE_MIN_WATERMARK_VALUE, "2017-11");
    state.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, "snapshot_only");
    state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, "file:///");
    state.setProp(ConfigurationKeys.SCHEMA_IN_SOURCE_DIR, "true");
    state.setProp(ConfigurationKeys.SCHEMA_FILENAME, "metadata.json");
  }

  /** Closes the job- and instance-level brokers created in {@link #setup()}. */
  @AfterClass
  public void cleanup()
      throws IOException {
    if (jobBroker != null) {
      jobBroker.close();
    }
    if (instanceBroker != null) {
      instanceBroker.close();
    }
  }

  /**
   * Minimal FileBasedSource stub: no real filesystem helper, no retries, and a fixed one-entry
   * snapshot, used to exercise the base class's prior-snapshot validation.
   */
  private static class DummyFileBasedSource extends FileBasedSource<String, String> {
    @Override
    public void initFileSystemHelper(State state)
        throws FileBasedHelperException {
    }

    @Override
    protected List<WorkUnit> getPreviousWorkUnitsForRetry(SourceState state) {
      return Lists.newArrayList();
    }

    @Override
    public List<String> getcurrentFsSnapshot(State state) {
      return Lists.newArrayList("SnapshotEntry");
    }

    @Override
    public Extractor<String, String> getExtractor(WorkUnitState state)
        throws IOException {
      return new DummyExtractor();
    }
  }

  /** No-op extractor returned by {@link DummyFileBasedSource}; yields no records. */
  private static class DummyExtractor implements Extractor<String, String> {
    @Override
    public String getSchema() {
      return "";
    }

    @Override
    public String readRecord(String reuse)
        throws DataRecordException, IOException {
      return null;
    }

    @Override
    public long getExpectedRecordCount() {
      return 0;
    }

    @Override
    public long getHighWatermark() {
      return 0;
    }

    @Override
    public void close()
        throws IOException {
    }
  }
}
| 2,835 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/filebased/TokenizedFileDownloaderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.filebased;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
/**
 * Unit tests for {@link TokenizedFileDownloader}.
 */
public class TokenizedFileDownloaderTest {

  /**
   * Splits a stream on a multi-character delimiter and checks that each token is returned in
   * order — including tokens that themselves end with a partial delimiter character.
   */
  @Test
  public void testRecordIterator()
      throws UnsupportedEncodingException {
    final String charset = "UTF-8";
    final String delimiter = "\n\r";
    String[] expectedRecords = {"record1", "record2\n", "record3\r"};

    InputStream inputStream = new ByteArrayInputStream(
        Joiner.on(delimiter).join(expectedRecords[0], expectedRecords[1], expectedRecords[2]).getBytes(charset));
    TokenizedFileDownloader.RecordIterator recordIterator =
        new TokenizedFileDownloader.RecordIterator(inputStream, delimiter, charset);

    for (String expected : expectedRecords) {
      Assert.assertTrue(recordIterator.hasNext());
      Assert.assertEquals(recordIterator.next(), expected);
    }
    Assert.assertFalse(recordIterator.hasNext());
  }
}
| 2,836 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/filebased/FileBasedExtractorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.filebased;
import java.io.IOException;
import com.google.common.base.Joiner;
import org.apache.commons.io.IOUtils;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
@Test
public class FileBasedExtractorTest {
public void testReadRecordWithNoFiles() throws DataRecordException, IOException {
WorkUnitState state = new WorkUnitState();
state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL, "");
FileBasedHelper fsHelper = Mockito.mock(FileBasedHelper.class);
FileBasedExtractor<String, String> extractor = new DummyFileBasedExtractor<String, String>(state, fsHelper);
Assert.assertEquals(getNumRecords(extractor), 0);
}
public void testReadRecordWithEmptyFiles() throws DataRecordException, IOException, FileBasedHelperException {
String file1 = "file1.txt";
String file2 = "file2.txt";
String file3 = "file3.txt";
WorkUnitState state = new WorkUnitState();
state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL, Joiner.on(",").join(file1, file2, file3));
FileBasedHelper fsHelper = Mockito.mock(FileBasedHelper.class);
Mockito.when(fsHelper.getFileStream(file1)).thenReturn(IOUtils.toInputStream(""));
Mockito.when(fsHelper.getFileStream(file2)).thenReturn(IOUtils.toInputStream(""));
Mockito.when(fsHelper.getFileStream(file3)).thenReturn(IOUtils.toInputStream(""));
FileBasedExtractor<String, String> extractor = new DummyFileBasedExtractor<String, String>(state, fsHelper);
Assert.assertEquals(getNumRecords(extractor), 0);
}
public void testReadRecordWithNonEmptyFiles() throws DataRecordException, IOException, FileBasedHelperException {
String file1 = "file1.txt";
String file2 = "file2.txt";
String file3 = "file3.txt";
WorkUnitState state = new WorkUnitState();
state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL, Joiner.on(",").join(file1, file2, file3));
FileBasedHelper fsHelper = Mockito.mock(FileBasedHelper.class);
Mockito.when(fsHelper.getFileStream(file1)).thenReturn(IOUtils.toInputStream("record1 \n record2"));
Mockito.when(fsHelper.getFileStream(file2)).thenReturn(IOUtils.toInputStream("record3 \n record4"));
Mockito.when(fsHelper.getFileStream(file3)).thenReturn(IOUtils.toInputStream("record5 \n record6 \n record7"));
FileBasedExtractor<String, String> extractor = new DummyFileBasedExtractor<String, String>(state, fsHelper);
Assert.assertEquals(getNumRecords(extractor), 7);
}
public void testReadRecordWithEmptyAndNonEmptyFiles() throws DataRecordException, IOException, FileBasedHelperException {
String file1 = "file1.txt";
String file2 = "file2.txt";
String file3 = "file3.txt";
WorkUnitState state = new WorkUnitState();
state.setProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL, Joiner.on(",").join(file1, file2, file3));
FileBasedHelper fsHelper = Mockito.mock(FileBasedHelper.class);
Mockito.when(fsHelper.getFileStream(file1)).thenReturn(IOUtils.toInputStream("record1 \n record2"));
Mockito.when(fsHelper.getFileStream(file2)).thenReturn(IOUtils.toInputStream(""));
Mockito.when(fsHelper.getFileStream(file3)).thenReturn(IOUtils.toInputStream("record3 \n record4 \n record5"));
FileBasedExtractor<String, String> extractor = new DummyFileBasedExtractor<String, String>(state, fsHelper);
Assert.assertEquals(getNumRecords(extractor), 5);
}
private int getNumRecords(Extractor<?, ?> extractor) throws DataRecordException, IOException {
int numRecords = 0;
while (extractor.readRecord(null) != null) {
numRecords++;
}
return numRecords;
}
private static class DummyFileBasedExtractor<S, D> extends FileBasedExtractor<S, D> {
public DummyFileBasedExtractor(WorkUnitState workUnitState, FileBasedHelper fsHelper) {
super(workUnitState, fsHelper);
}
}
}
| 2,837 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/watermark/TimestampWatermarkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.watermark;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import org.apache.gobblin.configuration.WorkUnitState;
/**
* Unit tests for {@link TimestampWatermark}.
*
* @author Ziyang Liu
*/
public class TimestampWatermarkTest {

  // Long-encoded timestamps in yyyyMMddHHmmss form. The previously declared
  // WATERMARK_VALUE and OPERATOR constants were never referenced and have been removed.
  private static final long LOW_WATERMARK_VALUE = 20130501130000L;
  private static final long HIGH_WATERMARK_VALUE = 20130502080000L;
  private static final String COLUMN = "my_column";

  private TimestampWatermark tsWatermark;
  // Format used both by the watermark under test and the reference implementation below.
  private final String watermarkFormat = "yyyyMMddHHmmss";
  private final WorkUnitState workunitState = new WorkUnitState();

  @BeforeClass
  public void setUpBeforeClass() throws Exception {
    this.tsWatermark = new TimestampWatermark(COLUMN, this.watermarkFormat);
    this.workunitState.setId("");
  }

  @Test(expectedExceptions = java.lang.IllegalArgumentException.class)
  public void testGetIntervalsPartitionIntervalNegative() throws Exception {
    this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, Integer.MIN_VALUE, 1000);
  }

  @Test(expectedExceptions = java.lang.IllegalArgumentException.class)
  public void testGetIntervalsPartitionIntervalZero() throws Exception {
    this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 0, 1000);
  }

  @Test
  public void testGetIntervalsPartitionIntervalLargerThanDiff() throws ParseException {
    Map<Long, Long> expected = getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1000);
    Map<Long, Long> actual = this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1000, 1000);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsNumIntervalsExceedsMaxInterval() throws ParseException {
    Map<Long, Long> expected = getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1000);
    Map<Long, Long> actual = this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1, 1);
    Assert.assertEquals(actual, expected);
  }

  @Test(expectedExceptions = java.lang.IllegalArgumentException.class)
  public void testGetIntervalsMaxIntervalsIsZero() {
    this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1, 0);
  }

  @Test(expectedExceptions = java.lang.IllegalArgumentException.class)
  public void testGetIntervalsMaxIntervalsIsNegative() {
    this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, HIGH_WATERMARK_VALUE, 1, -1);
  }

  @Test
  public void testGetIntervalsLowWatermarkExceedsHighWatermark() {
    // An inverted range yields no intervals.
    Map<Long, Long> expected = new HashMap<Long, Long>();
    Map<Long, Long> actual = this.tsWatermark.getIntervals(HIGH_WATERMARK_VALUE, LOW_WATERMARK_VALUE, 1, 10);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsLowWatermarkEqualsHighWatermark() throws ParseException {
    Map<Long, Long> expected = getIntervals(LOW_WATERMARK_VALUE, LOW_WATERMARK_VALUE, 1000);
    Map<Long, Long> actual = this.tsWatermark.getIntervals(LOW_WATERMARK_VALUE, LOW_WATERMARK_VALUE, 1, 10);
    Assert.assertEquals(actual, expected);
  }

  /**
   * Reference implementation: partitions [lowWatermarkValue, highWatermarkValue]
   * into hour-granularity intervals of {@code partitionInterval} hours, used as
   * the expected value for {@link TimestampWatermark#getIntervals}.
   *
   * @return map of interval start -> interval end (both yyyyMMddHHmmss longs);
   *         empty when the range is inverted or the interval is non-positive
   */
  private Map<Long, Long> getIntervals(long lowWatermarkValue, long highWatermarkValue, int partitionInterval)
      throws ParseException {
    Map<Long, Long> intervals = new HashMap<Long, Long>();
    if (lowWatermarkValue > highWatermarkValue || partitionInterval <= 0) {
      return intervals;
    }
    if (lowWatermarkValue == highWatermarkValue) {
      return ImmutableMap.of(lowWatermarkValue, highWatermarkValue);
    }
    final SimpleDateFormat inputFormat = new SimpleDateFormat(this.watermarkFormat);
    Date startTime = inputFormat.parse(String.valueOf(lowWatermarkValue));
    Date endTime = inputFormat.parse(String.valueOf(highWatermarkValue));
    Calendar cal = Calendar.getInstance();
    while (startTime.compareTo(endTime) < 0) {
      cal.setTime(startTime);
      cal.add(Calendar.HOUR, partitionInterval);
      Date nextTime = cal.getTime();
      // Clamp the final partial interval to the high watermark.
      if (nextTime.compareTo(endTime) > 0) {
        nextTime = endTime;
      }
      intervals.put(Long.parseLong(inputFormat.format(startTime)), Long.parseLong(inputFormat.format(nextTime)));
      startTime = nextTime;
    }
    return intervals;
  }
}
| 2,838 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/watermark/DateWatermarkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.watermark;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
/**
* Unit tests for {@link DateWatermark}.
*
* @author ydai
*/
@Test(groups = { "gobblin.source.extractor.watermark" })
public class DateWatermarkTest {

  // All timestamp literals use an uppercase 'L' suffix: the lowercase 'l'
  // previously used is easily mistaken for the digit '1'.

  /**
   * Test the method getIntervals, when the lowWaterMark is greater than the highWaterMark.
   */
  @Test
  public void testGetIntervalsOnNegDiff() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150206000000L;
    long hwm = 20150201000000L;
    int partition = 30;
    int maxInterval = 4;
    Map<Long, Long> results = datewm.getIntervals(lwm, hwm, partition, maxInterval);
    Assert.assertEquals(results.size(), 0);
  }

  /**
   * Test the method getIntervals, when the lowWaterMark is equal to the highWaterMark.
   */
  @Test
  public void testGetIntervalsOnZeroDiff() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150201000000L;
    long hwm = 20150201000000L;
    int partition = 30;
    int maxInterval = 4;
    Map<Long, Long> results = datewm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = ImmutableMap.of(lwm, hwm);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals.
   * Test when the number of intervals divided by the partition value is smaller than maxInterval,
   * result intervals should be based on the partition value.
   */
  @Test
  public void testGetIntervalsOnParition() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150201000000L;
    long hwm = 20150206000000L;
    // Partition by one day.
    int partition = 30;
    int maxInterval = 4;
    Map<Long, Long> results = datewm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = Maps.newHashMap();
    expected.put(20150201000000L, 20150203000000L);
    expected.put(20150203000000L, 20150205000000L);
    expected.put(20150205000000L, 20150206000000L);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals.
   * Test when the number of intervals divided by the partition value is greater than maxInterval,
   * the number of result intervals should be equal to the maxInterval.
   */
  @Test
  public void testGetIntervalsOnMaxInterval() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150201000000L;
    long hwm = 20150206000000L;
    int partition = 30;
    int maxInterval = 2;
    Map<Long, Long> results = datewm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = Maps.newHashMap();
    expected.put(20150201000000L, 20150204000000L);
    expected.put(20150204000000L, 20150206000000L);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals, when taking invalid input of maxIntervals.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsOnInvalidMaxInterval() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150201011111L;
    long hwm = 20150206111111L;
    int partition = 30;
    int maxInterval = -1;
    datewm.getIntervals(lwm, hwm, partition, maxInterval);
  }

  /**
   * Test the method getIntervals, when taking invalid input of partitionInterval.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsOnInvalidPartitionInterval() {
    DateWatermark datewm = new DateWatermark("Datewatermark", "test");
    long lwm = 20150201011111L;
    long hwm = 20150206111111L;
    // Below the minimum supported daily partition interval of 24 hours.
    int partition = 20;
    int maxInterval = 2;
    datewm.getIntervals(lwm, hwm, partition, maxInterval);
  }
}
| 2,839 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/watermark/HourWatermarkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.watermark;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
/**
* Unit tests for {@link HourWatermark}.
*
* @author ydai
*/
@Test(groups = { "gobblin.source.extractor.watermark" })
public class HourWatermarkTest {

  // All timestamp literals use an uppercase 'L' suffix: the lowercase 'l'
  // previously used is easily mistaken for the digit '1'.

  /**
   * Test the method getIntervals, when the lowWaterMark is greater than the highWaterMark.
   */
  @Test
  public void testGetIntervalsOnNegDiff() {
    HourWatermark hourwm = new HourWatermark("Hourwatermark", "test");
    long lwm = 20150201060000L;
    long hwm = 20150201020000L;
    int partition = 30;
    int maxInterval = 4;
    Map<Long, Long> results = hourwm.getIntervals(lwm, hwm, partition, maxInterval);
    Assert.assertEquals(results.size(), 0);
  }

  /**
   * Test the method getIntervals, when the lowWaterMark is equal to the highWaterMark.
   */
  @Test
  public void testGetIntervalsOnZeroDiff() {
    // Local renamed from the copy-pasted "datewm"; this is an HourWatermark.
    HourWatermark hourwm = new HourWatermark("Hourwatermark", "test");
    long lwm = 20150201010000L;
    long hwm = 20150201010000L;
    int partition = 30;
    int maxInterval = 4;
    Map<Long, Long> results = hourwm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = ImmutableMap.of(lwm, hwm);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals.
   * Test when the number of intervals divided by the partition value is smaller than maxInterval,
   * result intervals should be based on the partition value.
   */
  @Test
  public void testGetIntervalsOnParition() {
    HourWatermark hourwm = new HourWatermark("Hourwatermark", "test");
    long lwm = 20150201010000L;
    long hwm = 20150201050000L;
    // Partition by 2 hours.
    int partition = 2;
    int maxInterval = 4;
    Map<Long, Long> results = hourwm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = Maps.newHashMap();
    expected.put(20150201010000L, 20150201030000L);
    expected.put(20150201030000L, 20150201050000L);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals.
   * Test when the number of intervals divided by the partition value is greater than maxInterval,
   * the number of result intervals should be equal to the maxInterval.
   */
  @Test
  public void testGetIntervalsOnMaxInterval() {
    HourWatermark hourwm = new HourWatermark("Hourwatermark", "test");
    long lwm = 20150201011111L;
    long hwm = 20150202011111L;
    int partition = 2;
    int maxInterval = 2;
    Map<Long, Long> results = hourwm.getIntervals(lwm, hwm, partition, maxInterval);
    Map<Long, Long> expected = Maps.newHashMap();
    expected.put(20150201010000L, 20150201130000L);
    expected.put(20150201130000L, 20150202010000L);
    Assert.assertEquals(results, expected);
  }

  /**
   * Test the method getIntervals, when taking invalid input of maxIntervals.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsOnInvalidInputs() {
    HourWatermark hourwm = new HourWatermark("Hourwatermark", "test");
    long lwm = 20150201011111L;
    long hwm = 20150202111111L;
    int partition = 2;
    int maxInterval = -1;
    hourwm.getIntervals(lwm, hwm, partition, maxInterval);
  }

  /**
   * Test the method getIntervals, when taking invalid input of partitionInterval.
   */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsOnInvalidPartitionInterval() {
    // NOTE(review): "Datewatermark" looks like a copy-paste from DateWatermarkTest;
    // the name is irrelevant to the expected exception, so it is left unchanged.
    HourWatermark hourwm = new HourWatermark("Datewatermark", "test");
    long lwm = 20150201011111L;
    long hwm = 20150206111111L;
    int partition = -1;
    int maxInterval = 2;
    hourwm.getIntervals(lwm, hwm, partition, maxInterval);
  }
}
| 2,840 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/watermark/SimpleWatermarkTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.watermark;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import org.apache.gobblin.source.extractor.extract.QueryBasedExtractor;
/**
* Unit tests for {@link SimpleWatermark}.
*
* @author Ziyang Liu
*/
public class SimpleWatermarkTest {

  private static final String COLUMN = "my_column";
  private static final String GREATER_THAN = ">=";

  private SimpleWatermark simpleWatermark;

  @BeforeClass
  public void setUpBeforeClass() {
    this.simpleWatermark = new SimpleWatermark(COLUMN, "no_format_needed");
  }

  @Test
  public void testGetWatermarkCondition() {
    QueryBasedExtractor<?, ?> extractor = null;
    // Normal case. Arguments follow TestNG's assertEquals(actual, expected) order,
    // consistent with the rest of this class.
    Assert.assertEquals(this.simpleWatermark.getWatermarkCondition(extractor, Long.MAX_VALUE, GREATER_THAN),
        COLUMN + " " + GREATER_THAN + " " + Long.MAX_VALUE);
    // Operator is null: the literal string "null" is embedded in the condition.
    Assert.assertEquals(this.simpleWatermark.getWatermarkCondition(extractor, Long.MIN_VALUE, null),
        COLUMN + " null " + Long.MIN_VALUE);
  }

  // Invalid-argument tests use @Test(expectedExceptions = ...) instead of the
  // previous try/catch + Assert.fail pattern, matching the sibling watermark tests.

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsPartitionIntervalNegative() {
    this.simpleWatermark.getIntervals(0, 100, Integer.MIN_VALUE, 1000);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsPartitionIntervalZero() {
    this.simpleWatermark.getIntervals(0, 100, 0, 1000);
  }

  @Test
  public void testGetIntervalsPartitionIntervalLargerThanDiff() {
    Map<Long, Long> expected = getIntervals(0, 100, 110);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(0, 100, 110, 1000);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsNumIntervalsExceedsMaxInterval() {
    int partitionInterval = 100 / 7 + 1;
    Map<Long, Long> expected = getIntervals(0, 100, partitionInterval);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(0, 100, 3, 7);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsMaxIntervalsIsOne() {
    Map<Long, Long> expected = getIntervals(0, 100, 100);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(0, 100, 1, 1);
    Assert.assertEquals(actual, expected);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsMaxIntervalsIsZero() {
    this.simpleWatermark.getIntervals(0, 100, 1, 0);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testGetIntervalsMaxIntervalsIsNegative() {
    this.simpleWatermark.getIntervals(0, 100, 1, -1);
  }

  @Test
  public void testGetIntervalsLowWatermarkEqualsHighWatermark() {
    Map<Long, Long> expected = getIntervals(100, 100, 1);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(100, 100, 10, 10);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsLowWatermarkExceedsHighWatermark() {
    // An inverted range yields no intervals.
    Map<Long, Long> expected = new HashMap<Long, Long>();
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(110, 100, 10, 10);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsHighWatermarkIsLongMaxValue() {
    Map<Long, Long> expected = getIntervals(Long.MAX_VALUE - 100, Long.MAX_VALUE, 10);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(Long.MAX_VALUE - 100, Long.MAX_VALUE, 10, 100);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetIntervalsLowWatermarkIsLongMinValue() {
    Map<Long, Long> expected = getIntervals(Long.MIN_VALUE, Long.MIN_VALUE + 100, 10);
    Map<Long, Long> actual = this.simpleWatermark.getIntervals(Long.MIN_VALUE, Long.MIN_VALUE + 100, 10, 100);
    Assert.assertEquals(actual, expected);
  }

  @Test
  public void testGetDeltaNumForNextWatermark() {
    Assert.assertEquals(this.simpleWatermark.getDeltaNumForNextWatermark(), 1);
  }

  /**
   * Reference implementation: partitions [lowWatermarkValue, highWatermarkValue]
   * into intervals of size {@code partitionInterval}, guarding against overflow
   * near Long.MAX_VALUE. Used as the expected value for
   * {@link SimpleWatermark#getIntervals}.
   *
   * @return map of interval start -> interval end; empty when the range is
   *         inverted or the interval is non-positive
   */
  private Map<Long, Long> getIntervals(long lowWatermarkValue, long highWatermarkValue, int partitionInterval) {
    Map<Long, Long> intervals = new HashMap<Long, Long>();
    if (lowWatermarkValue > highWatermarkValue || partitionInterval <= 0) {
      return intervals;
    }
    if (lowWatermarkValue == highWatermarkValue) {
      return ImmutableMap.of(lowWatermarkValue, highWatermarkValue);
    }
    boolean overflow = false;
    // Primitive 'long' index (previously a boxed Long, which autoboxed on every step).
    for (long i = lowWatermarkValue; i < highWatermarkValue && !overflow; ) {
      overflow = (Long.MAX_VALUE - partitionInterval < i);
      long end = overflow ? Long.MAX_VALUE : Math.min(i + partitionInterval, highWatermarkValue);
      intervals.put(i, end);
      i = end;
    }
    return intervals;
  }
}
| 2,841 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/hadoop/HadoopFsHelperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.hadoop;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.source.extractor.filebased.FileBasedHelperException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.testng.Assert;
import org.testng.annotations.Test;
public class HadoopFsHelperTest {

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testConnectFailsWithS3URLWithoutAWSCredentials() throws FileBasedHelperException {
    Configuration conf = new Configuration(); // plain conf, no S3 credentials
    SourceState sourceState = new SourceState();
    sourceState.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI,
        "s3://support.elasticmapreduce/spark/install-spark/");
    HadoopFsHelper fsHelper = new HadoopFsHelper(sourceState, conf);
    fsHelper.connect();
  }

  @Test
  public void testGetFileStreamSucceedsWithUncompressedFile() throws FileBasedHelperException, IOException {
    Assert.assertEquals(readTestResource("/source/simple.tsv"), "A\t1\nB\t2\n");
  }

  @Test
  public void testGetFileStreamSucceedsWithGZIPFile() throws FileBasedHelperException, IOException {
    // The helper is expected to transparently decompress .gz content.
    Assert.assertEquals(readTestResource("/source/simple.tsv.gz"), "A\t1\nB\t2\n");
  }

  /**
   * Connects a {@link HadoopFsHelper} rooted at the test-resource directory and
   * returns the UTF-8 contents of {@code resourcePath}. Extracted because both
   * stream tests duplicated this setup verbatim; the stream is now closed
   * (previously it leaked).
   */
  private String readTestResource(String resourcePath) throws FileBasedHelperException, IOException {
    SourceState sourceState = new SourceState();
    URL rootUrl = getClass().getResource("/source/");
    sourceState.setProp(ConfigurationKeys.SOURCE_FILEBASED_FS_URI, rootUrl.toString());
    HadoopFsHelper fsHelper = new HadoopFsHelper(sourceState);
    fsHelper.connect();
    URL url = getClass().getResource(resourcePath);
    try (InputStream in = fsHelper.getFileStream(url.toString())) {
      return IOUtils.toString(in, "UTF-8");
    }
  }
}
| 2,842 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/hadoop/OldApiHadoopFileInputSourceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.hadoop;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Unit tests for {@link OldApiHadoopFileInputSource}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.source.extractor.hadoop"})
public class OldApiHadoopFileInputSourceTest {

  protected static final String TEXT = "This is a test text file";

  protected SourceState sourceState;

  /** Creates the on-disk text fixture and the source state pointing at it. */
  @BeforeClass
  public void setUp() throws IOException {
    File textFile = new File(getFileDir(), "test.txt");
    File dir = textFile.getParentFile();
    if (!dir.exists() && !dir.mkdir()) {
      throw new IOException("Failed to create directory: " + dir);
    }
    if (!textFile.createNewFile()) {
      throw new IOException("Failed to create text file: " + textFile);
    }
    Files.write(TEXT, textFile, ConfigurationKeys.DEFAULT_CHARSET_ENCODING);
    this.sourceState = new SourceState();
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, Extract.TableType.SNAPSHOT_ONLY.toString());
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "test");
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "test");
    this.sourceState.setProp(HadoopFileInputSource.FILE_INPUT_PATHS_KEY, textFile.getAbsolutePath());
  }

  /** Verifies one work unit is created and its extractor yields exactly the fixture text. */
  @Test
  public void testGetWorkUnitsAndExtractor() throws IOException, DataRecordException {
    OldApiHadoopFileInputSource<String, Text, LongWritable, Text> fileInputSource = new TestHadoopFileInputSource();
    List<WorkUnit> workUnitList = fileInputSource.getWorkunits(this.sourceState);
    Assert.assertEquals(workUnitList.size(), 1);
    WorkUnitState workUnitState = new WorkUnitState(workUnitList.get(0));
    Closer closer = Closer.create();
    try {
      // Register the extractor with the closer so it is actually closed; the
      // closer was previously created but never given anything to close.
      OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text> extractor = closer.register(
          (OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text>) fileInputSource.getExtractor(
              workUnitState));
      Text text = extractor.readRecord(null);
      Assert.assertEquals(text.toString(), TEXT);
      // A second read signals end-of-data.
      Assert.assertNull(extractor.readRecord(null));
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }

  @AfterClass
  public void tearDown() throws IOException {
    File dir = new File(getFileDir());
    FileUtils.deleteDirectory(dir);
  }

  /** Directory for the on-disk fixture; overridable by subclasses. */
  protected String getFileDir() {
    return OldApiHadoopFileInputSourceTest.class.getSimpleName();
  }

  /** Test source wiring the old-API record reader into a test extractor. */
  private static class TestHadoopFileInputSource extends OldApiHadoopTextInputSource<String> {
    @Override
    protected OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text> getExtractor(
        WorkUnitState workUnitState, RecordReader<LongWritable, Text> recordReader,
        FileSplit fileSplit, boolean readKeys) {
      return new TestHadoopFileInputExtractor(recordReader, readKeys);
    }
  }

  /** Test extractor with a trivial (empty) schema. */
  private static class TestHadoopFileInputExtractor
      extends OldApiHadoopFileInputExtractor<String, Text, LongWritable, Text> {
    public TestHadoopFileInputExtractor(RecordReader<LongWritable, Text> recordReader, boolean readKeys) {
      super(recordReader, readKeys);
    }

    @Override
    public String getSchema() {
      return "";
    }
  }
}
| 2,843 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/hadoop/HadoopFileInputSourceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.hadoop;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * Unit tests for {@link HadoopFileInputSource}.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.source.extractor.hadoop"})
public class HadoopFileInputSourceTest extends OldApiHadoopFileInputSourceTest {

  @BeforeClass
  public void setUp() throws IOException {
    super.setUp();
  }

  /**
   * Same scenario as the parent test, but exercising the new (mapreduce) API classes.
   */
  @Test
  public void testGetWorkUnitsAndExtractor() throws IOException, DataRecordException {
    HadoopFileInputSource<String, Text, LongWritable, Text> fileInputSource = new TestHadoopFileInputSource();

    List<WorkUnit> workUnitList = fileInputSource.getWorkunits(this.sourceState);
    Assert.assertEquals(workUnitList.size(), 1);

    WorkUnitState workUnitState = new WorkUnitState(workUnitList.get(0));

    Closer closer = Closer.create();
    try {
      // Register the extractor with the closer so closer.close() actually releases the
      // underlying record reader; previously nothing was registered and the reader leaked.
      HadoopFileInputExtractor<String, Text, LongWritable, Text> extractor = closer.register(
          (HadoopFileInputExtractor<String, Text, LongWritable, Text>) fileInputSource.getExtractor(
              workUnitState));

      Text text = extractor.readRecord(null);
      Assert.assertEquals(text.toString(), TEXT);
      // Reading past the single record must signal end-of-data with null
      Assert.assertNull(extractor.readRecord(null));
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }

  @AfterClass
  public void tearDown() throws IOException {
    super.tearDown();
  }

  @Override
  protected String getFileDir() {
    return HadoopFileInputSourceTest.class.getSimpleName();
  }

  private static class TestHadoopFileInputSource extends HadoopTextInputSource<String> {
    @Override
    protected HadoopFileInputExtractor<String, Text, LongWritable, Text> getExtractor(WorkUnitState workUnitState,
        RecordReader<LongWritable, Text> recordReader, FileSplit fileSplit, boolean readKeys) {
      return new TestHadoopFileInputExtractor(recordReader, readKeys);
    }
  }

  private static class TestHadoopFileInputExtractor
      extends HadoopFileInputExtractor<String, Text, LongWritable, Text> {
    public TestHadoopFileInputExtractor(RecordReader<LongWritable, Text> recordReader, boolean readKeys) {
      super(recordReader, readKeys);
    }

    @Override
    public String getSchema() {
      return "";
    }
  }
}
| 2,844 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/extract/QueryBasedSourceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.extract;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.configuration.WorkUnitState.WorkingState;
import org.apache.gobblin.source.extractor.extract.QueryBasedSource.SourceEntity;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.dataset.DatasetUtils;
/**
 * Unit tests for {@link QueryBasedSource}.
 */
public class QueryBasedSourceTest {

  /**
   * Verifies {@link SourceEntity} construction from a raw entity name and from a
   * {@link State}: '$' in the source entity name is replaced (with '_') in the
   * destination table name only, while the source entity name and dataset name keep
   * the original value.
   */
  @Test
  public void testSourceEntity() {
    SourceEntity se1 = SourceEntity.fromSourceEntityName("SourceEntity1");
    Assert.assertEquals(se1.getSourceEntityName(), "SourceEntity1");
    Assert.assertEquals(se1.getDestTableName(), "SourceEntity1");
    Assert.assertEquals(se1.getDatasetName(), "SourceEntity1");
    // '$' is sanitized in the dest table name but preserved elsewhere
    SourceEntity se2 = SourceEntity.fromSourceEntityName("SourceEntity$2");
    Assert.assertEquals(se2.getSourceEntityName(), "SourceEntity$2");
    Assert.assertEquals(se2.getDestTableName(), "SourceEntity_2");
    Assert.assertEquals(se2.getDatasetName(), "SourceEntity$2");
    // When both SOURCE_ENTITY and EXTRACT_TABLE_NAME_KEY are set, the table name wins
    // for the destination table while the entity name stays the dataset name
    State st1 = new State();
    st1.setProp(ConfigurationKeys.SOURCE_ENTITY, "SourceEntity3");
    st1.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "SourceEntity3_Table");
    Optional<SourceEntity> se3 = SourceEntity.fromState(st1);
    Assert.assertTrue(se3.isPresent());
    Assert.assertEquals(se3.get().getSourceEntityName(), "SourceEntity3");
    Assert.assertEquals(se3.get().getDestTableName(), "SourceEntity3_Table");
    Assert.assertEquals(se3.get().getDatasetName(), "SourceEntity3");
    Assert.assertEquals(se3.get(), new SourceEntity("SourceEntity3", "SourceEntity3_Table"));
    // Only SOURCE_ENTITY set: equivalent to fromSourceEntityName
    State st2 = new State();
    st2.setProp(ConfigurationKeys.SOURCE_ENTITY, "SourceEntity$4");
    Optional<SourceEntity> se4 = SourceEntity.fromState(st2);
    Assert.assertTrue(se4.isPresent());
    Assert.assertEquals(se4.get(), SourceEntity.fromSourceEntityName("SourceEntity$4"));
    // Only EXTRACT_TABLE_NAME_KEY set: the table name doubles as the entity name
    State st3 = new State();
    st3.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "Table5");
    Optional<SourceEntity> se5 = SourceEntity.fromState(st3);
    Assert.assertTrue(se5.isPresent());
    Assert.assertEquals(se5.get(), SourceEntity.fromSourceEntityName("Table5"));
  }

  // Helper: resolve the configured entities, then apply the black/whitelist filters
  private Set<SourceEntity> getFilteredEntities(SourceState state) {
    Set<SourceEntity> unfiltered = QueryBasedSource.getSourceEntitiesHelper(state);
    return QueryBasedSource.getFilteredSourceEntitiesHelper(state, unfiltered);
  }

  /**
   * Exercises blacklist/whitelist filtering of source entities, including the
   * interaction with SOURCE_ENTITY and EXTRACT_TABLE_NAME_KEY.
   */
  @Test
  public void testGetFilteredSourceEntities() {
    {
      // Blacklist removes Table1 and BadTable1 (regex match); Table2 and Table3 survive
      SourceState state = new SourceState();
      state.setProp(QueryBasedSource.ENTITY_BLACKLIST, "Table1,BadTable.*");
      state.setProp(ConfigurationKeys.SOURCE_ENTITIES, "Table1,Table2,BadTable1,Table3");
      state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "PropShouldBeIgnored");
      Set<SourceEntity> res = getFilteredEntities(state);
      Assert.assertEquals(res.size(), 2);
      Assert.assertTrue(res.contains(SourceEntity.fromSourceEntityName("Table2")),
          "Missing Table2 in " + res);
      Assert.assertTrue(res.contains(SourceEntity.fromSourceEntityName("Table3")),
          "Missing Table3 in " + res);
    }
    {
      // Whitelist further restricts the surviving entities to Table3 only
      SourceState state = new SourceState();
      state.setProp(QueryBasedSource.ENTITY_BLACKLIST, "Table1,BadTable.*");
      state.setProp(QueryBasedSource.ENTITY_WHITELIST, "Table3");
      state.setProp(ConfigurationKeys.SOURCE_ENTITIES, "Table1,Table2,BadTable1,Table3");
      state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "PropShouldBeIgnored");
      Set<SourceEntity> res = getFilteredEntities(state);
      Assert.assertEquals(res.size(), 1);
      Assert.assertTrue(res.contains(SourceEntity.fromSourceEntityName("Table3")),
          "Missing Table3 in " + res);
    }
    {
      // With SOURCE_ENTITY (single entity), EXTRACT_TABLE_NAME_KEY is honored
      SourceState state = new SourceState();
      state.setProp(QueryBasedSource.ENTITY_BLACKLIST, "Table1,BadTable.*");
      state.setProp(QueryBasedSource.ENTITY_WHITELIST, "Table3");
      state.setProp(ConfigurationKeys.SOURCE_ENTITY, "Table3");
      state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "PropShouldNotBeIgnored");
      Set<SourceEntity> res = getFilteredEntities(state);
      SourceEntity expected = new SourceEntity("Table3", "PropShouldNotBeIgnored");
      Assert.assertEquals(res.size(), 1);
      Assert.assertTrue(res.contains(expected), "Missing Table3 in " + res);
    }
    {
      // Whitelist that excludes the single configured entity leaves nothing
      SourceState state = new SourceState();
      state.setProp(QueryBasedSource.ENTITY_BLACKLIST, "Table1,BadTable.*");
      state.setProp(QueryBasedSource.ENTITY_WHITELIST, "Table5");
      state.setProp(ConfigurationKeys.SOURCE_ENTITY, "Table3");
      state.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "PropShouldNotBeIgnored");
      Set<SourceEntity> res = getFilteredEntities(state);
      Assert.assertEquals(res.size(), 0);
    }
  }

  /**
   * Dataset-specific properties must be looked up by the source entity name, not the
   * destination table name.
   */
  @Test
  public void testGetTableSpecificPropsFromState() {
    SourceState state = new SourceState();
    state.setProp(DatasetUtils.DATASET_SPECIFIC_PROPS,
        "[{\"dataset\":\"Entity1\", \"value\": 1}, {\"dataset\":\"Table2\", \"value\":2}]");
    // We should look in the dataset specific properties using the entity name, not table name
    SourceEntity se1 = new SourceEntity("Entity1", "Table2");
    SourceEntity se3 = new SourceEntity("Entity3", "Table3");
    Set<SourceEntity> entities = ImmutableSet.of(se1, se3);
    Map<SourceEntity, State> datasetProps =
        QueryBasedSource.getTableSpecificPropsFromState(entities, state);
    // Value 1 should be returned for se1; no props should be returned for se3
    Assert.assertEquals(datasetProps.size(), 1);
    Assert.assertTrue(datasetProps.containsKey(se1));
    State se1Props = datasetProps.get(se1);
    Assert.assertEquals(se1Props.getProp("value"), "1");
  }

  /**
   * Verifies per-table previous-watermark computation: tables with a failed or
   * empty work unit fall back to the minimum low watermark, fully-successful tables
   * advance to the maximum high watermark.
   */
  @Test
  public void testGetPreviousWatermarksForAllTables() {
    {
      State prevJobState = new SourceState();
      prevJobState.setProp(ConfigurationKeys.JOB_COMMIT_POLICY_KEY, "full");
      Extract[] extracts = new Extract[3];
      SourceEntity[] sourceEntities = new SourceEntity[extracts.length];
      List<WorkUnitState> prevWuStates = new ArrayList<>();
      // Simulate previous execution with 3 tables and 9 workunits
      // All work units for the Table1 failed.
      // Workunit 0 for Table0 returned no results
      for (int i = 0; i < extracts.length; ++i) {
        String sourceEntityName = "Table$" + i;
        SourceEntity sourceEntity = SourceEntity.fromSourceEntityName(sourceEntityName);
        sourceEntities[i] = sourceEntity;
        extracts[i] = new Extract(TableType.APPEND_ONLY, "", sourceEntity.getDestTableName());
        for (int j = 0; j < 3; ++j) {
          WorkUnit wu = new WorkUnit(extracts[i]);
          wu.setProp(ConfigurationKeys.SOURCE_ENTITY, sourceEntity.getSourceEntityName());
          wu.setProp(ConfigurationKeys.WORK_UNIT_LOW_WATER_MARK_KEY, 10 * i);
          wu.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, sourceEntity.getDestTableName());
          WorkUnitState wuState = new WorkUnitState(wu, prevJobState);
          wuState.setProp(ConfigurationKeys.WORK_UNIT_STATE_RUNTIME_HIGH_WATER_MARK, 20 * i);
          wuState.setProp(ConfigurationKeys.WORK_UNIT_WORKING_STATE_KEY,
              i == 1 ? WorkingState.FAILED.toString() : WorkingState.SUCCESSFUL.toString() );
          // Expected row count (i + j) * 5 is 0 for i == j == 0, i.e. no records read
          wuState.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, (i + j) * 5);
          prevWuStates.add(wuState);
        }
      }
      SourceState prevState = new SourceState(prevJobState, prevWuStates);
      Map<SourceEntity, Long> previousWM =
          QueryBasedSource.getPreviousWatermarksForAllTables(prevState);
      Assert.assertEquals(previousWM.size(), 3);
      // No records read for one WU for Table0: min of all LWM
      Assert.assertEquals(previousWM.get(sourceEntities[0]), Long.valueOf(0L));
      // Failure for Table 1: min of all LWM
      Assert.assertEquals(previousWM.get(sourceEntities[1]), Long.valueOf(10L));
      // Success for Table 2: max of all HWM
      Assert.assertEquals(previousWM.get(sourceEntities[2]), Long.valueOf(40L));
    }
  }
}
| 2,845 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.extract;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.exception.HighWatermarkException;
import org.apache.gobblin.source.extractor.exception.RecordCountException;
import org.apache.gobblin.source.extractor.exception.SchemaException;
import org.apache.gobblin.source.extractor.partition.Partition;
import org.apache.gobblin.source.extractor.watermark.Predicate;
import org.apache.gobblin.source.extractor.watermark.WatermarkPredicate;
import org.apache.gobblin.source.extractor.watermark.WatermarkType;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * Unit tests for {@link QueryBasedExtractor}.
 */
public class QueryBasedExtractorTest {

  /**
   * For the last work unit of a SNAPSHOT extract, the data-pull upper bound is
   * dropped: all 5 records are expected even though the predicates only cover [1, 3].
   */
  @Test
  public void testDataPullUpperBoundsRemovedInLastWorkUnit() {
    int totalCount = 5;
    ArrayList<DataRecord> records = this.generateRecords(totalCount);
    WorkUnit workUnit = WorkUnit.createEmpty();
    workUnit.setProp(Partition.IS_LAST_PARTIITON, true);
    workUnit.setProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE, "SNAPSHOT");
    WorkUnitState workUnitState = new WorkUnitState(workUnit, new State());
    workUnitState.setId("testDataPullUpperBoundsRemovedInLastWorkUnit");
    TestQueryBasedExtractor testExtractor = new TestQueryBasedExtractor(workUnitState, records);
    testExtractor.setRangePredicates(1, 3);
    this.verify(testExtractor, totalCount);
  }

  /**
   * The upper bound must be kept (only 3 of 5 records pulled) when: the work unit is
   * not the last partition; the user specified the high watermark; or an actual high
   * watermark was already recorded on the work unit.
   */
  @Test
  public void testDataPullUpperBoundsNotRemovedInLastWorkUnit() {
    int totalCount = 5;
    ArrayList<DataRecord> records = this.generateRecords(totalCount);
    WorkUnit workUnit = WorkUnit.createEmpty();
    WorkUnitState workUnitState = new WorkUnitState(workUnit, new State());
    workUnitState.setId("testDataPullUpperBoundsNotRemovedInLastWorkUnit");
    // It's not a last work unit
    TestQueryBasedExtractor testExtractor = new TestQueryBasedExtractor(workUnitState, records);
    testExtractor.setRangePredicates(1, 3);
    this.verify(testExtractor, 3);
    // It's a last work unit but user specifies high watermark
    workUnit.setProp(Partition.IS_LAST_PARTIITON, true);
    workUnit.setProp(Partition.HAS_USER_SPECIFIED_HIGH_WATERMARK, true);
    testExtractor.reset();
    testExtractor.setRangePredicates(1, 3);
    this.verify(testExtractor, 3);
    // It's a last work unit but it has WORK_UNIT_STATE_ACTUAL_HIGH_WATER_MARK_KEY on record
    workUnit.removeProp(Partition.HAS_USER_SPECIFIED_HIGH_WATERMARK);
    workUnit.setProp(ConfigurationKeys.WORK_UNIT_STATE_ACTUAL_HIGH_WATER_MARK_KEY, "3");
    testExtractor.reset();
    testExtractor.setRangePredicates(1, 3);
    this.verify(testExtractor, 3);
  }

  // Builds records with value == timeStamp == count, counting down to 1
  private ArrayList<DataRecord> generateRecords(int count) {
    ArrayList<DataRecord> records = new ArrayList<>();
    while (count > 0) {
      records.add(new DataRecord(count, count));
      count--;
    }
    return records;
  }

  // Reads the extractor to exhaustion and asserts how many records came back
  private void verify(TestQueryBasedExtractor testExtractor, int expectedCount) {
    int actualCount = 0;
    try {
      while (testExtractor.readRecord(null) != null) {
        actualCount++;
      }
    } catch (Exception e) {
      Assert.fail("There should not incur any exception");
    }
    Assert.assertEquals(actualCount, expectedCount, "Expect " + expectedCount + " records!");
  }

  /**
   * In-memory {@link QueryBasedExtractor} backed by a fixed record list. Records are
   * filtered by the LWM/HWM predicates on their timeStamp, and the highest timestamp
   * pulled so far is remembered so already-pulled records are skipped.
   */
  private class TestQueryBasedExtractor extends QueryBasedExtractor<ArrayList, DataRecord> {
    private final ArrayList<DataRecord> records;
    // Highest timeStamp pulled so far; -1 means nothing has been pulled yet
    private long previousActualHwmValue;

    TestQueryBasedExtractor(WorkUnitState workUnitState, ArrayList<DataRecord> records) {
      super(workUnitState);
      this.records = records;
      previousActualHwmValue = -1;
    }

    // Installs [lwmValue, hwmValue] range predicates on the "timeStamp" column
    void setRangePredicates(long lwmValue, long hwmValue) {
      WatermarkPredicate watermark = new WatermarkPredicate("timeStamp", WatermarkType.SIMPLE);
      predicateList.add(watermark.getPredicate(this, lwmValue, ">=", Predicate.PredicateType.LWM));
      predicateList.add(watermark.getPredicate(this, hwmValue, "<=", Predicate.PredicateType.HWM));
    }

    // Returns the extractor to its initial state so a test can reuse it
    void reset() {
      previousActualHwmValue = -1;
      predicateList.clear();
      setFetchStatus(true);
    }

    @Override
    public void extractMetadata(String schema, String entity, WorkUnit workUnit) throws SchemaException, IOException {
    }

    @Override
    public long getMaxWatermark(String schema, String entity, String watermarkColumn,
        List<Predicate> snapshotPredicateList, String watermarkSourceFormat) throws HighWatermarkException {
      return 0;
    }

    @Override
    public long getSourceCount(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList)
        throws RecordCountException {
      return records.size();
    }

    @Override
    public Iterator<DataRecord> getRecordSet(String schema, String entity, WorkUnit workUnit, List<Predicate> predicateList)
        throws DataRecordException, IOException {
      if (records == null || predicateList == null) {
        // No new data to pull
        return null;
      }
      long lwmValue = -1;
      long hwmValue = Long.MAX_VALUE;
      long actualHwmValue = -1;
      // Adjust watermarks from predicate list
      for (Predicate predicate: predicateList) {
        if (predicate.getType() == Predicate.PredicateType.HWM) {
          hwmValue = predicate.value;
        }
        if (predicate.getType() == Predicate.PredicateType.LWM) {
          lwmValue = predicate.value;
        }
      }
      ArrayList<DataRecord> filteredRecords = new ArrayList<>();
      for (DataRecord record : records) {
        if (record.timeStamp <= previousActualHwmValue) {
          // The record has been pulled previously
          continue;
        }
        if (record.timeStamp >= lwmValue && record.timeStamp <= hwmValue) {
          // Make a copy
          filteredRecords.add(new DataRecord(record.value, record.timeStamp));
          // Mark actual high watermark
          if (record.timeStamp > actualHwmValue) {
            actualHwmValue = record.timeStamp;
          }
        }
      }
      if (filteredRecords.isEmpty()) {
        return null;
      }
      previousActualHwmValue = actualHwmValue;
      return filteredRecords.iterator();
    }

    @Override
    public String getWatermarkSourceFormat(WatermarkType watermarkType) {
      return null;
    }

    @Override
    public String getHourPredicateCondition(String column, long value, String valueFormat, String operator) {
      return null;
    }

    @Override
    public String getDatePredicateCondition(String column, long value, String valueFormat, String operator) {
      return null;
    }

    @Override
    public String getTimestampPredicateCondition(String column, long value, String valueFormat, String operator) {
      return null;
    }

    @Override
    public void setTimeOut(int timeOut) {
    }

    @Override
    public Map<String, String> getDataTypeMap() {
      return null;
    }

    @Override
    public void closeConnection() throws Exception {
    }

    @Override
    public Iterator<DataRecord> getRecordSetFromSourceApi(String schema, String entity, WorkUnit workUnit,
        List<Predicate> predicateList) throws IOException {
      try {
        return getRecordSet(schema, entity, workUnit, predicateList);
      } catch (DataRecordException e) {
        e.printStackTrace();
        return null;
      }
    }
  }

  // Simple (value, timeStamp) pair used as the extracted record type
  private class DataRecord {
    int value;
    long timeStamp;

    DataRecord(int value, long timeStamp) {
      this.value = value;
      this.timeStamp = timeStamp;
    }
  }
}
| 2,846 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/source/extractor/partition/PartitionerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.source.extractor.partition;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.joda.time.DateTime;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.source.extractor.extract.ExtractType;
import org.apache.gobblin.source.extractor.utils.Utils;
import org.apache.gobblin.source.extractor.watermark.WatermarkType;
/**
* Unit tests for {@link PartitionerTest}
*/
public class PartitionerTest {
/**
 * Verifies {@link Partitioner#getPartitionList}: default partition when no watermark is
 * configured, full [default, now] range without user bounds, and hourly-aligned splits
 * once start/end values are supplied.
 */
@Test
public void testGetPartitionList() {
  SourceState state = new SourceState();
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE, true);
  TestPartitioner partitioner = new TestPartitioner(state);

  long defaultWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
  List<Partition> expected = new ArrayList<>();
  expected.add(new Partition(defaultWatermark, defaultWatermark, true, false));
  // No watermark column configured yet: a single default partition comes back
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // Configure a watermark column plus an hourly SNAPSHOT extract
  state.setProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY, "time");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, "hour");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE, "SNAPSHOT");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, "2");
  state.setProp(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, "2");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE, true);

  expected.clear();
  expected.add(new Partition(defaultWatermark, Long.parseLong(TestPartitioner.currentTimeString), true, false));
  // No user-specified bounds: the single partition spans [default, current time]
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // User-specified low/high watermarks are split into hour-aligned partitions
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE, "20170101002010");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, "20170101122010");
  expected.clear();
  expected.add(new Partition(20170101000000L, 20170101060000L));
  expected.add(new Partition(20170101060000L, 20170101120000L, true, true));
  List<Partition> actual = partitioner.getPartitionList(-1);
  actual.sort(Partitioner.ascendingComparator);
  Assert.assertEquals(actual, expected);
}
/**
 * Verifies {@link Partitioner#getPartitionList} when the user supplies explicit
 * partition points, across date/hour/timestamp watermark types and extract types.
 */
@Test
public void testGetUserSpecifiedPartitionList() {
  SourceState state = new SourceState();
  state.setProp(Partitioner.HAS_USER_SPECIFIED_PARTITIONS, true);
  TestPartitioner partitioner = new TestPartitioner(state);

  long defaultWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE;
  List<Partition> expected = new ArrayList<>();
  expected.add(new Partition(defaultWatermark, defaultWatermark, true, true));
  state.setProp(Partitioner.USER_SPECIFIED_PARTITIONS, "");
  // An empty partition list falls back to a single default partition
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // Date watermark, single partition point: the high bound comes from the
  // partitioner's (fixed) current time, truncated to a date
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, "date");
  state.setProp(Partitioner.USER_SPECIFIED_PARTITIONS, "20140101030201");
  expected.clear();
  expected.add(new Partition(20140101000000L, 20170101000000L, true, false));
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // APPEND_DAILY keeps the upper bound of the user-specified range
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE, "APPEND_DAILY");
  state.setProp(Partitioner.USER_SPECIFIED_PARTITIONS, "20140101030201, 20140102040201");
  expected.clear();
  expected.add(new Partition(20140101000000L, 20140102000000L, true, true));
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // Hour watermark with a snapshot extract: values truncated to the hour, upper bound dropped
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, "hour");
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE, "SNAPSHOT");
  expected.clear();
  expected.add(new Partition(20140101030000L, 20140102040000L, true, false));
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);

  // Timestamp watermark keeps full second precision
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, "timestamp");
  expected.clear();
  expected.add(new Partition(20140101030201L, 20140102040201L, true, false));
  Assert.assertEquals(partitioner.getPartitionList(-1), expected);
}
/**
 * Tests getLowWatermark when the user overrides the watermark (and for full dumps):
 * the configured start value wins; without one, the default watermark value is used.
 */
@Test
public void testGetLowWatermarkOnUserOverride() {
  String startValue = "20140101000000";
  SourceState state = new SourceState();
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE, true);
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE, startValue);
  TestPartitioner partitioner = new TestPartitioner(state);

  Assert.assertEquals(partitioner.getLowWatermark(null, null, -1, 0), Long.parseLong(startValue),
      "Low watermark should be " + startValue);

  // A full dump behaves the same way even without the override flag
  state.removeProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE);
  state.setProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY, true);
  Assert.assertEquals(partitioner.getLowWatermark(null, null, -1, 0), Long.parseLong(startValue),
      "Low watermark should be " + startValue);

  // Without a configured start value the default watermark value is returned
  state.removeProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE);
  Assert.assertEquals(partitioner.getLowWatermark(null, null, -1, 0),
      ConfigurationKeys.DEFAULT_WATERMARK_VALUE,
      "Low watermark should be " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
}
/**
 * Tests getLowWatermark for SNAPSHOT extracts: previous watermark + delta, minus the
 * configured low-watermark backup seconds when present.
 */
@Test
public void testGetLowWatermarkOnSnapshotExtract() {
  String startValue = "20140101000000";
  SourceState state = new SourceState();
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE, startValue);
  TestPartitioner partitioner = new TestPartitioner(state);

  ExtractType extractType = ExtractType.SNAPSHOT;
  int delta = 1;

  // No previous watermark: fall back to the configured start value
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, null, ConfigurationKeys.DEFAULT_WATERMARK_VALUE, delta),
      Long.parseLong(startValue), "Low watermark should be " + startValue);

  // With a previous watermark: previous + delta, for both SIMPLE and TIMESTAMP types
  long previousWatermark = 20140101000050L;
  long expected = previousWatermark + delta;
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.SIMPLE, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.TIMESTAMP, previousWatermark, delta),
      expected, "Low watermark should be " + expected);

  // Backup seconds are subtracted for snapshot extracts
  int backupSecs = 10;
  expected = previousWatermark + delta - backupSecs;
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, backupSecs);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.SIMPLE, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.TIMESTAMP, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
}
/**
 * Tests getLowWatermark for APPEND extracts: previous watermark + delta, and the
 * low-watermark backup seconds setting must have no effect.
 */
@Test
public void testGetLowWatermarkOnAppendExtract() {
  String startValue = "20140101000000";
  SourceState state = new SourceState();
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE, startValue);
  TestPartitioner partitioner = new TestPartitioner(state);

  ExtractType extractType = ExtractType.APPEND_DAILY;
  int delta = 1;

  // No previous watermark: fall back to the configured start value
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, null, ConfigurationKeys.DEFAULT_WATERMARK_VALUE, delta),
      Long.parseLong(startValue), "Low watermark should be " + startValue);

  // With a previous watermark: previous + delta, for both SIMPLE and TIMESTAMP types
  long previousWatermark = 20140101000050L;
  long expected = previousWatermark + delta;
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.SIMPLE, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.TIMESTAMP, previousWatermark, delta),
      expected, "Low watermark should be " + expected);

  // Backup seconds must be ignored for append extracts: same expectation as above
  int backupSecs = 10;
  state.setProp(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, backupSecs);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.SIMPLE, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
  Assert.assertEquals(
      partitioner.getLowWatermark(extractType, WatermarkType.TIMESTAMP, previousWatermark, delta),
      expected, "Low watermark should be " + expected);
}
/**
 * Verifies {@code getHighWatermark} when the user overrides the watermark: the
 * configured end value wins and is flagged as user specified; without an end
 * value the current time is used instead.
 */
@Test
public void testGetHighWatermarkOnUserOverride() {
  SourceState sourceState = new SourceState();
  String endValue = "20140101000000";
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_WATERMARK_OVERRIDE, true);
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, endValue);
  TestPartitioner partitioner = new TestPartitioner(sourceState);

  // The configured end value becomes the high watermark and is marked user specified.
  Assert.assertEquals(partitioner.getHighWatermark(null, null), Long.parseLong(endValue),
      "High watermark should be " + endValue);
  Assert.assertEquals(partitioner.getUserSpecifiedHighWatermark(), true,
      "Should mark as user specified high watermark");
  partitioner.reset();

  // Without SOURCE_QUERYBASED_END_VALUE the partitioner falls back to the current time.
  sourceState.removeProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE);
  long expected = Long.parseLong(TestPartitioner.currentTimeString);
  Assert.assertEquals(partitioner.getHighWatermark(null, null), expected,
      "High watermark should be " + expected);
  Assert.assertEquals(partitioner.getUserSpecifiedHighWatermark(), false,
      "Should not mark as user specified high watermark");
}
/**
 * Verifies {@code getHighWatermark} for SNAPSHOT extracts: a full dump ignores
 * the configured end value; SIMPLE watermarks yield the default value while
 * TIMESTAMP watermarks yield the current time. Neither counts as user specified.
 */
@Test
public void testGetHighWatermarkOnSnapshotExtract() {
  String endValue = "20140101000000";
  SourceState sourceState = new SourceState();
  // SOURCE_QUERYBASED_END_VALUE must be ignored when the extract is a full dump.
  sourceState.setProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY, true);
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, endValue);
  TestPartitioner partitioner = new TestPartitioner(sourceState);
  ExtractType extractType = ExtractType.SNAPSHOT;

  Assert.assertEquals(partitioner.getHighWatermark(extractType, WatermarkType.SIMPLE),
      ConfigurationKeys.DEFAULT_WATERMARK_VALUE,
      "High watermark should be " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
  Assert.assertEquals(partitioner.getUserSpecifiedHighWatermark(), false,
      "Should not mark as user specified high watermark");

  long expected = Long.parseLong(TestPartitioner.currentTimeString);
  Assert.assertEquals(partitioner.getHighWatermark(extractType, WatermarkType.TIMESTAMP), expected,
      "High watermark should be " + expected);
  Assert.assertEquals(partitioner.getUserSpecifiedHighWatermark(), false,
      "Should not mark as user specified high watermark");
}
/**
 * Test getHighWatermark. Extract type: Append.
 *
 * <p>Covers: full dump (uses the configured end value), APPEND_BATCH with no limit
 * type (default watermark), no limit delta (falls back to the mocked current time),
 * and the CURRENTDATE/CURRENTHOUR/CURRENTMINUTE/CURRENTSECOND - 1 max watermark
 * limits, all of which cap against the fixed test time 20170101000000 and yield
 * 20161231235959.
 *
 * <p>Fix: the assertion messages on the {@code true} expectations previously read
 * "Should not mark ..." — they now match the asserted value.
 */
@Test
public void testGetHighWatermarkOnAppendExtract() {
  String endValue = "20140101000000";
  SourceState sourceState = new SourceState();
  sourceState.setProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY, true);
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, endValue);
  ExtractType extractType = ExtractType.APPEND_DAILY;
  TestPartitioner partitioner = new TestPartitioner(sourceState);
  // Full dump: the configured end value wins and is marked user specified.
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      Long.parseLong(endValue),
      "High watermark should be " + endValue);
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      true,
      "Should mark as user specified high watermark");
  partitioner.reset();
  // Test non-full-dump cases below
  sourceState.removeProp(ConfigurationKeys.EXTRACT_IS_FULL_KEY);
  // No limit type
  Assert.assertEquals(
      partitioner.getHighWatermark(ExtractType.APPEND_BATCH, null),
      ConfigurationKeys.DEFAULT_WATERMARK_VALUE,
      "High watermark should be " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE);
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      false,
      "Should not mark as user specified high watermark");
  // No limit delta: falls back to the mocked current time.
  long expected = Long.parseLong(TestPartitioner.currentTimeString);
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      expected,
      "High watermark should be " + expected);
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      false,
      "Should not mark as user specified high watermark");
  // CURRENTDATE - 1
  String maxLimit = "CURRENTDATE-1";
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT, maxLimit);
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      20161231235959L,
      "High watermark should be 20161231235959");
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      true,
      "Should mark as user specified high watermark");
  partitioner.reset();
  // CURRENTHOUR - 1
  maxLimit = "CURRENTHOUR-1";
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT, maxLimit);
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      20161231235959L,
      "High watermark should be 20161231235959");
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      true,
      "Should mark as user specified high watermark");
  partitioner.reset();
  // CURRENTMINUTE - 1
  maxLimit = "CURRENTMINUTE-1";
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT, maxLimit);
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      20161231235959L,
      "High watermark should be 20161231235959");
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      true,
      "Should mark as user specified high watermark");
  partitioner.reset();
  // CURRENTSECOND - 1
  maxLimit = "CURRENTSECOND-1";
  sourceState.setProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT, maxLimit);
  Assert.assertEquals(
      partitioner.getHighWatermark(extractType, null),
      20161231235959L,
      "High watermark should be 20161231235959");
  Assert.assertEquals(
      partitioner.getUserSpecifiedHighWatermark(),
      true,
      "Should mark as user specified high watermark");
}
private class TestPartitioner extends Partitioner {
static final String currentTimeString = "20170101000000";
private DateTime currentTime;
TestPartitioner(SourceState state) {
super(state);
currentTime = Utils.toDateTime(currentTimeString, "yyyyMMddHHmmss", ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE);
}
boolean getUserSpecifiedHighWatermark() {
return hasUserSpecifiedHighWatermark;
}
@Override
public DateTime getCurrentTime(String timeZone) {
return currentTime;
}
void reset() {
hasUserSpecifiedHighWatermark = false;
}
}
}
| 2,847 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/async/AsyncDataDispatcherTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.async;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.util.ExponentialBackoff;
/**
 * Tests for {@link AsyncDataDispatcher}: two concurrent writer threads feed
 * records into a dispatcher test double whose dispatch outcome
 * (OK / recoverable ERROR / FATAL) can be switched at runtime.
 *
 * <p>Fixes: renamed the misspelled {@code Writer.dispather} field, and corrected
 * copy-pasted comments in the fatal-exception test that claimed "everything
 * should be fine" where an exception is actually expected.
 */
@Test
public class AsyncDataDispatcherTest {
  /**
   * Test successful data dispatch with 2 writers. No exception expected.
   */
  public void testSuccessfulDataDispatch()
      throws ExecutionException, InterruptedException {
    final TestAsyncDataDispatcher dispatcher = new TestAsyncDataDispatcher();
    // Waiting on an already-empty buffer must return immediately
    dispatcher.waitForBufferEmpty();
    ExecutorService service = Executors.newFixedThreadPool(2);
    Writer writer1 = new Writer(dispatcher);
    Writer writer2 = new Writer(dispatcher);
    Future<?> future1 = service.submit(writer1);
    Future<?> future2 = service.submit(writer2);
    // Wait until 10 records have been dispatched
    ExponentialBackoff.awaitCondition().callable(new Callable<Boolean>() {
      @Override
      public Boolean call()
          throws Exception {
        return dispatcher.count == 10;
      }
    }).maxWait(1000L).await();
    writer1.shouldWaitForABufferEmpty = true;
    writer2.shouldWaitForABufferEmpty = true;
    dispatcher.count = 0;
    // Wait for another 10 records, this time with the writers blocking on buffer-empty
    ExponentialBackoff.awaitCondition().callable(new Callable<Boolean>() {
      @Override
      public Boolean call()
          throws Exception {
        return dispatcher.count == 10;
      }
    }).maxWait(1000L).await();
    writer1.shouldExit = true;
    writer2.shouldExit = true;
    try {
      future1.get();
      future2.get();
      service.shutdown();
      dispatcher.terminate();
    } catch (Exception e) {
      Assert.fail("Could not complete successful data dispatch");
    }
    Assert.assertTrue(dispatcher.isDispatchCalled);
    Assert.assertTrue(writer1.aBufferEmptyWaited);
    Assert.assertTrue(writer2.aBufferEmptyWaited);
  }

  /**
   * Test successful data dispatch with 2 writers. A recoverable (non-fatal)
   * dispatch exception must not fail the writers or termination.
   */
  public void testSuccessfulDataDispatchWithNormalException()
      throws ExecutionException, InterruptedException {
    final TestAsyncDataDispatcher dispatcher = new TestAsyncDataDispatcher();
    ExecutorService service = Executors.newFixedThreadPool(2);
    Writer writer1 = new Writer(dispatcher);
    Writer writer2 = new Writer(dispatcher);
    Future<?> future1 = service.submit(writer1);
    Future<?> future2 = service.submit(writer2);
    dispatcher.status = DispatchStatus.ERROR;
    // Wait until 10 records have been dispatched
    ExponentialBackoff.awaitCondition().callable(new Callable<Boolean>() {
      @Override
      public Boolean call()
          throws Exception {
        return dispatcher.count == 10;
      }
    }).maxWait(1000L).await();
    writer1.shouldExit = true;
    writer2.shouldExit = true;
    // Non-fatal errors are absorbed: everything should complete cleanly
    try {
      future1.get();
      future2.get();
      service.shutdown();
      dispatcher.terminate();
    } catch (Exception e) {
      Assert.fail("Could not complete successful data dispatch");
    }
    Assert.assertTrue(dispatcher.isDispatchCalled);
  }

  /**
   * Test data dispatch with 2 writers. A fatal dispatch exception must reject
   * further put()/waitForBufferEmpty() calls and propagate the failure to the
   * writer futures and to terminate().
   */
  public void testSuccessfulDataDispatchWithFatalException()
      throws ExecutionException, InterruptedException {
    final TestAsyncDataDispatcher dispatcher = new TestAsyncDataDispatcher();
    ExecutorService service = Executors.newFixedThreadPool(2);
    Writer writer1 = new Writer(dispatcher);
    Writer writer2 = new Writer(dispatcher);
    Future<?> future1 = service.submit(writer1);
    Future<?> future2 = service.submit(writer2);
    dispatcher.status = DispatchStatus.FATAL;
    // Wait until 10 records have been dispatched
    ExponentialBackoff.awaitCondition().callable(new Callable<Boolean>() {
      @Override
      public Boolean call()
          throws Exception {
        return dispatcher.count == 10;
      }
    }).maxWait(1000L).await();
    boolean hasAnException = false;
    try {
      dispatcher.put(new Object());
    } catch (Exception e) {
      hasAnException = true;
    }
    Assert.assertTrue(hasAnException, "put should get an exception");
    hasAnException = false;
    try {
      dispatcher.waitForBufferEmpty();
    } catch (Exception e) {
      hasAnException = true;
    }
    Assert.assertTrue(hasAnException, "waitForBufferEmpty should get an exception");
    hasAnException = false;
    try {
      // future1 should surface the fatal dispatch failure
      future1.get();
    } catch (ExecutionException e) {
      hasAnException = true;
    } catch (InterruptedException e) {
      // Do nothing
    }
    Assert.assertTrue(hasAnException, "future1 should get an exception");
    hasAnException = false;
    try {
      // future2 should surface the fatal dispatch failure
      future2.get();
    } catch (ExecutionException e) {
      hasAnException = true;
    } catch (InterruptedException e) {
      // Do nothing
    }
    Assert.assertTrue(hasAnException, "future2 should get an exception");
    hasAnException = false;
    try {
      service.shutdown();
      dispatcher.terminate();
    } catch (Exception e) {
      hasAnException = true;
    }
    Assert.assertTrue(hasAnException, "terminating should get an exception");
    Assert.assertTrue(dispatcher.isDispatchCalled);
  }

  /** Controls the behavior of {@link TestAsyncDataDispatcher#dispatch}. */
  enum DispatchStatus {
    // Dispatch success
    OK,
    // Recoverable dispatch exception
    ERROR,
    // Fatal dispatch exception
    FATAL
  }

  /**
   * Dispatcher test double: consumes one record per dispatch call, counts
   * consumed records, and throws according to {@link #status}.
   */
  class TestAsyncDataDispatcher extends AsyncDataDispatcher<Object> {
    volatile DispatchStatus status;
    // Number of records consumed so far; reset by tests between phases.
    volatile int count;
    boolean isDispatchCalled;

    TestAsyncDataDispatcher() {
      super(2);
      status = DispatchStatus.OK;
      isDispatchCalled = false;
      count = 0;
    }

    @Override
    protected void dispatch(Queue<Object> buffer)
        throws DispatchException {
      // The buffer must never be empty when dispatch is invoked
      Assert.assertTrue(buffer.size() > 0);
      isDispatchCalled = true;
      // Consume a record
      buffer.poll();
      count++;
      switch (status) {
        case OK:
          return;
        case ERROR:
          throw new DispatchException("error", false);
        case FATAL:
          throw new DispatchException("fatal");
      }
    }
  }

  /**
   * Writer thread: repeatedly puts records into the dispatcher until told to
   * exit, optionally blocking on buffer-empty after each put.
   */
  class Writer implements Runnable {
    TestAsyncDataDispatcher dispatcher;
    boolean aBufferEmptyWaited;
    volatile boolean shouldWaitForABufferEmpty;
    volatile boolean shouldExit;

    Writer(TestAsyncDataDispatcher dispatcher) {
      this.dispatcher = dispatcher;
      shouldWaitForABufferEmpty = false;
      shouldExit = false;
      aBufferEmptyWaited = false;
    }

    @Override
    public void run() {
      while (!shouldExit) {
        dispatcher.put(new Object());
        if (shouldWaitForABufferEmpty) {
          dispatcher.waitForBufferEmpty();
          aBufferEmptyWaited = true;
        }
        try {
          Thread.sleep(100);
        } catch (InterruptedException e) {
          // Do nothing
        }
      }
    }
  }
}
| 2,848 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/destination/DestinationDatasetHandlerServiceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.destination;
import java.util.ArrayList;
import java.util.List;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.metrics.event.EventSubmitter;
import org.apache.gobblin.source.workunit.BasicWorkUnitStream;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.source.workunit.WorkUnitStream;
import org.testng.Assert;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import static org.mockito.Mockito.mock;
/**
 * Tests for {@link DestinationDatasetHandlerService}: verifies that each configured
 * handler runs exactly once per work unit by counting increments of
 * {@link TestDestinationDatasetHandler#TEST_COUNTER_KEY}.
 *
 * <p>Refactor: the work-unit creation loop, previously duplicated in all four
 * tests, is extracted into {@link #createEmptyWorkUnits(int)}.
 */
public class DestinationDatasetHandlerServiceTest {
  EventSubmitter eventSubmitter = null;

  @BeforeSuite
  void setup() {
    this.eventSubmitter = mock(EventSubmitter.class);
  }

  /** Creates {@code count} empty work units for handler tests. */
  private static List<WorkUnit> createEmptyWorkUnits(int count) {
    List<WorkUnit> workUnits = new ArrayList<>(count);
    for (int i = 0; i < count; i++) {
      workUnits.add(WorkUnit.createEmpty());
    }
    return workUnits;
  }

  @Test
  void testSingleHandler() throws Exception {
    SourceState state = new SourceState();
    state.setProp(ConfigurationKeys.DESTINATION_DATASET_HANDLER_CLASS, TestDestinationDatasetHandler.class.getName());
    DestinationDatasetHandlerService service = new DestinationDatasetHandlerService(state, true, this.eventSubmitter);
    List<WorkUnit> workUnits = createEmptyWorkUnits(5);
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    service.executeHandlers(workUnitStream);
    for (WorkUnit wu : workUnits) {
      Assert.assertEquals(wu.getPropAsInt(TestDestinationDatasetHandler.TEST_COUNTER_KEY), 1);
    }
  }

  @Test
  void testMultipleHandlers() throws Exception {
    SourceState state = new SourceState();
    state.setProp(ConfigurationKeys.DESTINATION_DATASET_HANDLER_CLASS,
        TestDestinationDatasetHandler.class.getName() + "," + TestDestinationDatasetHandler.class.getName());
    DestinationDatasetHandlerService service = new DestinationDatasetHandlerService(state, true, this.eventSubmitter);
    List<WorkUnit> workUnits = createEmptyWorkUnits(5);
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    service.executeHandlers(workUnitStream);
    for (WorkUnit wu : workUnits) {
      // there were 2 handlers, each should have added to counter
      Assert.assertEquals(wu.getPropAsInt(TestDestinationDatasetHandler.TEST_COUNTER_KEY), 2);
    }
  }

  @Test
  void testMultipleHandlersWhitespace() throws Exception {
    SourceState state = new SourceState();
    // whitespace around the separator must be tolerated
    state.setProp(ConfigurationKeys.DESTINATION_DATASET_HANDLER_CLASS,
        TestDestinationDatasetHandler.class.getName() + " , " + TestDestinationDatasetHandler.class.getName());
    DestinationDatasetHandlerService service = new DestinationDatasetHandlerService(state, true, this.eventSubmitter);
    List<WorkUnit> workUnits = createEmptyWorkUnits(5);
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    service.executeHandlers(workUnitStream);
    for (WorkUnit wu : workUnits) {
      // there were 2 handlers, each should have added to counter
      Assert.assertEquals(wu.getPropAsInt(TestDestinationDatasetHandler.TEST_COUNTER_KEY), 2);
    }
  }

  @Test
  // no configured handlers: executeHandlers must be a no-op, not an error
  void testEmpty() throws Exception {
    SourceState state = new SourceState();
    DestinationDatasetHandlerService service = new DestinationDatasetHandlerService(state, true, this.eventSubmitter);
    List<WorkUnit> workUnits = createEmptyWorkUnits(5);
    WorkUnitStream workUnitStream = new BasicWorkUnitStream.Builder(workUnits).build();
    service.executeHandlers(workUnitStream);
  }
}
| 2,849 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/destination/TestDestinationDatasetHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.destination;
import java.io.IOException;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.source.workunit.WorkUnitStream;
/**
 * Simple {@link DestinationDatasetHandler} test double: each {@code handle}
 * invocation increments a counter property on every work unit, so tests can
 * verify how many handlers ran.
 *
 * <p>Fix: {@code TEST_COUNTER_KEY} is now {@code final} (it is a constant and
 * must not be reassignable); fixed the {@code workUnitSteam} parameter typo.
 */
public class TestDestinationDatasetHandler implements DestinationDatasetHandler {
  // Work-unit property incremented once per handle() invocation.
  public static final String TEST_COUNTER_KEY = "counter";

  public TestDestinationDatasetHandler(SourceState state, Boolean canCleanUp) {
  }

  @Override
  public WorkUnitStream handle(WorkUnitStream workUnitStream) {
    return workUnitStream.transform(wu -> {
      wu.setProp(TEST_COUNTER_KEY, wu.getPropAsInt(TEST_COUNTER_KEY, 0) + 1);
      return wu;
    });
  }

  @Override
  public void close() throws IOException {
    // Nothing to release.
  }
}
| 2,850 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/state/ConstructStateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.state;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import org.apache.gobblin.Constructs;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * Tests for ConstructState: overwrite properties must win over work-unit values
 * when merged, and later construct states must win when combined.
 */
public class ConstructStateTest {

  @Test
  public void test() {
    final String overrideKey = "overrideMe";
    final String nonOverrideKey = "overrideMe.not";
    final String workUnitToken = "workUnit";
    final String constructToken = "construct";

    WorkUnitState workUnitState = new WorkUnitState();
    workUnitState.setProp(overrideKey, workUnitToken);
    workUnitState.setProp(nonOverrideKey, workUnitToken);

    ConstructState constructState = new ConstructState();
    constructState.addOverwriteProperties(ImmutableMap.of(overrideKey, constructToken));
    constructState.setProp(nonOverrideKey, constructToken);
    constructState.mergeIntoWorkUnitState(workUnitState);

    // Overwrite properties replace the work-unit value; plain properties do not.
    Assert.assertEquals(workUnitState.getProp(overrideKey), constructToken);
    Assert.assertEquals(workUnitState.getProp(nonOverrideKey), workUnitToken);
    Assert.assertEquals(workUnitState.getPropertyNames().size(), 3);
  }

  @Test
  public void testCombineConstructStates() {
    final String overrideKey = "overrideMe";
    final String overrideKey2 = "overrideMe2";
    final String nonOverrideKey = "overrideMe.not";
    final String str1 = "str1";
    final String str2 = "str2";

    ConstructState constructState = new ConstructState();
    constructState.addOverwriteProperties(ImmutableMap.of(overrideKey, str1));
    constructState.addOverwriteProperties(ImmutableMap.of(overrideKey2, str1));
    constructState.setProp(nonOverrideKey, str1);

    ConstructState constructState2 = new ConstructState();
    constructState2.addOverwriteProperties(ImmutableMap.of(overrideKey, str2));
    constructState2.setProp(nonOverrideKey, str2);

    // The added state's overwrite values win on conflict; others are preserved.
    constructState.addConstructState(Constructs.CONVERTER, constructState2);
    Properties overrideProperties = constructState.getOverwriteProperties();
    Assert.assertEquals(overrideProperties.getProperty(overrideKey), str2);
    Assert.assertEquals(overrideProperties.getProperty(overrideKey2), str1);
    Assert.assertEquals(constructState.getPropertyNames().size(), 3);
  }
}
| 2,851 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/policies | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/policies/count/RowCountRangePolicyTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.policies.count;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicy;
import org.apache.gobblin.qualitychecker.task.TaskLevelPolicy.Result;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for RowCountRangePolicy: the policy fails when the written/expected row
 * ratio falls outside the configured range and passes otherwise.
 */
public class RowCountRangePolicyTest {

  /** Builds a State carrying the expected rows, written rows, and allowed range. */
  private State stateWith(long rowsExpected, long rowsWritten, double range) {
    State state = new State();
    state.setProp(ConfigurationKeys.EXTRACTOR_ROWS_EXPECTED, rowsExpected);
    state.setProp(ConfigurationKeys.WRITER_ROWS_WRITTEN, rowsWritten);
    state.setProp(ConfigurationKeys.ROW_COUNT_RANGE, range);
    return state;
  }

  @Test
  public void testRangePolicyFailure() {
    Assert.assertEquals(
        new RowCountRangePolicy(stateWith(4, 1, 0.5), TaskLevelPolicy.Type.FAIL).executePolicy(),
        Result.FAILED);
    Assert.assertEquals(
        new RowCountRangePolicy(stateWith(20, 8, 0.2), TaskLevelPolicy.Type.FAIL).executePolicy(),
        Result.FAILED);
  }

  @Test
  public void testRangePolicySuccess() {
    Assert.assertEquals(
        new RowCountRangePolicy(stateWith(4, 3, 0.8), TaskLevelPolicy.Type.FAIL).executePolicy(),
        Result.PASSED);
    Assert.assertEquals(
        new RowCountRangePolicy(stateWith(20, 12, 0.5), TaskLevelPolicy.Type.FAIL).executePolicy(),
        Result.PASSED);
  }
}
| 2,852 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/DestinationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
/**
* Unit tests for {@link Destination}.
*
* @author Yinan Li
*/
/**
 * Unit tests for {@link Destination}: the factory must retain both the
 * destination type and the supplied properties.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.writer"})
public class DestinationTest {

  @Test
  public void testMethods() {
    State props = new State();
    props.setProp("foo", "bar");

    Destination destination = Destination.of(Destination.DestinationType.HDFS, props);

    Assert.assertEquals(destination.getType(), Destination.DestinationType.HDFS);
    State returned = destination.getProperties();
    Assert.assertEquals(returned.getPropertyNames().size(), 1);
    Assert.assertEquals(returned.getProp("foo"), "bar");
  }
}
| 2,853 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/ThrottleWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import static org.mockito.Mockito.*;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.writer.ThrottleWriter.ThrottleType;
import org.apache.gobblin.util.FinalState;
import org.apache.commons.lang3.mutable.MutableLong;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Stopwatch;
/**
 * Tests for ThrottleWriter: the wrapped writer's throughput (QPS or bytes/sec)
 * must stay within tolerance of the configured rate, and getFinalState must
 * delegate to the wrapped writer and report the throttled time.
 */
@Test(groups = { "gobblin.writer" })
public class ThrottleWriterTest {

  /** Writes through {@code writer} for {@code durationSecs} seconds; returns the write count. */
  private static int spinWrites(DataWriter<Void> writer, long durationSecs) throws IOException {
    int count = 0;
    Stopwatch stopwatch = Stopwatch.createStarted();
    while (stopwatch.elapsed(TimeUnit.SECONDS) <= durationSecs) {
      writer.writeEnvelope(null);
      count++;
    }
    return count;
  }

  public void testThrottleQps() throws IOException {
    DataWriter<Void> underlying = mock(DataWriter.class);
    int parallelism = 2;
    int qps = 4;
    DataWriter<Void> throttled = setup(underlying, parallelism, qps, ThrottleType.QPS);

    long duration = 10L;
    int count = spinWrites(throttled, duration);

    // Allow 2x-rate slack on either side of the ideal count.
    int expected = (int) (qps * duration);
    Assert.assertTrue(count <= expected + qps * 2, "Request too high " + count + " , QPS " + qps + " for duration " + duration + " second ");
    Assert.assertTrue(count >= expected - qps * 2, "Request too low " + count + " , QPS " + qps + " for duration " + duration + " second ");
  }

  public void testThrottleBytes() throws IOException {
    DataWriter<Void> underlying = mock(DataWriter.class);
    // Mock bytesWritten() to grow by 1 byte per call.
    final MutableLong mockBytes = new MutableLong();
    when(underlying.bytesWritten()).thenAnswer(new Answer<Long>() {
      @Override
      public Long answer(InvocationOnMock invocation) throws Throwable {
        mockBytes.add(1L); //Delta bytes
        return mockBytes.getValue();
      }
    });
    int parallelism = 2;
    int bps = 2;
    DataWriter<Void> throttled = setup(underlying, parallelism, bps, ThrottleType.Bytes);

    long duration = 10L;
    int count = spinWrites(throttled, duration);

    int expected = (int) (bps * duration);
    Assert.assertTrue(count <= expected + bps * 2);
    Assert.assertTrue(count >= expected - bps * 2);
  }

  public void testGetFinalState() throws IOException {
    PartitionedDataWriter underlying = mock(PartitionedDataWriter.class);
    when(underlying.getFinalState()).thenReturn(new State());
    DataWriter<Void> throttled = setup(underlying, 2, 4, ThrottleType.QPS);

    State finalState = ((FinalState) throttled).getFinalState();

    // Delegates to the wrapped writer exactly once and adds the throttled-time metric.
    verify(underlying, times(1)).getFinalState();
    Assert.assertTrue(finalState.contains(ThrottleWriter.THROTTLED_TIME_KEY));
  }

  /** Wraps {@code writer} in a throttling decorator configured for the given rate and type. */
  private DataWriter<Void> setup(DataWriter<Void> writer, int parallelism, int rate, ThrottleType type) throws IOException {
    State state = new State();
    state.appendToSetProp(ThrottleWriter.WRITER_LIMIT_RATE_LIMIT_KEY, Integer.toString(rate * parallelism));
    state.appendToSetProp(ThrottleWriter.WRITER_THROTTLE_TYPE_KEY, type.name());
    state.appendToSetProp(ConfigurationKeys.TASK_EXECUTOR_THREADPOOL_SIZE_KEY, Integer.toString(parallelism));
    state.appendToSetProp(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, Integer.toString(parallelism));
    DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, state);
    return builder.build();
  }
}
| 2,854 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/ConsoleWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.IOException;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.source.extractor.CheckpointableWatermark;
import org.apache.gobblin.source.extractor.DefaultCheckpointableWatermark;
import org.apache.gobblin.source.extractor.extract.LongWatermark;
import org.apache.gobblin.stream.RecordEnvelope;
/**
 * Unit tests for {@link ConsoleWriter}: null/ordinary-object writes must not
 * throw, and writing a {@link RecordEnvelope} must ack its attached watermark.
 */
public class ConsoleWriterTest {
  /** Writing a null String must be a no-op, not an error. */
  @Test
  public void testNull() {
    ConsoleWriter<String> consoleWriter = new ConsoleWriter<>();
    String foo = null;
    try {
      consoleWriter.write(foo);
    } catch (Exception e) {
      Assert.fail("Should not throw an exception on null");
    }
  }

  /** Writing a boxed primitive must succeed. */
  @Test
  public void testInteger() {
    ConsoleWriter<Integer> consoleWriter = new ConsoleWriter<>();
    Integer foo = 1;
    try {
      consoleWriter.write(foo);
    } catch (Exception e) {
      Assert.fail("Should not throw an exception on writing an Integer");
    }
  }

  /** Marker type with no custom {@code toString()}. */
  private class TestObject {
  }

  /** Writing an object without an explicit toString(), or a null object, must succeed. */
  @Test
  public void testObject() {
    TestObject testObject = new TestObject();
    ConsoleWriter<TestObject> consoleWriter = new ConsoleWriter<>();
    try {
      consoleWriter.write(testObject);
    } catch (Exception e) {
      Assert.fail("Should not throw an exception on writing an object that doesn't explicitly implement toString");
    }
    testObject = null;
    try {
      consoleWriter.write(testObject);
    } catch (Exception e) {
      Assert.fail("Should not throw an exception on writing a null object");
    }
  }

  /**
   * Writes {@code content} wrapped in a {@link RecordEnvelope} carrying a
   * checkpointable watermark for {@code source}, then verifies the watermark
   * was acked by the writer.
   */
  private void writeEnvelope(ConsoleWriter<String> consoleWriter, String content, String source, long value)
      throws IOException {
    CheckpointableWatermark watermark =
        new DefaultCheckpointableWatermark(source, new LongWatermark(value));
    AcknowledgableWatermark ackable = new AcknowledgableWatermark(watermark);
    // addCallBack returns the base stream-entity type, hence the cast.
    RecordEnvelope<String> envelope = (RecordEnvelope<String>) new RecordEnvelope<>(content).addCallBack(ackable);
    consoleWriter.writeEnvelope(envelope);
    Assert.assertTrue(ackable.isAcked());
  }

  @Test
  public void testWatermarkWrite()
      throws IOException {
    // The envelopes carry String payloads, so the writer is a ConsoleWriter<String>
    // (the original declared ConsoleWriter<TestObject>, which only compiled via a
    // raw-typed helper parameter).
    ConsoleWriter<String> consoleWriter = new ConsoleWriter<>();
    writeEnvelope(consoleWriter, "hello 1", "dataset1", 1);
    writeEnvelope(consoleWriter, "hello 2", "dataset1", 2);
    writeEnvelope(consoleWriter, "hello 2", "dataset2", 1);
  }
}
| 2,855 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/TestConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
/**
 * Test constants shared by the writer unit tests: a sample Avro schema, JSON
 * records conforming to it, and filesystem/extract naming used by writers.
 *
 * @author Yinan Li
 */
public final class TestConstants {
  // Test Avro schema
  public static final String AVRO_SCHEMA =
      "{\"namespace\": \"example.avro\",\n" + " \"type\": \"record\",\n" + " \"name\": \"User\",\n" + " \"fields\": [\n"
          + " {\"name\": \"name\", \"type\": \"string\"},\n"
          + " {\"name\": \"favorite_number\", \"type\": \"int\"},\n"
          + " {\"name\": \"favorite_color\", \"type\": \"string\"}\n" + " ]\n" + "}";

  // Test Avro data in json format
  public static final String[] JSON_RECORDS =
      {"{\"name\": \"Alyssa\", \"favorite_number\": 256, \"favorite_color\": \"yellow\"}", "{\"name\": \"Ben\", \"favorite_number\": 7, \"favorite_color\": \"red\"}", "{\"name\": \"Charlie\", \"favorite_number\": 68, \"favorite_color\": \"blue\"}"};

  public static final String TEST_FS_URI = "file://localhost/";
  public static final String TEST_ROOT_DIR = "test";
  public static final String TEST_STAGING_DIR = TEST_ROOT_DIR + "/staging";
  public static final String TEST_OUTPUT_DIR = TEST_ROOT_DIR + "/output";
  public static final String TEST_FILE_NAME = "test.avro";
  public static final String TEST_WRITER_ID = "writer-1";
  public static final String TEST_FILE_EXTENSION = "avro";
  public static final String TEST_EXTRACT_NAMESPACE = "com.linkedin.writer.test";
  // Unique per JVM run so repeated test runs don't collide on extract ids.
  public static final String TEST_EXTRACT_ID = String.valueOf(System.currentTimeMillis());
  public static final String TEST_EXTRACT_TABLE = "TestTable";
  public static final String TEST_EXTRACT_PULL_TYPE = "FULL";

  // Constants holder; never instantiated.
  private TestConstants() {
  }
}
| 2,856 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/AvroHdfsDataWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
/**
 * Unit tests for {@link AvroHdfsDataWriter}: writes JSON-derived records for
 * each supported codec, reads them back, and checks writer metrics.
 *
 * @author Yinan Li
 */
@Test(groups = { "gobblin.writer" })
public class AvroHdfsDataWriterTest {
  private static final String TEST_PROPERTY_KEY = "test.property";
  private static final String TEST_PROPERTY_VALUE = "testValue";
  // Gson target type for deserializing a JSON record into a field map.
  private static final Type FIELD_ENTRY_TYPE = new TypeToken<Map<String, Object>>() {}.getType();

  private Schema schema;
  private String filePath;
  private State properties;

  @BeforeClass
  public void setUp() throws Exception {
    // Making the staging and/or output dirs if necessary
    File stagingDir = new File(TestConstants.TEST_STAGING_DIR);
    File outputDir = new File(TestConstants.TEST_OUTPUT_DIR);
    if (!stagingDir.exists()) {
      stagingDir.mkdirs();
    }
    if (!outputDir.exists()) {
      outputDir.mkdirs();
    }
    this.schema = new Schema.Parser().parse(TestConstants.AVRO_SCHEMA);
    // Custom schema property; testWrite verifies it survives the write/read round trip.
    this.schema.addProp(TEST_PROPERTY_KEY, TEST_PROPERTY_VALUE);
    this.filePath = TestConstants.TEST_EXTRACT_NAMESPACE.replaceAll("\\.", "/") + "/" + TestConstants.TEST_EXTRACT_TABLE
        + "/" + TestConstants.TEST_EXTRACT_ID + "_" + TestConstants.TEST_EXTRACT_PULL_TYPE;
    this.properties = new State();
    this.properties.setProp(ConfigurationKeys.WRITER_BUFFER_SIZE, ConfigurationKeys.DEFAULT_BUFFER_SIZE);
    this.properties.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, TestConstants.TEST_FS_URI);
    this.properties.setProp(ConfigurationKeys.WRITER_STAGING_DIR, TestConstants.TEST_STAGING_DIR);
    this.properties.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, TestConstants.TEST_OUTPUT_DIR);
    this.properties.setProp(ConfigurationKeys.WRITER_FILE_PATH, this.filePath);
    this.properties.setProp(ConfigurationKeys.WRITER_FILE_NAME, TestConstants.TEST_FILE_NAME);
  }

  /** Codecs the writer must support; each is exercised by {@link #testWrite}. */
  @DataProvider(name = "codecs")
  private String[] codecs() {
    return new String[]{"null", "deflate", "snappy", "bzip2", "xz", "zstandard"};
  }

  /**
   * Writes all test records with the given codec, then reads the output file
   * back and verifies schema properties, record contents, and FsWriterMetrics.
   */
  @Test(dataProvider = "codecs")
  public void testWrite(String codec) throws IOException {
    this.properties.setProp(ConfigurationKeys.WRITER_CODEC_TYPE, codec);
    DataWriterBuilder<Schema, GenericRecord> builder = new AvroDataWriterBuilder()
        .writeTo(Destination.of(Destination.DestinationType.HDFS, this.properties))
        .writeInFormat(WriterOutputFormat.AVRO).withWriterId(TestConstants.TEST_WRITER_ID)
        .withSchema(this.schema).withBranches(1).forBranch(0);
    DataWriter<GenericRecord> writer = builder.build();

    // Write all test records
    for (String record : TestConstants.JSON_RECORDS) {
      writer.write(convertRecord(record));
    }
    Assert.assertEquals(writer.recordsWritten(), 3);
    writer.close();
    writer.commit();

    File outputFile =
        new File(TestConstants.TEST_OUTPUT_DIR + Path.SEPARATOR + this.filePath, TestConstants.TEST_FILE_NAME);
    // try-with-resources: the original leaked the reader when an assertion failed.
    try (DataFileReader<GenericRecord> reader =
        new DataFileReader<>(outputFile, new GenericDatumReader<GenericRecord>())) {
      Schema fileSchema = reader.getSchema();
      Assert.assertEquals(fileSchema.getProp(TEST_PROPERTY_KEY), TEST_PROPERTY_VALUE);

      // Read the records back and assert they are identical to the ones written.
      // Strings come back as UTF8, so we have to call toString() here and below.
      GenericRecord user1 = reader.next();
      Assert.assertEquals(user1.get("name").toString(), "Alyssa");
      Assert.assertEquals(user1.get("favorite_number"), 256);
      Assert.assertEquals(user1.get("favorite_color").toString(), "yellow");
      GenericRecord user2 = reader.next();
      Assert.assertEquals(user2.get("name").toString(), "Ben");
      Assert.assertEquals(user2.get("favorite_number"), 7);
      Assert.assertEquals(user2.get("favorite_color").toString(), "red");
      GenericRecord user3 = reader.next();
      Assert.assertEquals(user3.get("name").toString(), "Charlie");
      Assert.assertEquals(user3.get("favorite_number"), 68);
      Assert.assertEquals(user3.get("favorite_color").toString(), "blue");
    }

    // The writer publishes per-file metrics into the state; verify them.
    FsWriterMetrics metrics = FsWriterMetrics.fromJson(this.properties.getProp(FsDataWriter.FS_WRITER_METRICS_KEY));
    Assert.assertEquals(metrics.fileInfos.size(), 1);
    FsWriterMetrics.FileInfo fileInfo = metrics.fileInfos.iterator().next();
    Assert.assertEquals(fileInfo.fileName, TestConstants.TEST_FILE_NAME);
    Assert.assertEquals(fileInfo.numRecords, 3);
    Assert.assertNull(metrics.partitionInfo.partitionKey);
    Assert.assertEquals(metrics.partitionInfo.branchId, 0);
  }

  @AfterClass
  public void tearDown() throws IOException {
    // Clean up the staging and/or output directories if necessary
    File testRootDir = new File(TestConstants.TEST_ROOT_DIR);
    if (testRootDir.exists()) {
      FileUtil.fullyDelete(testRootDir);
    }
  }

  /** Converts a JSON record string into a {@link GenericRecord} for this.schema. */
  private GenericRecord convertRecord(String inputRecord) {
    Gson gson = new Gson();
    JsonElement element = gson.fromJson(inputRecord, JsonElement.class);
    Map<String, Object> fields = gson.fromJson(element, FIELD_ENTRY_TYPE);
    GenericRecord outputRecord = new GenericData.Record(this.schema);
    for (Map.Entry<String, Object> entry : fields.entrySet()) {
      outputRecord.put(entry.getKey(), entry.getValue());
    }
    return outputRecord;
  }
}
| 2,857 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/HiveWritableHdfsDataWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * Tests for the Hive writable HDFS data writer: verifies that close/commit are
 * idempotent and that the staged file is moved to the output directory.
 */
public class HiveWritableHdfsDataWriterTest {
  private FileSystem fs;
  private File tmpDir;

  @BeforeClass
  public void setUp() throws IOException {
    this.tmpDir = Files.createTempDir();
    this.fs = FileSystem.get(new Configuration());
  }

  @AfterClass
  public void cleanUp() throws IOException {
    if (this.fs.exists(new Path(this.tmpDir.getAbsolutePath()))) {
      if (!this.fs.delete(new Path(this.tmpDir.getAbsolutePath()), true)) {
        throw new IOException("Failed to clean up path " + this.tmpDir);
      }
    }
  }

  /**
   * Test that multiple close calls do not raise an error
   */
  @Test
  public void testMultipleClose() throws IOException {
    Properties properties = new Properties();
    // try-with-resources: the original leaked this FileReader.
    try (FileReader propsReader = new FileReader("gobblin-core/src/test/resources/writer/hive_writer.properties")) {
      properties.load(propsReader);
    }
    properties.setProperty("writer.staging.dir", new Path(tmpDir.getAbsolutePath(), "output-staging").toString());
    properties.setProperty("writer.output.dir", new Path(tmpDir.getAbsolutePath(), "output").toString());
    properties.setProperty("writer.file.path", ".");
    SourceState sourceState = new SourceState(new State(properties), ImmutableList.<WorkUnitState> of());
    DataWriter<?> writer = new HiveWritableHdfsDataWriterBuilder<>().withBranches(1)
        .withWriterId("0").writeTo(Destination.of(Destination.DestinationType.HDFS, sourceState))
        .writeInFormat(WriterOutputFormat.ORC).build();
    writer.close();
    // check for file existence
    Assert.assertTrue(this.fs.exists(new Path(new Path(tmpDir.getAbsolutePath(), "output-staging"), "writer-output.orc")),
        "staging file not found");
    // closed again is okay
    writer.close();
    // commit after close is okay
    writer.commit();
    Assert.assertTrue(this.fs.exists(new Path(new Path(tmpDir.getAbsolutePath(), "output"), "writer-output.orc")),
        "output file not found");
  }
}
| 2,858 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/CloseOnFlushWriterWrapperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.records.ControlMessageHandler;
import org.apache.gobblin.records.FlushControlMessageHandler;
import org.apache.gobblin.stream.ControlMessage;
import org.apache.gobblin.stream.FlushControlMessage;
import org.apache.gobblin.stream.RecordEnvelope;
/**
 * Unit tests for {@link CloseOnFlushWriterWrapper}: when close-on-flush is
 * enabled, a {@link FlushControlMessage} commits and closes the current
 * underlying writer, and a fresh writer is created on the next write.
 */
public class CloseOnFlushWriterWrapperTest {

  @Test
  public void testCloseOnFlushDisabled()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    // Close-on-flush off (default): flush must neither close nor commit.
    assertWriterState(dummyWriters.get(0), 1, 1, 0, false, 1);
  }

  @Test
  public void testCloseOnFlushEnabled()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    state.getJobState().setProp(CloseOnFlushWriterWrapper.WRITER_CLOSE_ON_FLUSH_KEY, "true");
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    // Close-on-flush on: flush commits and closes the underlying writer.
    assertWriterState(dummyWriters.get(0), 1, 1, 1, true, 1);
  }

  @Test
  public void testWriteAfterFlush()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    state.getJobState().setProp(CloseOnFlushWriterWrapper.WRITER_CLOSE_ON_FLUSH_KEY, "true");
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    Assert.assertEquals(dummyWriters.size(), 1);
    assertWriterState(dummyWriters.get(0), 1, 1, 1, true, 1);

    // A write after the flush-triggered close must create a second writer.
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    Assert.assertEquals(dummyWriters.size(), 2);
    assertWriterState(dummyWriters.get(1), 1, 1, 1, true, 1);
  }

  @Test
  public void testCloseAfterFlush()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    state.getJobState().setProp(CloseOnFlushWriterWrapper.WRITER_CLOSE_ON_FLUSH_KEY, "true");
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    assertWriterState(dummyWriters.get(0), 1, 1, 1, true, 1);
    writer.close();
    // writer should not be closed multiple times
    Assert.assertEquals(dummyWriters.get(0).closeCount, 1);
  }

  @Test
  public void testDirectFlushAfterFlush()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    state.getJobState().setProp(CloseOnFlushWriterWrapper.WRITER_CLOSE_ON_FLUSH_KEY, "true");
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    assertWriterState(dummyWriters.get(0), 1, 1, 1, true, 1);
    writer.flush();
    writer.close();
    // writer should not be flushed or closed multiple times
    Assert.assertEquals(dummyWriters.get(0).flushCount, 1);
    Assert.assertEquals(dummyWriters.get(0).closeCount, 1);
  }

  @Test
  public void testBackToBackFlushMessages()
      throws IOException {
    WorkUnitState state = new WorkUnitState();
    state.getJobState().setProp(CloseOnFlushWriterWrapper.WRITER_CLOSE_ON_FLUSH_KEY, "true");
    List<DummyWriter> dummyWriters = new ArrayList<>();
    CloseOnFlushWriterWrapper<byte[]> writer = getCloseOnFlushWriter(dummyWriters, state);
    byte[] record = new byte[]{'a', 'b', 'c', 'd'};
    writer.writeEnvelope(new RecordEnvelope(record));
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    assertWriterState(dummyWriters.get(0), 1, 1, 1, true, 1);
    writer.getMessageHandler().handleMessage(FlushControlMessage.builder().build());
    // a flush control message on a closed writer should be a noop
    Assert.assertEquals(dummyWriters.get(0).handlerCalled, 1);
    writer.close();
    // writer should not be closed multiple times
    Assert.assertEquals(dummyWriters.get(0).closeCount, 1);
  }

  /** Asserts the observable state of a {@link DummyWriter} in one call. */
  private static void assertWriterState(DummyWriter writer, long recordsWritten, int flushCount, int closeCount,
      boolean committed, int handlerCalled) {
    Assert.assertEquals(writer.recordsWritten(), recordsWritten);
    Assert.assertEquals(writer.flushCount, flushCount);
    Assert.assertEquals(writer.closeCount, closeCount);
    Assert.assertEquals(writer.committed, committed);
    Assert.assertEquals(writer.handlerCalled, handlerCalled);
  }

  /**
   * Builds a wrapper whose writer supplier records every {@link DummyWriter}
   * it creates into {@code dummyWriters} for later inspection.
   */
  private CloseOnFlushWriterWrapper getCloseOnFlushWriter(List<DummyWriter> dummyWriters, WorkUnitState state) {
    return new CloseOnFlushWriterWrapper<>(new Supplier<DataWriter<byte[]>>() {
      @Override
      public DataWriter<byte[]> get() {
        DummyWriter writer = new DummyWriter();
        dummyWriters.add(writer);
        return writer;
      }
    }, state.getJobState());
  }

  /** In-memory writer that counts writes, flushes, closes, and handled messages. */
  private static class DummyWriter implements DataWriter<byte[]> {
    private int recordsSeen = 0;
    private byte[] lastWrittenRecord;
    private int flushCount = 0;
    private int closeCount = 0;
    private boolean committed = false;
    private int handlerCalled = 0;

    DummyWriter() {
    }

    @Override
    public void write(byte[] record)
        throws IOException {
      this.recordsSeen++;
      this.lastWrittenRecord = record;
    }

    @Override
    public void commit()
        throws IOException {
      this.committed = true;
    }

    @Override
    public void cleanup()
        throws IOException {
    }

    @Override
    public long recordsWritten() {
      return this.recordsSeen;
    }

    @Override
    public long bytesWritten()
        throws IOException {
      return 0;
    }

    @Override
    public void close()
        throws IOException {
      this.closeCount++;
    }

    @Override
    public ControlMessageHandler getMessageHandler() {
      return new FlushControlMessageHandler(this) {
        @Override
        public void handleMessage(ControlMessage message) {
          handlerCalled++;
          if (message instanceof FlushControlMessage) {
            flush();
          }
        }
      };
    }

    @Override
    public void flush() {
      this.flushCount++;
    }
  }
}
| 2,859 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/FsDataWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.IOException;
import org.apache.gobblin.configuration.State;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link FsDataWriter#getFinalState()}: if reading one counter
 * throws, the other counter must still be reported in the final state.
 */
public class FsDataWriterTest {
  @Test
  public void testExceptionOnGetRecordsWritten() throws IOException {
    FsDataWriter writer = Mockito.mock(FsDataWriter.class);
    final long bytesWritten = 1000;
    Mockito.when(writer.getFinalState()).thenCallRealMethod();
    // recordsWritten() blows up; getFinalState() should swallow the failure
    // and still report BytesWritten.
    Mockito.when(writer.recordsWritten()).thenThrow(new RuntimeException("Test exception"));
    Mockito.when(writer.bytesWritten()).thenReturn(bytesWritten);
    State finalState = writer.getFinalState();
    Assert.assertNull(finalState.getProp("RecordsWritten"));
    Assert.assertEquals(finalState.getPropAsLong("BytesWritten"), bytesWritten);
  }

  @Test
  public void testExceptionOnGetBytesWritten() throws IOException {
    FsDataWriter writer = Mockito.mock(FsDataWriter.class);
    final long recordsWritten = 1000;
    Mockito.when(writer.getFinalState()).thenCallRealMethod();
    // Mirror case: bytesWritten() fails, RecordsWritten must still be present.
    Mockito.when(writer.bytesWritten()).thenThrow(new RuntimeException("Test exception"));
    Mockito.when(writer.recordsWritten()).thenReturn(recordsWritten);
    State finalState = writer.getFinalState();
    Assert.assertNull(finalState.getProp("BytesWritten"));
    Assert.assertEquals(finalState.getPropAsLong("RecordsWritten"), recordsWritten);
  }
}
| 2,860 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/MetadataWriterWrapperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.IOException;
import java.util.Arrays;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.metadata.types.GlobalMetadata;
import org.apache.gobblin.metadata.types.Metadata;
import org.apache.gobblin.type.RecordWithMetadata;
public class MetadataWriterWrapperTest {
private MetadataWriterWrapper<byte[]> writer;
private DummyWriter dummyWriter;
private WorkUnitState state;
@BeforeMethod
public void setUp() {
state = new WorkUnitState();
dummyWriter = new DummyWriter();
writer = new MetadataWriterWrapper<>(dummyWriter, byte[].class, 1, 0, state.getJobState());
}
@Test
public void passesRecordThrough()
throws IOException {
byte[] record = new byte[]{'a', 'b', 'c', 'd'};
dummyWriter.setExpectedRecord(record);
writer.write(record);
writer.commit();
Assert.assertEquals(dummyWriter.recordsWritten(), 1);
Assert.assertNull(state.getProp(ConfigurationKeys.WRITER_METADATA_KEY));
}
@Test
public void recordsMetadata()
throws IOException {
final String URN = "unit-test:dataset";
byte[] record = new byte[]{'a', 'b', 'c', 'd', 'e'};
dummyWriter.setExpectedRecord(record);
Metadata md = new Metadata();
md.getGlobalMetadata().setDatasetUrn(URN);
RecordWithMetadata<byte[]> mdRecord = new RecordWithMetadata<>(record, md);
writer.write(mdRecord);
writer.commit();
Assert.assertEquals(dummyWriter.recordsWritten(), 1);
String writerMetadata = state.getProp(ConfigurationKeys.WRITER_METADATA_KEY);
Assert.assertNotNull(writerMetadata, "Expected there to be metadata");
Assert.assertEquals(1, countOccurrences(writerMetadata, URN));
// Write a 2nd record with the same metadata; it should _not_ be included twice in output
byte[] record2 = new byte[]{'e', 'f', 'g', 'h'};
dummyWriter.setExpectedRecord(record2);
Metadata md2 = new Metadata();
md2.getGlobalMetadata().setDatasetUrn(URN);
writer.write(new RecordWithMetadata<>(record2, md2));
writer.commit();
Assert.assertEquals(dummyWriter.recordsWritten(), 2);
writerMetadata = state.getProp(ConfigurationKeys.WRITER_METADATA_KEY);
Assert.assertNotNull(writerMetadata, "Expected there to be metadata");
Assert.assertEquals(1, countOccurrences(writerMetadata, URN));
// and now a 3rd
// Write a 2nd record with the same metadata; it should _not_ be included twice in output
byte[] record3 = new byte[]{'i', 'j', 'k', 'l'};
dummyWriter.setExpectedRecord(record3);
Metadata md3 = new Metadata();
md3.getGlobalMetadata().setDatasetUrn(URN + "_other");
writer.write(new RecordWithMetadata<>(record3, md3));
writer.commit();
Assert.assertEquals(dummyWriter.recordsWritten(), 3);
writerMetadata = state.getProp(ConfigurationKeys.WRITER_METADATA_KEY);
Assert.assertNotNull(writerMetadata, "Expected there to be metadata");
Assert.assertEquals(2, countOccurrences(writerMetadata, URN));
}
@Test
public void testAppendsDefaultMetadata()
throws IOException {
state = new WorkUnitState();
dummyWriter = new MetadataDummyWriter();
writer = new MetadataWriterWrapper<>(dummyWriter, byte[].class, 1, 0, state.getJobState());
byte[] recordBytes = new byte[]{'a', 'b', 'c', 'd'};
Metadata md = new Metadata();
md.getGlobalMetadata().addTransferEncoding("first");
writer.write(new RecordWithMetadata<>(recordBytes, md));
writer.commit();
String writerMetadata = state.getProp(ConfigurationKeys.WRITER_METADATA_KEY);
Assert.assertNotNull(writerMetadata, "Expected there to be metadata");
int firstOccurrence = writerMetadata.indexOf("\"first\"");
Assert.assertNotEquals(firstOccurrence, -1, "Expected to find record-level encoding in metadata");
int secondOccurrence = writerMetadata.indexOf("\"default-encoding\"");
Assert.assertNotEquals(secondOccurrence, -1, "Expected to find default metadata in metadata");
Assert.assertTrue(firstOccurrence < secondOccurrence,
"Expected recordBytes encoding to be present before default encoding");
}
@Test
public void testAppendsMetadataWithNormalRecord() throws IOException {
state = new WorkUnitState();
dummyWriter = new MetadataDummyWriter();
writer = new MetadataWriterWrapper<>(dummyWriter, byte[].class, 1, 0, state.getJobState());
byte[] recordBytes = new byte[]{'a', 'b', 'c', 'd'};
writer.write(recordBytes);
writer.commit();
String writerMetadata = state.getProp(ConfigurationKeys.WRITER_METADATA_KEY);
Assert.assertNotNull(writerMetadata, "Expected there to be metadata");
Assert.assertNotEquals(writerMetadata.indexOf("\"default-encoding\""),
-1, "Expected to find default metadata in metadata");
}
private static int countOccurrences(String s, String stringToFind) {
int start = s.indexOf(stringToFind, 0);
int count = 0;
while (start != -1) {
count++;
start = s.indexOf(stringToFind, start + stringToFind.length());
}
return count;
}
/**
 * Minimal {@link DataWriter} stub that counts the raw records it receives and can
 * optionally verify each written record against a single expected byte array.
 */
private static class DummyWriter implements DataWriter<byte[]> {
  private byte[] expectedRecord;
  private int recordsSeen = 0;

  /** Sets the record every subsequent write must equal; null disables the check. */
  public void setExpectedRecord(byte[] record) {
    this.expectedRecord = record;
  }

  @Override
  public void write(byte[] record) throws IOException {
    recordsSeen++;
    // Fail fast when the incoming record differs from what the test expects.
    if (expectedRecord != null && !Arrays.equals(expectedRecord, record)) {
      throw new IOException("Expected record doesn't match");
    }
  }

  @Override
  public void commit() throws IOException {
    // no-op: nothing buffered in this stub
  }

  @Override
  public void cleanup() throws IOException {
    // no-op
  }

  @Override
  public long recordsWritten() {
    return recordsSeen;
  }

  @Override
  public long bytesWritten() throws IOException {
    // Byte accounting is not needed by these tests.
    return 0;
  }

  @Override
  public void close() throws IOException {
    // no-op: no underlying resource
  }
}
/**
 * {@link DummyWriter} variant that is metadata-aware and always reports a fixed
 * default transfer encoding ("default-encoding") as its default metadata.
 */
private static class MetadataDummyWriter extends DummyWriter implements MetadataAwareWriter {
  // final: the shared default metadata is built once in the static block and never reassigned
  private static final GlobalMetadata globalMd = new GlobalMetadata();
  static {
    globalMd.addTransferEncoding("default-encoding");
  }

  @Override
  public GlobalMetadata getDefaultMetadata() {
    return globalMd;
  }
}
}
| 2,861 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/RetryWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.IOException;
import org.junit.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.exception.NonTransientException;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.util.FinalState;
import static org.mockito.Mockito.*;
// NOTE: the class-level @Test makes every public method in this class a TestNG test,
// so the individual methods carry no annotation of their own.
@Test(groups = { "gobblin.writer" })
public class RetryWriterTest {

  /**
   * A writer that always throws a transient (plain Runtime) exception should be retried
   * up to the configured maximum number of attempts before the failure surfaces.
   */
  public void retryTest() throws IOException {
    DataWriter<Void> writer = mock(DataWriter.class);
    doThrow(new RuntimeException()).when(writer).writeEnvelope(any(RecordEnvelope.class));

    State state = new State();
    state.setProp(RetryWriter.RETRY_MAX_ATTEMPTS, "5");
    DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, state);
    DataWriter<Void> retryWriter = builder.build();
    try {
      retryWriter.writeEnvelope(new RecordEnvelope<>(null));
      Assert.fail("Should have failed.");
    } catch (Exception e) { /* expected: all retry attempts exhausted */ }

    // Exactly one delegate invocation per configured attempt.
    verify(writer, times(5)).writeEnvelope(any(RecordEnvelope.class));
  }

  /**
   * A {@link NonTransientException} must NOT be retried: the delegate is invoked at most once.
   */
  public void retryTestNonTransientException() throws IOException {
    DataWriter<Void> writer = mock(DataWriter.class);
    doThrow(new NonTransientException()).when(writer).writeEnvelope(any(RecordEnvelope.class));

    DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, new State());
    DataWriter<Void> retryWriter = builder.build();
    try {
      retryWriter.writeEnvelope(new RecordEnvelope<>(null));
      Assert.fail("Should have failed.");
    } catch (Exception e) { /* expected: non-transient failures are not retried */ }

    verify(writer, atMost(1)).writeEnvelope(any(RecordEnvelope.class));
  }

  /**
   * A successful write passes straight through: the delegate sees exactly one invocation.
   */
  public void retryTestSuccess() throws IOException {
    DataWriter<Void> writer = mock(DataWriter.class);

    DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, new State());
    DataWriter<Void> retryWriter = builder.build();
    retryWriter.writeEnvelope(new RecordEnvelope<>(null));

    verify(writer, times(1)).writeEnvelope(any(RecordEnvelope.class));
  }

  /**
   * The retry wrapper delegates {@code getFinalState()} and augments the result with its
   * own failed-writes counter under {@link RetryWriter#FAILED_WRITES_KEY}.
   */
  public void retryGetFinalState() throws IOException {
    PartitionedDataWriter writer = mock(PartitionedDataWriter.class);
    when(writer.getFinalState()).thenReturn(new State());

    DataWriterWrapperBuilder<Void> builder = new DataWriterWrapperBuilder<>(writer, new State());
    DataWriter<Void> retryWriter = builder.build();
    State state = ((FinalState) retryWriter).getFinalState();

    verify(writer, times(1)).getFinalState();
    Assert.assertTrue(state.contains(RetryWriter.FAILED_WRITES_KEY));
  }
}
| 2,862 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/SimpleDataWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.Collections;
import java.util.zip.GZIPInputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.crypto.EncryptionConfigParser;
import org.apache.gobblin.crypto.EncryptionFactory;
/**
* Unit tests for {@link SimpleDataWriter}.
*
* @author akshay@nerdwallet.com
*/
@Test(groups = { "gobblin.writer" })
public class SimpleDataWriterTest {

  private String filePath;
  private final String schema = "";
  private final int newLine = "\n".getBytes()[0];
  private State properties;

  private static final String ENCRYPT_PREFIX = "writer.encrypt.";

  /**
   * Creates the staging/output directories (if needed) and populates the writer
   * configuration shared by every test method.
   */
  @BeforeMethod
  public void setUp() throws Exception {
    properties = new State();

    // Making the staging and/or output dirs if necessary
    File stagingDir = new File(TestConstants.TEST_STAGING_DIR);
    File outputDir = new File(TestConstants.TEST_OUTPUT_DIR);
    if (!stagingDir.exists()) {
      stagingDir.mkdirs();
    }
    if (!outputDir.exists()) {
      outputDir.mkdirs();
    }

    this.filePath = TestConstants.TEST_EXTRACT_NAMESPACE.replaceAll("\\.", "/") + "/" + TestConstants.TEST_EXTRACT_TABLE
        + "/" + TestConstants.TEST_EXTRACT_ID + "_" + TestConstants.TEST_EXTRACT_PULL_TYPE;

    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "\n");
    properties.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, TestConstants.TEST_FS_URI);
    properties.setProp(ConfigurationKeys.WRITER_STAGING_DIR, TestConstants.TEST_STAGING_DIR);
    properties.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, TestConstants.TEST_OUTPUT_DIR);
    properties.setProp(ConfigurationKeys.WRITER_FILE_PATH, this.filePath);
    properties.setProp(ConfigurationKeys.WRITER_FILE_NAME, TestConstants.TEST_FILE_NAME);
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_PREPEND_SIZE, false);
  }

  /**
   * Test writing records without a delimiter and make sure it works.
   * @throws IOException
   */
  @Test
  public void testWriteBytesNoDelim() throws IOException {
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "");
    // Build a writer to write test records
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    // No delimiter, so bytes written is exactly the sum of the record sizes.
    Assert.assertEquals(writer.bytesWritten(), rec1.length + rec2.length + rec3.length);

    File outputFile = new File(writer.getOutputFilePath());
    byte[][] records = { rec1, rec2, rec3 };
    // try-with-resources: the stream was previously leaked if an assertion failed
    try (InputStream is = new FileInputStream(outputFile)) {
      int c, resNum = 0, resi = 0;
      while ((c = is.read()) != -1) {
        if (resi >= records[resNum].length) {
          resNum++;
          resi = 0;
        }
        Assert.assertEquals(c, records[resNum][resi]);
        resi++;
      }
    }
  }

  /**
   * Prepend the size to each record without delimiting the record. Each record
   * should be prepended by the size of that record and the bytes written should
   * include the prepended bytes.
   */
  @Test
  public void testPrependSizeWithoutDelimiter() throws IOException {
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_PREPEND_SIZE, true);
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "");
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };
    byte[][] records = { rec1, rec2, rec3 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    // Each record is prefixed with an 8-byte (long) length header.
    Assert.assertEquals(writer.bytesWritten(), rec1.length + rec2.length + rec3.length + (Long.SIZE / 8 * 3));

    File outputFile = new File(writer.getOutputFilePath());
    try (DataInputStream dis = new DataInputStream(new FileInputStream(outputFile))) {
      for (int i = 0; i < 3; i++) {
        long size = dis.readLong();
        Assert.assertEquals(size, records[i].length);
        for (int j = 0; j < size; j++) {
          Assert.assertEquals(dis.readByte(), records[i][j]);
        }
      }
    }
  }

  /**
   * Use the simple data writer to write random bytes to a file and ensure
   * they are the same when read back.
   *
   * @throws IOException
   */
  @Test
  public void testWriteRandomBytes() throws IOException {
    // Build a writer to write test records
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    Assert.assertEquals(writer.bytesWritten(), rec1.length + rec2.length + rec3.length + 3); // 3 bytes for newline character

    File outputFile = new File(writer.getOutputFilePath());
    byte[][] records = { rec1, rec2, rec3 };
    try (InputStream is = new FileInputStream(outputFile)) {
      int c, resNum = 0, resi = 0;
      while ((c = is.read()) != -1) {
        if (c != newLine) {
          Assert.assertEquals(c, records[resNum][resi]);
          resi++;
        } else {
          // Newline delimiter marks the end of a record.
          resNum++;
          resi = 0;
        }
      }
    }
  }

  /**
   * Prepend the size to each record and delimit the record. Each record
   * should be prepended by the size of that record and the bytes written should
   * include the prepended bytes.
   */
  @Test
  public void testPrependSizeWithDelimiter() throws IOException {
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_PREPEND_SIZE, true);
    SimpleDataWriter writer = buildSimpleDataWriter();
    byte[] rec1 = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] rec2 = { 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    byte[] rec3 = { 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45 };
    byte[][] records = { rec1, rec2, rec3 };

    writer.write(rec1);
    writer.write(rec2);
    writer.write(rec3);
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    // 8-byte length header per record plus one delimiter byte per record.
    Assert.assertEquals(writer.bytesWritten(), rec1.length + rec2.length + rec3.length + (Long.SIZE / 8 * 3) + 3);

    File outputFile = new File(writer.getOutputFilePath());
    try (DataInputStream dis = new DataInputStream(new FileInputStream(outputFile))) {
      for (int i = 0; i < 3; i++) {
        long size = dis.readLong();
        // The recorded size includes the trailing delimiter byte.
        Assert.assertEquals(size, records[i].length + 1);
        for (int j = 0; j < size - 1; j++) {
          Assert.assertEquals(dis.readByte(), records[i][j]);
        }
        Assert.assertEquals(dis.readByte(), '\n');
      }
    }
  }

  /** Verifies gzip compression: output decompresses to the original bytes and is suffixed ".gzip". */
  @Test
  public void testSupportsGzip() throws IOException {
    properties.setProp(ConfigurationKeys.WRITER_CODEC_TYPE, "gzip");
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "");

    byte[] toWrite = new byte[] { 'a', 'b', 'c', 'd'};

    SimpleDataWriter writer = buildSimpleDataWriter();
    writer.write(toWrite);
    writer.close();
    writer.commit();

    File outputFile = new File(writer.getOutputFilePath());
    try (InputStream in = new GZIPInputStream(new FileInputStream(outputFile))) {
      byte[] contents = IOUtils.toByteArray(in);
      Assert.assertEquals(contents, toWrite, "Expected gzip'd content to be written out");
    }
    Assert.assertTrue(outputFile.getName().endsWith(".gzip"), "Expected gzip'd file to end in .gzip");
  }

  /** Verifies that compression and encryption compose: decrypt, then decompress, to get the original bytes. */
  @Test
  public void testSupportsGzipAndEncryption() throws IOException {
    final String ENCRYPTION_TYPE = "insecure_shift";
    final String COMPRESSION_TYPE = "gzip";

    properties.setProp(ConfigurationKeys.WRITER_CODEC_TYPE, COMPRESSION_TYPE);
    properties.setProp(ENCRYPT_PREFIX + EncryptionConfigParser.ENCRYPTION_ALGORITHM_KEY,
        ENCRYPTION_TYPE);
    properties.setProp(ConfigurationKeys.SIMPLE_WRITER_DELIMITER, "");

    byte[] toWrite = new byte[] { 'a', 'b', 'c', 'd'};

    SimpleDataWriter writer = buildSimpleDataWriter();
    writer.write(toWrite);
    writer.close();
    writer.commit();

    File outputFile = new File(writer.getOutputFilePath());
    Assert.assertTrue(outputFile.getName().endsWith("." + COMPRESSION_TYPE + "." + ENCRYPTION_TYPE),
        "Expected compression & encryption in file name!");

    // Encryption is applied last on write, so decryption comes first on read.
    try (InputStream decryptedFile =
            EncryptionFactory.buildStreamCryptoProvider(ENCRYPTION_TYPE, Collections.<String, Object>emptyMap())
                .decodeInputStream(new FileInputStream(outputFile));
        InputStream uncompressedFile = new GZIPInputStream(decryptedFile)) {
      byte[] contents = IOUtils.toByteArray(uncompressedFile);
      Assert.assertEquals(contents, toWrite, "expected to decode same contents");
    }
  }

  /**
   * Use the simple writer to write json entries to a file and ensure that
   * they are the same when read back.
   *
   * @throws IOException
   */
  @Test
  public void testWrite() throws IOException {
    SimpleDataWriter writer = buildSimpleDataWriter();
    int totalBytes = 3; // 3 extra bytes for the newline character

    // Write all test records
    for (String record : TestConstants.JSON_RECORDS) {
      byte[] toWrite = record.getBytes();
      Assert.assertEquals(toWrite.length, record.length()); // ensure null byte does not get added to end
      writer.write(toWrite);
      totalBytes += toWrite.length;
    }
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 3);
    Assert.assertEquals(writer.bytesWritten(), totalBytes);

    File outputFile = new File(writer.getOutputFilePath());
    int lineNumber = 0;
    // try-with-resources closes the reader even if an assertion fails mid-read.
    try (BufferedReader br = new BufferedReader(new FileReader(outputFile))) {
      String line;
      while ((line = br.readLine()) != null) {
        Assert.assertEquals(line, TestConstants.JSON_RECORDS[lineNumber]);
        lineNumber++;
      }
    }
    Assert.assertEquals(lineNumber, 3);
  }

  /** Builds a {@link SimpleDataWriter} wired to the shared test configuration. */
  private SimpleDataWriter buildSimpleDataWriter()
      throws IOException {
    SimpleDataWriterBuilder b = (SimpleDataWriterBuilder)new SimpleDataWriterBuilder()
        .writeTo(Destination.of(Destination.DestinationType.HDFS, properties)).writeInFormat(WriterOutputFormat.AVRO)
        .withWriterId(TestConstants.TEST_WRITER_ID).withSchema(this.schema).forBranch(0);
    return new SimpleDataWriter(b, properties);
  }

  /**
   * If the staging file exists, the simple data writer should overwrite its contents.
   *
   * @throws IOException
   */
  @Test
  public void testOverwriteExistingStagingFile() throws IOException {
    byte[] randomBytesStage = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 0 };
    byte[] randomBytesWrite = { 11, 12, 13, 14, 15 };
    Path stagingFile = new Path(TestConstants.TEST_STAGING_DIR + Path.SEPARATOR + this.filePath,
        TestConstants.TEST_FILE_NAME + "." + TestConstants.TEST_WRITER_ID + "." + "tmp");
    Configuration conf = new Configuration();
    // Add all job configuration properties so they are picked up by Hadoop
    for (String key : properties.getPropertyNames()) {
      conf.set(key, properties.getProp(key));
    }
    FileSystem fs = FileSystem.get(URI.create(TestConstants.TEST_FS_URI), conf);

    // Pre-populate the staging file so the writer has something to overwrite.
    try (OutputStream os = fs.create(stagingFile)) {
      os.write(randomBytesStage);
      os.flush();
    }

    SimpleDataWriter writer = buildSimpleDataWriter();
    writer.write(randomBytesWrite);
    writer.close();
    writer.commit();

    Assert.assertEquals(writer.recordsWritten(), 1);
    Assert.assertEquals(writer.bytesWritten(), randomBytesWrite.length + 1);

    File writeFile = new File(writer.getOutputFilePath());
    try (InputStream is = new FileInputStream(writeFile)) {
      int c, i = 0;
      while ((c = is.read()) != -1) {
        if (i == 5) {
          Assert.assertEquals(c, (byte) newLine); // the last byte should be newline
          i++;
          continue;
        }
        Assert.assertEquals(randomBytesWrite[i], c);
        i++;
      }
    }
  }

  /** Removes the whole test root (staging + output) after each test. */
  @AfterMethod
  public void tearDown() throws IOException {
    // Clean up the staging and/or output directories if necessary
    File testRootDir = new File(TestConstants.TEST_ROOT_DIR);
    if (testRootDir.exists()) {
      FileUtil.fullyDelete(testRootDir);
    }
  }
}
| 2,863 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/PartitionedWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer;
import com.google.common.util.concurrent.UncheckedExecutionException;
import java.io.IOException;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.util.Strings;
import org.apache.gobblin.ack.BasicAckableForTesting;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.dataset.DatasetDescriptor;
import org.apache.gobblin.dataset.PartitionDescriptor;
import org.apache.gobblin.stream.FlushControlMessage;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.writer.test.TestPartitionAwareWriterBuilder;
import org.apache.gobblin.writer.test.TestPartitioner;
/**
* Test {@link org.apache.gobblin.writer.PartitionedDataWriter}
*/
public class PartitionedWriterTest {

  /**
   * Drives two partitions ("a" and "1", keyed by the first character of each record) through
   * the partitioned writer and verifies the exact sequence of BUILD/WRITE/CLEANUP/CLOSE/COMMIT
   * actions the builder records, plus record/byte accounting and partition-info serialization.
   */
  @Test
  public void test() throws IOException {
    State state = new State();
    state.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TestPartitioner.class.getCanonicalName());
    TestPartitionAwareWriterBuilder builder = new TestPartitionAwareWriterBuilder();

    PartitionedDataWriter writer = new PartitionedDataWriter<String, String>(builder, state);
    Assert.assertEquals(builder.actions.size(), 0);

    String record1 = "abc";
    writer.writeEnvelope(new RecordEnvelope(record1));

    // First record for partition "a": a BUILD of that partition's writer, then the WRITE.
    Assert.assertEquals(builder.actions.size(), 2);
    TestPartitionAwareWriterBuilder.Action action = builder.actions.poll();
    Assert.assertEquals(action.getPartition(), "a");
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.BUILD);

    action = builder.actions.poll();
    Assert.assertEquals(action.getPartition(), "a");
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.WRITE);
    Assert.assertEquals(action.getTarget(), record1);
    Assert.assertTrue(writer.isSpeculativeAttemptSafe());

    String record2 = "123";
    writer.writeEnvelope(new RecordEnvelope(record2));

    // First record for partition "1" likewise triggers a BUILD followed by a WRITE.
    Assert.assertEquals(builder.actions.size(), 2);
    action = builder.actions.poll();
    Assert.assertEquals(action.getPartition(), "1");
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.BUILD);
    // The second partition's writer flips overall speculative-attempt safety to false.
    Assert.assertFalse(writer.isSpeculativeAttemptSafe());

    action = builder.actions.poll();
    Assert.assertEquals(action.getPartition(), "1");
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.WRITE);
    Assert.assertEquals(action.getTarget(), record2);

    writer.writeEnvelope(new RecordEnvelope(record1));

    // Partition "a" writer already exists, so only a WRITE is recorded this time.
    Assert.assertEquals(builder.actions.size(), 1);
    action = builder.actions.poll();
    Assert.assertEquals(action.getPartition(), "a");
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.WRITE);
    Assert.assertEquals(action.getTarget(), record1);

    Assert.assertEquals(writer.recordsWritten(), 3);
    Assert.assertEquals(writer.bytesWritten(), 3);

    Assert.assertFalse(writer.isSpeculativeAttemptSafe());

    // cleanup/close/commit fan out to both partition writers (one action each).
    writer.cleanup();
    Assert.assertEquals(builder.actions.size(), 2);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.CLEANUP);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.CLEANUP);

    // Before close, partitions info is not serialized
    String partitionsKey = "writer.0.partitions";
    Assert.assertTrue(state.getProp(partitionsKey) == null);

    writer.close();
    Assert.assertEquals(builder.actions.size(), 2);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.CLOSE);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.CLOSE);

    // After close, partitions info is available
    Assert.assertFalse(Strings.isNullOrEmpty(state.getProp(partitionsKey)));
    List<PartitionDescriptor> partitions = PartitionedDataWriter.getPartitionInfoAndClean(state, 0);
    // getPartitionInfoAndClean removes the serialized property from the state.
    Assert.assertTrue(state.getProp(partitionsKey) == null);
    Assert.assertEquals(partitions.size(), 2);
    DatasetDescriptor dataset = new DatasetDescriptor("testPlatform", "testDataset");
    Assert.assertEquals(partitions.get(0), new PartitionDescriptor("a", dataset));
    Assert.assertEquals(partitions.get(1), new PartitionDescriptor("1", dataset));

    writer.commit();
    Assert.assertEquals(builder.actions.size(), 2);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.COMMIT);
    action = builder.actions.poll();
    Assert.assertEquals(action.getType(), TestPartitionAwareWriterBuilder.Actions.COMMIT);
  }

  /**
   * Verifies that a {@link FlushControlMessage} routed through the writer's message handler
   * is cloned correctly for the partition writers and that acking still works afterwards.
   */
  @Test
  public void testControlMessageHandler() throws IOException {
    State state = new State();
    state.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TestPartitioner.class.getCanonicalName());
    TestPartitionAwareWriterBuilder builder = new TestPartitionAwareWriterBuilder();

    PartitionedDataWriter writer = new PartitionedDataWriter<String, String>(builder, state);
    Assert.assertEquals(builder.actions.size(), 0);

    String record1 = "abc";
    writer.writeEnvelope(new RecordEnvelope(record1));

    String record2 = "123";
    writer.writeEnvelope(new RecordEnvelope(record2));

    FlushControlMessage controlMessage = FlushControlMessage.builder().build();
    BasicAckableForTesting ackable = new BasicAckableForTesting();
    controlMessage.addCallBack(ackable);
    Assert.assertEquals(ackable.acked, 0);

    // when the control message is cloned properly then this does not raise an error
    writer.getMessageHandler().handleMessage(controlMessage);

    // message handler does not ack since consumeRecordStream does acking for control messages
    // this should be revisited when control message error handling is changed
    controlMessage.ack();
    Assert.assertEquals(ackable.acked, 1);

    writer.close();
  }

  /**
   * A builder configured to stall on writer creation should surface as an
   * {@link UncheckedExecutionException} from the partition-writer cache.
   */
  @Test
  public void testTimeoutWhenCreatingWriter() throws IOException {
    State state = new State();
    state.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TestPartitioner.class.getCanonicalName());
    state.setProp(PartitionedDataWriter.PARTITIONED_WRITER_CACHE_TTL_SECONDS, 6);
    TestPartitionAwareWriterBuilder builder = new TestPartitionAwareWriterBuilder(true);

    PartitionedDataWriter writer = new PartitionedDataWriter<String, String>(builder, state);

    String record1 = "abc";
    Assert.expectThrows(UncheckedExecutionException.class, () -> writer.writeEnvelope(new RecordEnvelope(record1)));
  }

  /**
   * Verifies that when cached partition writers expire (TTL = 3s here), the cache removal
   * listener folds their record/byte counts into the evicted-writer totals.
   */
  @Test
  public void testPartitionWriterCacheRemovalListener()
      throws IOException, InterruptedException {
    State state = new State();
    state.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TestPartitioner.class.getCanonicalName());
    state.setProp(PartitionedDataWriter.PARTITIONED_WRITER_CACHE_TTL_SECONDS, 3);
    TestPartitionAwareWriterBuilder builder = new TestPartitionAwareWriterBuilder();

    PartitionedDataWriter writer = new PartitionedDataWriter<String, String>(builder, state);

    String record1 = "abc";
    writer.writeEnvelope(new RecordEnvelope(record1));

    String record2 = "123";
    writer.writeEnvelope(new RecordEnvelope(record2));

    //Sleep for more than cache expiration interval
    Thread.sleep(3500);

    //Call cache clean up to ensure removal of expired entries.
    writer.getPartitionWriters().cleanUp();

    //Ensure the removal listener updates counters.
    Assert.assertEquals(writer.getTotalRecordsFromEvictedWriters(), 2L);
    Assert.assertEquals(writer.getTotalBytesFromEvictedWriters(), 2L);
  }
}
| 2,864 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/test/GobblinTestEventBusWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.test;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.Destination.DestinationType;
/**
 * Unit tests for {@link GobblinTestEventBusWriter}
 */
public class GobblinTestEventBusWriterTest {

  /** Writes three events through a directly-built writer and checks that they arrive in order. */
  @Test
  public void testWrite() throws IOException, InterruptedException, TimeoutException {
    final String busId = "/tmp/GobblinTestEventBusWriterTest/testWrite";
    try (TestingEventBusAsserter eventAsserter = new TestingEventBusAsserter(busId)) {
      GobblinTestEventBusWriter busWriter = GobblinTestEventBusWriter.builder()
          .withEventBusId(busId)
          .build();

      for (String event : new String[] {"event1", "event2", "event3"}) {
        busWriter.write(event);
      }
      for (String event : new String[] {"event1", "event2", "event3"}) {
        eventAsserter.assertNextValueEq(event);
      }

      Assert.assertEquals(busWriter.recordsWritten(), 3);
    }
  }

  /** Verifies the builder picks up the EventBus id from the destination's work-unit property. */
  @Test
  public void testBuilder() throws IOException, InterruptedException, TimeoutException {
    final String busId = "/GobblinTestEventBusWriterTest/testBuilder";
    GobblinTestEventBusWriter.Builder writerBuilder = new GobblinTestEventBusWriter.Builder();

    WorkUnit workUnit = WorkUnit.createEmpty();
    workUnit.setProp(GobblinTestEventBusWriter.FULL_EVENTBUSID_KEY, busId);
    writerBuilder.writeTo(Destination.of(DestinationType.HDFS, workUnit));
    Assert.assertEquals(writerBuilder.getEventBusId(), busId);

    try (TestingEventBusAsserter eventAsserter = new TestingEventBusAsserter(busId)) {
      GobblinTestEventBusWriter busWriter = writerBuilder.build();
      busWriter.write("event1");
      busWriter.write("event2");
      eventAsserter.assertNextValueEq("event1");
      eventAsserter.assertNextValueEq("event2");
      Assert.assertEquals(busWriter.recordsWritten(), 2);
    }
  }
}
| 2,865 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/test/TestingEventBusAsserterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.test;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.testng.Assert;
import org.testng.Assert.ThrowingRunnable;
import org.testng.annotations.Test;
import com.google.common.eventbus.EventBus;
/**
* Unit tests for {@link TestingEventBusAsserter}
*/
public class TestingEventBusAsserterTest {

  /**
   * Happy-path assertions: single-value match, mismatch failure, order-insensitive batch
   * match via assertNextValuesEq, and batch mismatch failure.
   */
  @Test
  public void testAssertNext() throws InterruptedException, TimeoutException, IOException {
    EventBus testBus = TestingEventBuses.getEventBus("TestingEventBusAsserterTest.testHappyPath");
    try(final TestingEventBusAsserter asserter =
        new TestingEventBusAsserter("TestingEventBusAsserterTest.testHappyPath")) {
      testBus.post(new TestingEventBuses.Event("event1"));
      testBus.post(new TestingEventBuses.Event("event2"));

      asserter.assertNextValueEq("event1");
      // The next value is "event2", so expecting "event3" must fail.
      Assert.assertThrows(new ThrowingRunnable() {
        @Override public void run() throws Throwable {
          asserter.assertNextValueEq("event3");
        }
      });

      // Posted out of order on purpose: assertNextValuesEq compares as a set/sorted batch.
      testBus.post(new TestingEventBuses.Event("event13"));
      testBus.post(new TestingEventBuses.Event("event11"));
      testBus.post(new TestingEventBuses.Event("event12"));
      testBus.post(new TestingEventBuses.Event("event10"));

      asserter.assertNextValuesEq(Arrays.asList("event10", "event11", "event12", "event13"));

      testBus.post(new TestingEventBuses.Event("event22"));
      testBus.post(new TestingEventBuses.Event("event20"));

      // "event21" was never posted, so this batch assertion must fail.
      Assert.assertThrows(new ThrowingRunnable() {
        @Override public void run() throws Throwable {
          asserter.assertNextValuesEq(Arrays.asList("event22", "event21"));
        }
      });
    }
  }

  /**
   * With no events posted, an assert with a 300ms timeout should raise TimeoutException
   * (observed via the latch) instead of blocking forever.
   */
  @Test
  public void testTimeout() throws InterruptedException, TimeoutException, IOException {
    try(final TestingEventBusAsserter asserter =
        new TestingEventBusAsserter("TestingEventBusAsserterTest.testTimeout")) {
      final CountDownLatch timeoutSeen = new CountDownLatch(1);
      Thread assertThread = new Thread(new Runnable() {
        @Override public void run() {
          try {
            asserter.withTimeout(300, TimeUnit.MILLISECONDS).assertNextValueEq("event1");
          } catch (TimeoutException e) {
            // Expected path: signal the main thread that the timeout fired.
            timeoutSeen.countDown();
          }
          catch (InterruptedException e ) {
            // Unexpected
          }
        }
      }, "TestingEventBusAsserterTest.testTimeout.assertThread");
      assertThread.start();
      // Before the 300ms deadline, the asserting thread should still be blocked.
      Thread.sleep(100);
      Assert.assertTrue(assertThread.isAlive());
      Assert.assertTrue(timeoutSeen.await(300, TimeUnit.MILLISECONDS));
    }
  }

  /**
   * Same timeout behavior for the batch variant: only one of two expected events is
   * posted, so assertNextValuesEq must time out waiting for the second.
   */
  @Test
  public void testAssertNextValuesEqTimeout() throws InterruptedException, TimeoutException, IOException {
    EventBus testBus = TestingEventBuses.getEventBus("TestingEventBusAsserterTest.testAssertNextValuesEqTimeout");
    try(final TestingEventBusAsserter asserter =
        new TestingEventBusAsserter("TestingEventBusAsserterTest.testAssertNextValuesEqTimeout")) {
      testBus.post(new TestingEventBuses.Event("event1"));
      final CountDownLatch timeoutSeen = new CountDownLatch(1);
      Thread assertThread = new Thread(new Runnable() {
        @Override public void run() {
          try {
            asserter.withTimeout(300, TimeUnit.MILLISECONDS)
                .assertNextValuesEq(Arrays.asList("event1", "event2"));
          } catch (TimeoutException e) {
            // Expected path: "event2" never arrives.
            timeoutSeen.countDown();
          }
          catch (InterruptedException e ) {
            // Unexpected
          }
        }
      }, "TestingEventBusAsserterTest.testTimeout.assertThread");
      assertThread.start();
      Thread.sleep(100);
      Assert.assertTrue(assertThread.isAlive());
      Assert.assertTrue(timeoutSeen.await(300, TimeUnit.MILLISECONDS));
    }
  }
}
| 2,866 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/test/TestPartitionAwareWriterBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.test;
import java.io.IOException;
import java.util.Queue;
import org.apache.avro.Schema;
import com.google.common.collect.Queues;
import org.apache.gobblin.commit.SpeculativeAttemptAwareConstruct;
import org.apache.gobblin.dataset.DatasetDescriptor;
import org.apache.gobblin.dataset.Descriptor;
import org.apache.gobblin.dataset.PartitionDescriptor;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.PartitionAwareDataWriterBuilder;
import lombok.Data;
/**
 * A test {@link PartitionAwareDataWriterBuilder} whose writers log every lifecycle call
 * (build, write, commit, cleanup, close) into the shared {@link #actions} queue so tests
 * can assert on the exact per-partition sequence of operations.
 */
public class TestPartitionAwareWriterBuilder extends PartitionAwareDataWriterBuilder<String, String> {

  /** Ordered log of all actions performed by this builder and the writers it creates. */
  public final Queue<Action> actions = Queues.newArrayDeque();
  /** When true, {@link #build()} sleeps 10 seconds so callers can exercise build timeouts. */
  private boolean testTimeout;

  /** The kinds of lifecycle events recorded in {@link #actions}. */
  public enum Actions {
    BUILD, WRITE, COMMIT, CLEANUP, CLOSE
  }

  public TestPartitionAwareWriterBuilder() {
    this(false);
  }

  public TestPartitionAwareWriterBuilder(boolean testTimeout) {
    this.testTimeout = testTimeout;
  }

  @Override
  public boolean validatePartitionSchema(Schema partitionSchema) {
    // Any partition schema is acceptable for testing purposes.
    return true;
  }

  /**
   * Builds a test writer for the current partition. Partitions whose name contains a
   * digit get a writer that reports itself as unsafe for speculative attempts.
   */
  @Override
  public DataWriter build()
      throws IOException {
    if (testTimeout) {
      try {
        Thread.sleep(10 * 1000);
      } catch (InterruptedException e) {
        // Restore the interrupt status instead of just printing the stack trace, so a
        // caller that interrupts a slow build can still observe the interruption.
        Thread.currentThread().interrupt();
      }
    }
    String partition = this.partition.get().get(TestPartitioner.PARTITION).toString();
    this.actions.add(new Action(Actions.BUILD, partition, null));
    if (partition.matches(".*\\d+.*")) {
      return new SpeculativeNotSafeTestWriter(partition);
    }
    return new TestDataWriter(partition);
  }

  /** A {@link TestDataWriter} that is not safe to run in speculative attempts. */
  private class SpeculativeNotSafeTestWriter extends TestDataWriter {

    public SpeculativeNotSafeTestWriter(String partition) {
      super(partition);
    }

    @Override
    public boolean isSpeculativeAttemptSafe() {
      return false;
    }
  }

  /** A no-op writer that records each lifecycle call into the builder's action queue. */
  private class TestDataWriter implements DataWriter<String>, SpeculativeAttemptAwareConstruct {
    private String partition;
    // Counters exposed through recordsWritten()/bytesWritten(); each write bumps both by one.
    private long recordsWritten = 0;
    private long bytesWritten = 0;

    public TestDataWriter(String partition) {
      this.partition = partition;
    }

    @Override
    public void write(String record)
        throws IOException {
      actions.add(new Action(Actions.WRITE, this.partition, record));
      this.recordsWritten++;
      this.bytesWritten++;
    }

    @Override
    public void commit()
        throws IOException {
      actions.add(new Action(Actions.COMMIT, this.partition, null));
    }

    @Override
    public void cleanup()
        throws IOException {
      actions.add(new Action(Actions.CLEANUP, this.partition, null));
    }

    @Override
    public long recordsWritten() {
      return this.recordsWritten;
    }

    @Override
    public long bytesWritten()
        throws IOException {
      return this.bytesWritten;
    }

    @Override
    public void close()
        throws IOException {
      actions.add(new Action(Actions.CLOSE, this.partition, null));
    }

    @Override
    public boolean isSpeculativeAttemptSafe() {
      return true;
    }

    @Override
    public Descriptor getDataDescriptor() {
      DatasetDescriptor dataset = new DatasetDescriptor("testPlatform", "testDataset");
      return new PartitionDescriptor(this.partition, dataset);
    }
  }

  /** An immutable (type, partition, target) triple describing one recorded action. */
  @Data
  public static class Action {
    private final Actions type;
    private final String partition;
    private final String target;
  }
}
| 2,867 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/test/TestPartitioner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.test;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.writer.partitioner.WriterPartitioner;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
/**
 * A {@link WriterPartitioner} for tests that partitions records by the lower-cased first
 * character of the record string.
 */
public class TestPartitioner implements WriterPartitioner<String> {

  public static final String PARTITION = "partition";

  // Single-field record schema: { partition: string }.
  private static final Schema SCHEMA =
      SchemaBuilder.record("LetterPartition")
          .namespace("gobblin.test")
          .fields()
          .name(PARTITION).type(Schema.create(Schema.Type.STRING)).noDefault()
          .endRecord();

  public TestPartitioner(State state, int numBranches, int branchId) {
    // No configuration needed; this constructor signature is required by the
    // partitioner instantiation contract.
  }

  @Override
  public Schema partitionSchema() {
    return SCHEMA;
  }

  @Override
  public GenericRecord partitionForRecord(String record) {
    // The value is stored as a char (boxed Character); consumers call toString() on it.
    char firstLetter = record.toLowerCase().charAt(0);
    GenericRecord result = new GenericData.Record(SCHEMA);
    result.put(PARTITION, firstLetter);
    return result;
  }
}
| 2,868 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/partitioner/SchemaBasedWriterPartitionerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.partitioner;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.testng.Assert;
import org.testng.annotations.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@Test(groups = {"gobblin.writer.partitioner"})
public class SchemaBasedWriterPartitionerTest {

  /** Verifies that the partition record carries the string form of the input record's schema. */
  @Test
  public void partitionRecordTest() {
    final String expectedSchemaString = "returnSchemaString";

    // Stub a record whose schema renders to a known string.
    Schema schema = mock(Schema.class);
    when(schema.toString()).thenReturn(expectedSchemaString);
    GenericRecord record = mock(GenericRecord.class);
    when(record.getSchema()).thenReturn(schema);

    SchemaBasedWriterPartitioner partitioner = new SchemaBasedWriterPartitioner(null, 0, 0);
    GenericRecord partition = partitioner.partitionForRecord(record);
    Assert.assertEquals(partition.get(SchemaBasedWriterPartitioner.SCHEMA_STRING), expectedSchemaString);
  }
}
| 2,869 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/partitioner/TimeBasedAvroWriterPartitionerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.partitioner;
import java.io.File;
import java.io.IOException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.util.Utf8;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.writer.AvroDataWriterBuilder;
import org.apache.gobblin.writer.DataWriter;
import org.apache.gobblin.writer.DataWriterBuilder;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.PartitionedDataWriter;
import org.apache.gobblin.writer.WriterOutputFormat;
/**
* Tests for {@link TimeBasedAvroWriterPartitioner}.
*/
@Test(groups = { "gobblin.writer.partitioner" })
public class TimeBasedAvroWriterPartitionerTest {

  private static final String SIMPLE_CLASS_NAME = TimeBasedAvroWriterPartitionerTest.class.getSimpleName();
  private static final String TEST_ROOT_DIR = SIMPLE_CLASS_NAME + "-test";
  private static final String STAGING_DIR = TEST_ROOT_DIR + Path.SEPARATOR + "staging";
  private static final String OUTPUT_DIR = TEST_ROOT_DIR + Path.SEPARATOR + "output";
  private static final String BASE_FILE_PATH = "base";
  private static final String FILE_NAME = SIMPLE_CLASS_NAME + "-name.avro";
  private static final String PARTITION_COLUMN_NAME = "timestamp";
  private static final String WRITER_ID = "writer-1";

  /** Ensures the staging and output directories exist and are empty before the tests run. */
  @BeforeClass
  public void setUp() throws IOException {
    File stagingDir = new File(STAGING_DIR);
    File outputDir = new File(OUTPUT_DIR);
    if (!stagingDir.exists()) {
      stagingDir.mkdirs();
    } else {
      FileUtils.deleteDirectory(stagingDir);
    }
    if (!outputDir.exists()) {
      outputDir.mkdirs();
    } else {
      FileUtils.deleteDirectory(outputDir);
    }
  }

  /**
   * Test
   * 1. Record timestamp of type long
   * 2. Partition path of a given record
   */
  @Test
  public void testWriter() throws IOException {
    Schema schema = getRecordSchema("long");
    State state = getBasicState();
    // Write three records, each should be written to a different file
    GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(schema);
    DataWriter<GenericRecord> millisPartitionWriter = getWriter(schema, state);
    // This timestamp corresponds to 2015/01/01
    // (long literals use an uppercase 'L' suffix; lowercase 'l' reads like the digit 1)
    genericRecordBuilder.set("timestamp", 1420099200000L);
    millisPartitionWriter.writeEnvelope(new RecordEnvelope<>(genericRecordBuilder.build()));
    // This timestamp corresponds to 2015/01/02
    genericRecordBuilder.set("timestamp", 1420185600000L);
    millisPartitionWriter.writeEnvelope(new RecordEnvelope<>(genericRecordBuilder.build()));
    millisPartitionWriter.close();
    millisPartitionWriter.commit();
    // Check that the writer reports that 2 records have been written
    Assert.assertEquals(millisPartitionWriter.recordsWritten(), 2);
    state.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_TIMEUNIT, "seconds");
    DataWriter<GenericRecord> secsPartitionWriter = getWriter(schema, state);
    // This timestamp corresponds to 2015/01/03
    genericRecordBuilder.set("timestamp", 1420272000L);
    secsPartitionWriter.writeEnvelope(new RecordEnvelope<>(genericRecordBuilder.build()));
    secsPartitionWriter.close();
    secsPartitionWriter.commit();
    // Check that the writer reports that 1 record has been written
    Assert.assertEquals(secsPartitionWriter.recordsWritten(), 1);
    // Check that 3 files were created
    Assert.assertEquals(FileUtils.listFiles(new File(TEST_ROOT_DIR), new String[] { "avro" }, true).size(), 3);
    // Check if each file exists, and in the correct location
    File baseOutputDir = new File(OUTPUT_DIR, BASE_FILE_PATH);
    Assert.assertTrue(baseOutputDir.exists());
    File outputDir20150101 =
        new File(baseOutputDir, "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "01" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150101.exists());
    File outputDir20150102 =
        new File(baseOutputDir, "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "02" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150102.exists());
    File outputDir20150103 =
        new File(baseOutputDir, "2015" + Path.SEPARATOR + "01" + Path.SEPARATOR + "03" + Path.SEPARATOR + FILE_NAME);
    Assert.assertTrue(outputDir20150103.exists());
  }

  /** Exercises timestamp extraction for string/Utf8/null values in millis and seconds units. */
  @Test
  public void testGetRecordTimestamp() {
    // Test for string record timestamp in millis partition time unit
    State state = getBasicState();
    TimeBasedAvroWriterPartitioner partitioner = new TimeBasedAvroWriterPartitioner(state);
    GenericRecordBuilder genericRecordBuilder = new GenericRecordBuilder(getRecordSchema("string"));
    genericRecordBuilder.set("timestamp", "1557786583000");
    // Test without parsing as string
    Assert.assertTrue(partitioner.getRecordTimestamp(genericRecordBuilder.build()) > 1557786583000L);
    // Test with parsing as string
    state.setProp(TimeBasedAvroWriterPartitioner.WRITER_PARTITION_ENABLE_PARSE_AS_STRING, true);
    partitioner = new TimeBasedAvroWriterPartitioner(state);
    Assert.assertEquals(partitioner.getRecordTimestamp(genericRecordBuilder.build()), 1557786583000L);
    // Test for Utf8
    genericRecordBuilder.set("timestamp", new Utf8("1557786583000"));
    Assert.assertEquals(partitioner.getRecordTimestamp(genericRecordBuilder.build()), 1557786583000L);
    // Test for null value
    genericRecordBuilder.set("timestamp", null);
    Assert.assertTrue(
        partitioner.getRecordTimestamp(genericRecordBuilder.build()) <= System.currentTimeMillis());
    // Test for string type in seconds partition time unit
    state.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_TIMEUNIT, "seconds");
    partitioner = new TimeBasedAvroWriterPartitioner(state);
    genericRecordBuilder.set("timestamp", "1557786583");
    Assert.assertEquals(partitioner.getRecordTimestamp(genericRecordBuilder.build()), 1557786583L);
  }

  /** Removes everything written under the test root directory. */
  @AfterClass
  public void tearDown() throws IOException {
    FileUtils.deleteDirectory(new File(TEST_ROOT_DIR));
  }

  /** Builds a partitioned Avro writer for {@code schema} configured from {@code state}. */
  private DataWriter<GenericRecord> getWriter(Schema schema, State state)
      throws IOException {
    // Build a writer to write test records
    DataWriterBuilder<Schema, GenericRecord> builder = new AvroDataWriterBuilder()
        .writeTo(Destination.of(Destination.DestinationType.HDFS, state)).writeInFormat(WriterOutputFormat.AVRO)
        .withWriterId(WRITER_ID).withSchema(schema).withBranches(1).forBranch(0);
    return new PartitionedDataWriter<Schema, GenericRecord>(builder, state);
  }

  /** Common writer/partitioner configuration shared by all tests in this class. */
  private State getBasicState() {
    State properties = new State();
    properties.setProp(TimeBasedAvroWriterPartitioner.WRITER_PARTITION_COLUMNS, PARTITION_COLUMN_NAME);
    properties.setProp(ConfigurationKeys.WRITER_BUFFER_SIZE, ConfigurationKeys.DEFAULT_BUFFER_SIZE);
    properties.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, ConfigurationKeys.LOCAL_FS_URI);
    properties.setProp(ConfigurationKeys.WRITER_STAGING_DIR, STAGING_DIR);
    properties.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, OUTPUT_DIR);
    properties.setProp(ConfigurationKeys.WRITER_FILE_PATH, BASE_FILE_PATH);
    properties.setProp(ConfigurationKeys.WRITER_FILE_NAME, FILE_NAME);
    properties.setProp(TimeBasedWriterPartitioner.WRITER_PARTITION_PATTERN, "yyyy/MM/dd");
    properties.setProp(ConfigurationKeys.WRITER_PARTITIONER_CLASS, TimeBasedAvroWriterPartitioner.class.getName());
    return properties;
  }

  /** Record schema with a single nullable "timestamp" field of the given Avro type. */
  private Schema getRecordSchema(String timestampType) {
    return new Schema.Parser().parse("{" + "\"type\" : \"record\"," + "\"name\" : \"User\"," + "\"namespace\" : \"example.avro\"," + "\"fields\" : [ {"
        + "\"name\" : \"" + PARTITION_COLUMN_NAME + "\"," + "\"type\" : [\"null\", \"" + timestampType + "\"]} ]" + "}");
  }
}
| 2,870 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/objectstore/ObjectStoreWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.objectstore;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.commons.io.IOUtils;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.objectstore.ObjectStoreDeleteConverter;
import org.apache.gobblin.writer.objectstore.response.GetObjectResponse;
public class ObjectStoreWriterTest {

  private static final String SCHEMA_STR =
      "{ \"type\" : \"record\", \"name\" : \"test_schema\", \"namespace\" : \"com.gobblin.test\", "
          + "\"fields\" : [ { \"name\" : \"objectId\", \"type\" : \"string\"} ], \"doc:\" : \"\" }";

  /**
   * Puts an object, deletes it through the writer + delete converter, and verifies that a
   * subsequent fetch fails with {@link IOException}.
   */
  @Test
  public void testDelete() throws Exception {
    WorkUnitState wu = new WorkUnitState();
    wu.setProp(ObjectStoreDeleteConverter.OBJECT_ID_FIELD, "objectId");
    ObjectStoreClient client = new MockObjectStoreClient();
    byte[] objId = client.put(IOUtils.toInputStream("test", "UTF-8"), ConfigFactory.empty());
    Assert.assertEquals(IOUtils.toString(client.getObject(objId).getObjectData(), "UTF-8"), "test");
    try (ObjectStoreWriter writer = new ObjectStoreWriter(client, new State());) {
      ObjectStoreDeleteConverter converter = new ObjectStoreDeleteConverter();
      converter.init(wu);
      Schema schema = new Schema.Parser().parse(SCHEMA_STR);
      GenericRecord datum = new GenericData.Record(schema);
      datum.put("objectId", objId);
      Iterables.getFirst(converter.convertRecord(converter.convertSchema(schema, wu), datum, wu), null);
      writer.write(Iterables.getFirst(
          converter.convertRecord(converter.convertSchema(schema, wu), datum, new WorkUnitState()), null));
    }
    try {
      client.getObject(objId);
      Assert.fail("should have thrown an IOException as object is already deleted");
    } catch (IOException e) {
      // All good exception thrown because object does not exist
    }
  }

  /**
   * In-memory {@link ObjectStoreClient}. The backing map is keyed by the UTF-8 string form
   * of the object id: a {@code byte[]} key in a HashMap is compared by reference identity,
   * so lookups with an equal-but-distinct id array (e.g. one round-tripped through a
   * converter) would silently miss.
   */
  private static class MockObjectStoreClient implements ObjectStoreClient {
    private Map<String, String> store;

    public MockObjectStoreClient() {
      this.store = Maps.newHashMap();
    }

    /** Decodes an object id into its map-key (string) form. */
    private static String key(byte[] objectId) {
      return new String(objectId, StandardCharsets.UTF_8);
    }

    @Override
    public byte[] put(InputStream objectStream, Config putConfig) throws IOException {
      String objectId = UUID.randomUUID().toString();
      this.store.put(objectId, IOUtils.toString(objectStream, "UTF-8"));
      return objectId.getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public void delete(byte[] objectId, Config deletetConfig) throws IOException {
      this.store.remove(key(objectId));
    }

    @Override
    public Config getObjectProps(byte[] objectId) {
      return ConfigFactory.empty();
    }

    @Override
    public void close() throws IOException {
      this.store.clear();
    }

    @Override
    public GetObjectResponse getObject(byte[] objectId) throws IOException {
      String id = key(objectId);
      if (!this.store.containsKey(id)) {
        // Report the id in readable form; concatenating a byte[] prints "[B@<hash>".
        throw new IOException("Object not found " + id);
      }
      return new GetObjectResponse(IOUtils.toInputStream(this.store.get(id), "UTF-8"), ConfigFactory.empty());
    }

    @Override
    public void setObjectProps(byte[] objectId, Config conf) throws IOException {}

    @Override
    public byte[] put(InputStream objectStream, byte[] objectId, Config putConfig) throws IOException {
      throw new UnsupportedOperationException();
    }
  }
}
| 2,871 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/writer/http/SalesforceRestWriterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.writer.http;
import static org.mockito.Mockito.*;
import static org.apache.gobblin.writer.http.SalesForceRestWriterBuilder.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.converter.http.RestEntry;
import org.apache.gobblin.writer.http.SalesforceRestWriter.Operation;
import org.apache.http.HttpEntity;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
@Test(groups = { "gobblin.writer" })
public class SalesforceRestWriterTest {
private SalesforceRestWriter writer;
private CloseableHttpClient client;
  /** Convenience overload: configures the writer under test with an empty {@link State}. */
  private void setup(Operation operation) throws ClientProtocolException, IOException, URISyntaxException {
    setup(operation, new State());
  }
  /**
   * Builds the {@link SalesforceRestWriter} under test against a mocked HTTP client.
   * All endpoint/credential settings are dummies; the writer never reaches a real server.
   * Leaves the mocked client in {@link #client} and the writer in {@link #writer}.
   */
  private void setup(Operation operation, State state) throws ClientProtocolException, IOException, URISyntaxException {
    // Minimal configuration required by the builder; values are placeholders.
    state.appendToSetProp(CONF_PREFIX + STATIC_SVC_ENDPOINT, "test");
    state.appendToSetProp(CONF_PREFIX + CLIENT_ID, "test");
    state.appendToSetProp(CONF_PREFIX + CLIENT_SECRET, "test");
    state.appendToSetProp(CONF_PREFIX + USER_ID, "test");
    state.appendToSetProp(CONF_PREFIX + PASSWORD, "test");
    state.appendToSetProp(CONF_PREFIX + USE_STRONG_ENCRYPTION, "test");
    state.appendToSetProp(CONF_PREFIX + SECURITY_TOKEN, "test");
    state.appendToSetProp(CONF_PREFIX + OPERATION, operation.name());
    SalesForceRestWriterBuilder builder = new SalesForceRestWriterBuilder();
    builder = spy(builder);
    // Intercept HTTP client creation so the writer talks to a mock instead of the network.
    HttpClientBuilder httpClientBuilder = mock(HttpClientBuilder.class);
    doReturn(httpClientBuilder).when(builder).getHttpClientBuilder();
    client = mock(CloseableHttpClient.class);
    when(httpClientBuilder.build()).thenReturn(client);
    builder.fromState(state);
    writer = new SalesforceRestWriter(builder, "test");
    writer.setCurServerHost(new URI("http://test.nowhere.com"));
  }
public void testInsertSuccess() throws IOException, URISyntaxException {
setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST);
CloseableHttpResponse response = mock(CloseableHttpResponse.class);
StatusLine statusLine = mock(StatusLine.class);
when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
when(response.getStatusLine()).thenReturn(statusLine);
when(statusLine.getStatusCode()).thenReturn(200);
RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
Optional<HttpUriRequest> request = writer.onNewRecord(restEntry);
Assert.assertTrue(request.isPresent(), "No HttpUriRequest from onNewRecord");
Assert.assertEquals("POST", request.get().getMethod());
writer = spy(writer);
writer.write(restEntry);
verify(writer, times(1)).writeImpl(restEntry);
verify(writer, times(1)).onNewRecord(restEntry);
verify(writer, times(1)).sendRequest(any(HttpUriRequest.class));
verify(client, times(1)).execute(any(HttpUriRequest.class));
verify(writer, times(1)).waitForResponse(any(ListenableFuture.class));
verify(writer, times(1)).processResponse(any(CloseableHttpResponse.class));
verify(writer, never()).onConnect(any(URI.class));
}
public void testBatchInsertSuccess() throws IOException, URISyntaxException {
final int recordSize = 113;
final int batchSize = 25;
State state = new State();
state.appendToSetProp(CONF_PREFIX + BATCH_SIZE, Integer.toString(batchSize));
state.appendToSetProp(CONF_PREFIX + BATCH_RESOURCE_PATH, "test");
setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST, state);
CloseableHttpResponse response = mock(CloseableHttpResponse.class);
StatusLine statusLine = mock(StatusLine.class);
when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
when(response.getStatusLine()).thenReturn(statusLine);
when(statusLine.getStatusCode()).thenReturn(200);
HttpEntity entity = mock(HttpEntity.class);
when(response.getEntity()).thenReturn(entity);
JsonObject jsonResponse = new JsonObject();
jsonResponse.addProperty("hasErrors", false);
ByteArrayInputStream[] streams = new ByteArrayInputStream[recordSize];
for (int i=0; i < recordSize-1; i++) {
streams[i] = new ByteArrayInputStream(jsonResponse.toString().getBytes());
}
when(entity.getContent()).thenReturn(new ByteArrayInputStream(jsonResponse.toString().getBytes()), streams);
RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
writer = spy(writer);
for (int i = 0; i < recordSize; i++) {
writer.write(restEntry);
}
writer.commit();
Assert.assertEquals(writer.recordsWritten(), recordSize);
verify(writer, times(recordSize)).writeImpl(restEntry);
verify(writer, times(recordSize)).onNewRecord(restEntry);
double sendCount = ((double)recordSize) / ((double)batchSize);
sendCount = Math.ceil(sendCount);
verify(writer, times((int)sendCount)).sendRequest(any(HttpUriRequest.class));
verify(client, times((int)sendCount)).execute(any(HttpUriRequest.class));
verify(writer, times((int)sendCount)).waitForResponse(any(ListenableFuture.class));
verify(writer, times((int)sendCount)).processResponse(any(CloseableHttpResponse.class));
verify(writer, times(1)).flush();
verify(writer, never()).onConnect(any(URI.class));
}
public void testBatchInsertFailure() throws IOException, URISyntaxException {
final int recordSize = 25;
final int batchSize = recordSize;
State state = new State();
state.appendToSetProp(CONF_PREFIX + BATCH_SIZE, Integer.toString(batchSize));
state.appendToSetProp(CONF_PREFIX + BATCH_RESOURCE_PATH, "test");
setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST, state);
CloseableHttpResponse response = mock(CloseableHttpResponse.class);
StatusLine statusLine = mock(StatusLine.class);
when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
when(response.getStatusLine()).thenReturn(statusLine);
when(statusLine.getStatusCode()).thenReturn(200);
HttpEntity entity = mock(HttpEntity.class);
when(response.getEntity()).thenReturn(entity);
JsonObject jsonResponse = new JsonObject();
jsonResponse.addProperty("hasErrors", true);
JsonArray resultJsonArr = new JsonArray();
jsonResponse.add("results", resultJsonArr);
JsonObject subResult1 = new JsonObject();
subResult1.addProperty("statusCode", 201); //Success
JsonObject subResult2 = new JsonObject();
subResult2.addProperty("statusCode", 500); //Failure
resultJsonArr.add(subResult1);
resultJsonArr.add(subResult2);
when(entity.getContent()).thenReturn(new ByteArrayInputStream(jsonResponse.toString().getBytes()));
RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
writer = spy(writer);
for (int i = 0; i < recordSize-1; i++) {
writer.write(restEntry);
}
try {
writer.write(restEntry);
Assert.fail("Should have failed with failed response. " + jsonResponse.toString());
} catch (Exception e) {
Assert.assertTrue(e instanceof RuntimeException);
}
Assert.assertEquals(writer.recordsWritten(), (long) 0L);
verify(writer, times(recordSize)).writeImpl(restEntry);
verify(writer, times(recordSize)).onNewRecord(restEntry);
double sendCount = ((double)recordSize) / ((double)batchSize);
sendCount = Math.ceil(sendCount);
verify(writer, times((int)sendCount)).sendRequest(any(HttpUriRequest.class));
verify(client, times((int)sendCount)).execute(any(HttpUriRequest.class));
verify(writer, times((int)sendCount)).waitForResponse(any(ListenableFuture.class));
verify(writer, times((int)sendCount)).processResponse(any(CloseableHttpResponse.class));
verify(writer, never()).flush();
verify(writer, never()).onConnect(any(URI.class));
}
/**
 * Batch INSERT_ONLY_NOT_EXIST where the bulk response flags every sub-result as a
 * duplicate-value error: duplicates must be treated as successful writes, so all
 * records are still counted by recordsWritten().
 * NOTE(review): no @Test annotation on this method — presumably the enclosing class
 * carries a class-level @Test(groups=...) annotation; confirm against the class header.
 */
public void testBatchInsertDuplicate() throws IOException, URISyntaxException {
final int recordSize = 25;
// Batch size equals the record count, so all records fit in a single batched request.
final int batchSize = recordSize;
State state = new State();
state.appendToSetProp(CONF_PREFIX + BATCH_SIZE, Integer.toString(batchSize));
state.appendToSetProp(CONF_PREFIX + BATCH_RESOURCE_PATH, "test");
setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST, state);
// Mock a 200 response whose JSON body reports per-record errors.
CloseableHttpResponse response = mock(CloseableHttpResponse.class);
StatusLine statusLine = mock(StatusLine.class);
when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
when(response.getStatusLine()).thenReturn(statusLine);
when(statusLine.getStatusCode()).thenReturn(200);
HttpEntity entity = mock(HttpEntity.class);
when(response.getEntity()).thenReturn(entity);
// Build the composite batch response: hasErrors=true with two 400 sub-results,
// each carrying the duplicate-value error code.
JsonObject jsonResponse = new JsonObject();
jsonResponse.addProperty("hasErrors", true);
JsonArray resultJsonArr = new JsonArray();
jsonResponse.add("results", resultJsonArr);
JsonObject subResult1 = new JsonObject();
subResult1.addProperty("statusCode", 400);
JsonArray subResultArr = new JsonArray();
JsonObject errJson = new JsonObject();
errJson.addProperty("errorCode", SalesforceRestWriter.DUPLICATE_VALUE_ERR_CODE);
subResultArr.add(errJson);
subResult1.add("result", subResultArr);
JsonObject subResult2 = new JsonObject();
subResult2.addProperty("statusCode", 400);
// Intentional: both sub-results share the same error array instance.
subResult2.add("result", subResultArr);
resultJsonArr.add(subResult1);
resultJsonArr.add(subResult2);
when(entity.getContent()).thenReturn(new ByteArrayInputStream(jsonResponse.toString().getBytes()));
RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
writer = spy(writer);
for (int i = 0; i < recordSize; i++) {
writer.write(restEntry);
}
writer.commit();
// Duplicate-value errors are tolerated: every record still counts as written.
Assert.assertEquals(writer.recordsWritten(), recordSize);
verify(writer, times(recordSize)).writeImpl(restEntry);
verify(writer, times(recordSize)).onNewRecord(restEntry);
// Number of HTTP round trips = ceil(records / batchSize); here exactly one.
double sendCount = ((double)recordSize) / ((double)batchSize);
sendCount = Math.ceil(sendCount);
verify(writer, times((int)sendCount)).sendRequest(any(HttpUriRequest.class));
verify(client, times((int)sendCount)).execute(any(HttpUriRequest.class));
verify(writer, times((int)sendCount)).waitForResponse(any(ListenableFuture.class));
verify(writer, times((int)sendCount)).processResponse(any(CloseableHttpResponse.class));
// commit() triggers exactly one flush of the pending batch.
verify(writer, times(1)).flush();
verify(writer, never()).onConnect(any(URI.class));
}
/**
 * A single UPSERT-mode write should issue exactly one PATCH request, and a 200
 * response should complete the write without triggering a reconnect.
 */
public void testUpsertSuccess() throws IOException, URISyntaxException {
  setup(SalesforceRestWriter.Operation.UPSERT);
  // Mock a successful (200) HTTP round trip.
  CloseableHttpResponse response = mock(CloseableHttpResponse.class);
  StatusLine statusLine = mock(StatusLine.class);
  when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
  when(response.getStatusLine()).thenReturn(statusLine);
  when(statusLine.getStatusCode()).thenReturn(200);
  RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
  Optional<HttpUriRequest> request = writer.onNewRecord(restEntry);
  Assert.assertTrue(request.isPresent(), "No HttpUriRequest from onNewRecord");
  // UPSERT maps to HTTP PATCH. TestNG's assertEquals takes (actual, expected);
  // the arguments were previously reversed, which inverts the failure message.
  Assert.assertEquals(request.get().getMethod(), "PATCH");
  writer = spy(writer);
  writer.write(restEntry);
  // Exactly one request/response round trip per record; no batching in this mode.
  verify(writer, times(1)).writeImpl(restEntry);
  verify(writer, times(1)).onNewRecord(restEntry);
  verify(writer, times(1)).sendRequest(any(HttpUriRequest.class));
  verify(client, times(1)).execute(any(HttpUriRequest.class));
  verify(writer, times(1)).waitForResponse(any(ListenableFuture.class));
  verify(writer, times(1)).processResponse(any(CloseableHttpResponse.class));
  verify(writer, never()).onConnect(any(URI.class));
}
/**
 * INSERT_ONLY_NOT_EXIST receiving a 400 whose body carries the duplicate-value error
 * code: the duplicate is tolerated and the write completes without throwing.
 */
public void testInsertDuplicate() throws IOException, URISyntaxException {
  setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST);
  // Mock a 400 response whose JSON body is a duplicate-value error.
  CloseableHttpResponse response = mock(CloseableHttpResponse.class);
  StatusLine statusLine = mock(StatusLine.class);
  when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
  when(response.getStatusLine()).thenReturn(statusLine);
  when(statusLine.getStatusCode()).thenReturn(400);
  HttpEntity entity = mock(HttpEntity.class);
  when(response.getEntity()).thenReturn(entity);
  JsonObject json = new JsonObject();
  json.addProperty("errorCode", SalesforceRestWriter.DUPLICATE_VALUE_ERR_CODE);
  JsonArray jsonArray = new JsonArray();
  jsonArray.add(json);
  when(entity.getContent()).thenReturn(new ByteArrayInputStream(jsonArray.toString().getBytes()));
  RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
  Optional<HttpUriRequest> request = writer.onNewRecord(restEntry);
  Assert.assertTrue(request.isPresent(), "No HttpUriRequest from onNewRecord");
  // Insert maps to HTTP POST. TestNG's assertEquals takes (actual, expected);
  // the arguments were previously reversed, which inverts the failure message.
  Assert.assertEquals(request.get().getMethod(), "POST");
  writer = spy(writer);
  writer.write(restEntry);
  // One full round trip; the duplicate error must not cause a reconnect.
  verify(writer, times(1)).writeImpl(restEntry);
  verify(writer, times(1)).onNewRecord(restEntry);
  verify(writer, times(1)).sendRequest(any(HttpUriRequest.class));
  verify(client, times(1)).execute(any(HttpUriRequest.class));
  verify(writer, times(1)).waitForResponse(any(ListenableFuture.class));
  verify(writer, times(1)).processResponse(any(CloseableHttpResponse.class));
  verify(writer, never()).onConnect(any(URI.class));
}
/**
 * A 400 response without a duplicate-value error body is a hard failure: write()
 * must throw, and the writer must not attempt to reconnect.
 */
public void testFailure() throws IOException, URISyntaxException {
  setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST);
  // Mock a bare 400 response (no entity, so no recognizable duplicate error).
  CloseableHttpResponse response = mock(CloseableHttpResponse.class);
  StatusLine statusLine = mock(StatusLine.class);
  when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
  when(response.getStatusLine()).thenReturn(statusLine);
  when(statusLine.getStatusCode()).thenReturn(400);
  RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
  Optional<HttpUriRequest> request = writer.onNewRecord(restEntry);
  Assert.assertTrue(request.isPresent(), "No HttpUriRequest from onNewRecord");
  // Insert maps to HTTP POST. TestNG's assertEquals takes (actual, expected);
  // the arguments were previously reversed, which inverts the failure message.
  Assert.assertEquals(request.get().getMethod(), "POST");
  writer = spy(writer);
  try {
    writer.write(restEntry);
    // Assert.fail throws AssertionError (an Error), so it is NOT swallowed by the
    // catch (Exception) below and still fails the test if write() does not throw.
    Assert.fail("Should fail on 400 status code");
  } catch (Exception ignored) {
    // Expected: write() throws on a non-duplicate 400 response.
  }
  verify(writer, times(1)).writeImpl(restEntry);
  verify(writer, times(1)).onNewRecord(restEntry);
  verify(writer, times(1)).sendRequest(any(HttpUriRequest.class));
  verify(client, times(1)).execute(any(HttpUriRequest.class));
  verify(writer, times(1)).waitForResponse(any(ListenableFuture.class));
  verify(writer, times(1)).processResponse(any(CloseableHttpResponse.class));
  verify(writer, never()).onConnect(any(URI.class));
}
/**
 * A 401 response should make the writer re-authenticate: onConnect must be invoked
 * exactly once to reacquire the access token.
 */
public void testAccessTokenReacquire() throws IOException, URISyntaxException {
  setup(SalesforceRestWriter.Operation.INSERT_ONLY_NOT_EXIST);
  // Mock an unauthorized (401) HTTP response.
  CloseableHttpResponse response = mock(CloseableHttpResponse.class);
  StatusLine statusLine = mock(StatusLine.class);
  when(client.execute(any(HttpUriRequest.class))).thenReturn(response);
  when(response.getStatusLine()).thenReturn(statusLine);
  when(statusLine.getStatusCode()).thenReturn(401);
  RestEntry<JsonObject> restEntry = new RestEntry<JsonObject>("test", new JsonObject());
  Optional<HttpUriRequest> request = writer.onNewRecord(restEntry);
  Assert.assertTrue(request.isPresent(), "No HttpUriRequest from onNewRecord");
  // Insert maps to HTTP POST. TestNG's assertEquals takes (actual, expected);
  // the arguments were previously reversed, which inverts the failure message.
  Assert.assertEquals(request.get().getMethod(), "POST");
  writer = spy(writer);
  try {
    writer.write(restEntry);
  } catch (Exception ignored) {
    // The write may still fail after re-auth; this test only verifies the
    // reconnect behavior via the onConnect verification below.
  }
  verify(writer, times(1)).writeImpl(restEntry);
  verify(writer, times(1)).onNewRecord(restEntry);
  verify(writer, times(1)).sendRequest(any(HttpUriRequest.class));
  verify(client, times(1)).execute(any(HttpUriRequest.class));
  verify(writer, times(1)).waitForResponse(any(ListenableFuture.class));
  verify(writer, times(1)).processResponse(any(CloseableHttpResponse.class));
  // 401 triggers exactly one token reacquisition.
  verify(writer, times(1)).onConnect(any(URI.class));
}
}
| 2,872 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/recordaccess/AvroGenericRecordAccessorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.recordaccess;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.Utf8;
import org.testng.Assert;
import org.testng.ITestResult;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
/**
 * Unit tests for {@link AvroGenericRecordAccessor}: flat, nested, union and
 * array-valued field access on Avro {@link GenericRecord}s.
 */
public class AvroGenericRecordAccessorTest {
  private Schema recordSchema;
  private GenericRecord record;
  private AvroGenericRecordAccessor accessor;

  /** Builds a fresh record from the fieldPickInput schema before every test. */
  @BeforeMethod
  public void initRecord() throws IOException {
    recordSchema =
        new Schema.Parser().parse(getClass().getClassLoader().getResourceAsStream("converter/fieldPickInput.avsc"));
    record = new GenericData.Record(recordSchema);
    setRequiredRecordFields(record);
    accessor = new AvroGenericRecordAccessor(record);
  }

  /**
   * After each passing test, round-trip the record through Avro binary encoding.
   * This catches invalid values written via set() that the underlying GenericRecord
   * only rejects at serialization time.
   */
  @AfterMethod
  public void serializeRecord(ITestResult result)
      throws IOException {
    if (result.isSuccess() && result.getThrowable() == null) {
      DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(recordSchema);
      ByteArrayOutputStream bOs = new ByteArrayOutputStream();
      BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(bOs, null);
      datumWriter.write(record, encoder);
      encoder.flush();
      bOs.flush();
      Assert.assertTrue(bOs.toByteArray().length > 0);
    }
  }

  /** Basic set/get round trip for string, int and long fields. */
  @Test
  public void testSuccessfulSetAndGet() {
    accessor.set("name", "foo");
    accessor.set("favorite_number", 2);
    accessor.set("last_modified", 100L);
    Assert.assertEquals(accessor.getAsString("name"), "foo");
    Assert.assertEquals(accessor.getAsInt("favorite_number").intValue(), 2);
    Assert.assertEquals(accessor.getAsLong("last_modified").longValue(), 100L);
  }

  /** Reads values out of a record parsed from an .avro test resource. */
  @Test
  public void testParsedRecordGet()
      throws IOException {
    updateRecordFromTestResource(ACCESSOR_RESOURCE_NAME);
    Assert.assertEquals(accessor.getAsString("name"), "testName");
    Assert.assertNull(accessor.getAsInt("favorite_number"));
    Assert.assertNull(accessor.getAsString("favorite_color"));
    Assert.assertEquals(accessor.getAsLong("last_modified").longValue(), 13L);
    Assert.assertEquals(accessor.getAsLong("created").longValue(), 14L);
  }

  /** A parsed record can be mutated through the accessor. */
  @Test
  public void testParsedRecordManipulation()
      throws IOException {
    updateRecordFromTestResource(ACCESSOR_RESOURCE_NAME);
    accessor.set("name", "newName");
    Assert.assertEquals(accessor.getAsString("name"), "newName");
  }

  /** Dotted-with-index paths ("a.1.b") reach into array elements. */
  @Test
  public void testGetValueFromArray() throws IOException {
    setAccessorToRecordWithArrays();
    Assert.assertEquals(accessor.getAsString("nestedRecords.1.fieldToEncrypt"), "val1");
  }

  @Test
  public void testSetStringArray() throws IOException {
    List<String> quotes = ImmutableList.of("abracadabra", "hocuspocus");
    accessor.setStringArray("favorite_quotes", quotes);
    Assert.assertEquals(accessor.getGeneric("favorite_quotes"), quotes);
  }

  @Test
  public void testGetStringArrayUtf8() throws IOException {
    // Expectation: Even though we read an Avro object with UTF8 underneath, the accessor converts it into a
    // Java String
    List<String> expectedQuotes = ImmutableList.of("abc", "defg");
    GenericData.Array<Utf8> strings = new GenericData.Array<Utf8>(2, Schema.createArray(Schema.create(Schema.Type.STRING)));
    expectedQuotes.forEach(s -> strings.add(new Utf8(s)));
    record.put("favorite_quotes", strings);
    Assert.assertEquals(accessor.getGeneric("favorite_quotes"), expectedQuotes);
  }

  @Test
  public void testGetMultiConvertsStrings() throws IOException {
    // The below error is due to invalid avro data. As per avro, the default value must have the same type as the first
    // entry in the union. As the default value is null, type with "null" union must have "null" type first and then
    // actual type. This is corrected in fieldPickInput.avsc file and fieldPickInput_arrays.avro
    // Error: org.apache.avro.AvroTypeException: Invalid default for field favorite_quotes: null
    // not a [{"type":"array","items":"string"},"null"]
    // Correct data: "type": ["null", { "type": "array", "items": "string"}, "default": null]
    updateRecordFromTestResource("converter/fieldPickInput", "converter/fieldPickInput_arrays.avro");
    Map<String, Object> ret = accessor.getMultiGeneric("favorite_quotes");
    Object val = ret.get("favorite_quotes");
    Assert.assertTrue(val instanceof List);
    // Wildcard type instead of a raw List; TestNG's assertEquals takes (actual, expected),
    // so the argument order below is corrected from the original (expected-first) calls.
    List<?> castedVal = (List<?>) val;
    Assert.assertEquals(castedVal.size(), 2);
    Assert.assertEquals(castedVal.get(0), "hello world");
    Assert.assertEquals(castedVal.get(1), "foobar");
  }

  @Test
  public void testSetValueFromArray() throws IOException {
    setAccessorToRecordWithArrays();
    accessor.set("nestedRecords.1.fieldToEncrypt", "myNewVal");
    Assert.assertEquals(accessor.getAsString("nestedRecords.1.fieldToEncrypt"), "myNewVal");
  }

  /** Wildcard paths ("a.*.b") return one entry per matching array element. */
  @Test
  public void testGetMultiValue() throws IOException {
    setAccessorToRecordWithArrays();
    Map<String, String> fields = accessor.getMultiAsString("nestedRecords.*.fieldToEncrypt");
    Assert.assertEquals(fields.size(), 3);
    Assert.assertEquals(fields.get("nestedRecords.0.fieldToEncrypt"), "val0");
    Assert.assertEquals(fields.get("nestedRecords.1.fieldToEncrypt"), "val1");
    Assert.assertEquals(fields.get("nestedRecords.2.fieldToEncrypt"), "val2");
  }

  @Test(expectedExceptions = FieldDoesNotExistException.class)
  public void testSetNonexistentField() {
    accessor.set("doesnotexist", "someval");
  }

  @Test(expectedExceptions = FieldDoesNotExistException.class)
  public void testSetNonexistentNestedField() {
    accessor.set("subrecord.doesnotexist", "someval");
  }

  @Test(expectedExceptions = IncorrectTypeException.class)
  public void setBadTypePrimitive() {
    accessor.set("name", 5L);
  }

  @Test(expectedExceptions = IncorrectTypeException.class)
  public void setBadTypeUnion() {
    accessor.set("favorite_color", 0L);
  }

  @Test(expectedExceptions = IncorrectTypeException.class)
  public void getBadType() {
    accessor.getAsLong("name");
  }

  /** Dotted paths reach into nested sub-records for both reads and writes. */
  @Test
  public void testNestedSetAndGet()
      throws IOException {
    updateRecordFromTestResource(NESTED_RESOURCE_NAME);
    Assert.assertEquals(accessor.getAsString("address.city"), "Mountain view");
    accessor.set("address.city", "foobar");
    Assert.assertEquals(accessor.getAsString("address.city"), "foobar");
  }

  @Test
  public void setFieldToNull() {
    setRequiredRecordFields(record);
    accessor.setToNull("favorite_color");
    // afterTest serialization methods should ensure this works
  }

  /** Populates every non-nullable field so the record serializes cleanly. */
  private static void setRequiredRecordFields(GenericRecord record) {
    record.put("name", "validName");
    record.put("last_modified", 0L);
    record.put("favorite_number", 0);
    record.put("date_of_birth", 0L);
    record.put("created", 0L);
  }

  private void updateRecordFromTestResource(String resourceName) throws IOException {
    updateRecordFromTestResource(resourceName, null);
  }

  /**
   * Replaces {@code recordSchema}, {@code record} and {@code accessor} with the first
   * record of the given .avro resource, parsed against {@code resourceName + ".avsc"}.
   * When {@code avroFileName} is null it defaults to {@code resourceName + ".avro"}.
   */
  private void updateRecordFromTestResource(String resourceName, String avroFileName)
      throws IOException {
    if (avroFileName == null) {
      avroFileName = resourceName + ".avro";
    }
    recordSchema = new Schema.Parser().parse(
        getClass().getClassLoader().getResourceAsStream(resourceName + ".avsc")
    );
    DatumReader<GenericRecord> reader = new GenericDatumReader<>(recordSchema);
    DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(
        new File(getClass().getClassLoader().getResource(avroFileName).getPath()), reader);
    Assert.assertTrue(dataFileReader.hasNext());
    record = dataFileReader.next(record);
    accessor = new AvroGenericRecordAccessor(record);
  }

  private void setAccessorToRecordWithArrays()
      throws IOException {
    updateRecordFromTestResource("converter/record_with_arrays");
  }

  private static final String ACCESSOR_RESOURCE_NAME = "converter/fieldPickInput";
  private static final String NESTED_RESOURCE_NAME = "converter/nested";
}
| 2,873 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/recordaccess/RecordAccessorProviderFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.recordaccess;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.junit.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link RecordAccessorProviderFactory}: accessor resolution for Avro
 * records, service-loaded custom providers, and rejection of unregistered object types.
 */
public class RecordAccessorProviderFactoryTest {
  @Test
  public void testWithAvroRecord()
      throws IOException {
    Schema recordSchema =
        new Schema.Parser().parse(getClass().getClassLoader().getResourceAsStream("converter/fieldPickInput.avsc"));
    GenericData.Record record = new GenericData.Record(recordSchema);
    record.put("name", "foo");
    RecordAccessor accessor = RecordAccessorProviderFactory.getRecordAccessorForObject(record);
    Assert.assertNotNull(accessor);
    // This class uses JUnit's Assert, whose assertEquals takes (expected, actual);
    // the original call had the arguments reversed, inverting the failure message.
    Assert.assertEquals("foo", accessor.getAsString("name"));
  }

  /** TestAccessorBuilder (registered via service loading) should be invoked for RandomObject1. */
  @Test
  public void testFactoryRegistration() {
    RandomObject1 obj = new RandomObject1("foo");
    RecordAccessor accessor = RecordAccessorProviderFactory.getRecordAccessorForObject(obj);
    Assert.assertNotNull(accessor);
    Assert.assertTrue(accessor instanceof TestAccessor);
  }

  /** No provider handles RandomObject2, so the factory must reject it. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testUnregisteredObject()
      throws IOException {
    RandomObject2 obj = new RandomObject2("foo");
    // Return value is irrelevant; the call itself must throw.
    RecordAccessor accessor = RecordAccessorProviderFactory.getRecordAccessorForObject(obj);
  }

  private static class RandomObject1 {
    String name;
    public RandomObject1(String name) {
      this.name = name;
    }
    public String getName() {
      return name;
    }
  }

  private static class RandomObject2 {
    String name;
    public RandomObject2(String name) {
      this.name = name;
    }
    public String getName() {
      return name;
    }
  }

  // Dummy accessor - we just need to make sure it is constructed
  public static class TestAccessor implements RecordAccessor {
    @Override
    public String getAsString(String fieldName) {
      return null;
    }
    @Override
    public Integer getAsInt(String fieldName) {
      return null;
    }
    @Override
    public Long getAsLong(String fieldName) {
      return null;
    }
    @Override
    public void set(String fieldName, String value) {
    }
    @Override
    public void set(String fieldName, Integer value) {
    }
    @Override
    public void set(String fieldName, Long value) {
    }
    @Override
    public void setToNull(String fieldName) {
    }
    @Override
    public Map<String, Object> getMultiGeneric(String fieldName) {
      return null;
    }
    @Override
    public Object getGeneric(String fieldName) {
      return null;
    }
    @Override
    public void setStringArray(String fieldName, List<String> value) {
    }
    @Override
    public Map<String, String> getMultiAsString(String fieldName) {
      return null;
    }
    @Override
    public Map<String, Integer> getMultiAsInt(String fieldName) {
      return null;
    }
    @Override
    public Map<String, Long> getMultiAsLong(String fieldName) {
      return null;
    }
  }

  /** Provider that serves TestAccessor for RandomObject1 only. */
  public static class TestAccessorBuilder implements RecordAccessorProvider {
    @Override
    public RecordAccessor recordAccessorForObject(Object obj) {
      if (obj instanceof RandomObject1) {
        return new TestAccessor();
      }
      return null;
    }
  }
}
| 2,874 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/http/TestDefaultHttpClientConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.http;
import org.apache.http.HttpHost;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
/**
 * Unit tests for {@link DefaultHttpClientConfigurator}: proxy-address resolution from
 * typesafe {@link Config} objects and config extraction from Gobblin {@link State}.
 */
public class TestDefaultHttpClientConfiguration {

  @Test
  public void testConfigureFromTypesafe() {
    // Explicit host:port string.
    assertProxyResolvesTo(
        singleValueConfig(DefaultHttpClientConfigurator.PROXY_HOSTPORT_KEY, "localhost:12345"),
        new HttpHost("localhost", 12345));
    // hostport without a port falls back to the default proxy port.
    assertProxyResolvesTo(
        singleValueConfig(DefaultHttpClientConfigurator.PROXY_HOSTPORT_KEY, "localhost"),
        new HttpHost("localhost", DefaultHttpClientConfigurator.DEFAULT_HTTP_PROXY_PORT));
    // URL key alone also falls back to the default port.
    assertProxyResolvesTo(
        singleValueConfig(DefaultHttpClientConfigurator.PROXY_URL_KEY, "host123"),
        new HttpHost("host123", DefaultHttpClientConfigurator.DEFAULT_HTTP_PROXY_PORT));
    // URL key plus explicit port key.
    assertProxyResolvesTo(
        singleValueConfig(DefaultHttpClientConfigurator.PROXY_URL_KEY, "host123")
            .withValue(DefaultHttpClientConfigurator.PROXY_PORT_KEY,
                ConfigValueFactory.fromAnyRef(54321)),
        new HttpHost("host123", 54321));
    // No proxy keys at all: nothing is resolved.
    Assert.assertFalse(DefaultHttpClientConfigurator.getProxyAddr(ConfigFactory.empty()).isPresent());
  }

  @Test
  public void testConfigureFromState() {
    // Both URL and port present under the "source.conn." prefix.
    State urlAndPortState = new State();
    urlAndPortState.setProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL, "localhost");
    urlAndPortState.setProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_PORT, "11111");
    Config urlAndPortConfig = sourceConnConfig(urlAndPortState);
    Assert.assertEquals(urlAndPortConfig.getString(DefaultHttpClientConfigurator.PROXY_URL_KEY), "localhost");
    Assert.assertEquals(urlAndPortConfig.getInt(DefaultHttpClientConfigurator.PROXY_PORT_KEY), 11111);

    // URL only: the port key must be absent from the extracted config.
    State urlOnlyState = new State();
    urlOnlyState.setProp(ConfigurationKeys.SOURCE_CONN_USE_PROXY_URL, "localhost");
    Config urlOnlyConfig = sourceConnConfig(urlOnlyState);
    Assert.assertEquals(urlOnlyConfig.getString(DefaultHttpClientConfigurator.PROXY_URL_KEY), "localhost");
    Assert.assertFalse(urlOnlyConfig.hasPath(DefaultHttpClientConfigurator.PROXY_PORT_KEY));

    // Un-prefixed hostport key passes through unchanged.
    State hostportState = new State();
    hostportState.setProp(DefaultHttpClientConfigurator.PROXY_HOSTPORT_KEY, "localhost:22222");
    Config hostportConfig = new DefaultHttpClientConfigurator().stateToConfig(hostportState);
    Assert.assertEquals(hostportConfig.getString(DefaultHttpClientConfigurator.PROXY_HOSTPORT_KEY),
        "localhost:22222");
  }

  /** Builds a one-entry typesafe Config. */
  private static Config singleValueConfig(String key, Object value) {
    return ConfigFactory.empty().withValue(key, ConfigValueFactory.fromAnyRef(value));
  }

  /** Asserts that getProxyAddr resolves the given config to exactly the expected host. */
  private static void assertProxyResolvesTo(Config cfg, HttpHost expected) {
    Optional<HttpHost> proxyHost = DefaultHttpClientConfigurator.getProxyAddr(cfg);
    Assert.assertTrue(proxyHost.isPresent());
    Assert.assertEquals(proxyHost.get(), expected);
  }

  /** Extracts a Config from the given State using the "source.conn." prefix. */
  private static Config sourceConnConfig(State state) {
    return new DefaultHttpClientConfigurator().setStatePropertiesPrefix("source.conn.").stateToConfig(state);
  }
}
| 2,875 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/http/TestHttpClientConfiguratorLoader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.http;
import org.testng.Assert;
import org.testng.Assert.ThrowingRunnable;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.State;
/**
 * Unit tests for {@link HttpClientConfiguratorLoader}: default-configurator resolution
 * from a Gobblin State and rejection of unknown configurator types from a Config.
 */
public class TestHttpClientConfiguratorLoader {
  @Test
  public void testConfigureFromState() {
    // An empty state falls back to the default configurator implementation.
    Assert.assertEquals(
        new HttpClientConfiguratorLoader(new State()).getConfigurator().getClass(),
        DefaultHttpClientConfigurator.class);
    // Explicitly requesting the "default" type resolves to the same class.
    State typedState = new State();
    typedState.setProp(HttpClientConfiguratorLoader.HTTP_CLIENT_CONFIGURATOR_TYPE_FULL_KEY, "default");
    Assert.assertEquals(
        new HttpClientConfiguratorLoader(typedState).getConfigurator().getClass(),
        DefaultHttpClientConfigurator.class);
  }

  @Test
  public void testConfigureFromConfig() {
    // An unrecognized configurator type must be rejected at construction time.
    final Config unknownTypeConfig = ConfigFactory.empty()
        .withValue(HttpClientConfiguratorLoader.HTTP_CLIENT_CONFIGURATOR_TYPE_KEY,
            ConfigValueFactory.fromAnyRef("blah"));
    Assert.assertThrows(() -> new HttpClientConfiguratorLoader(unknownTypeConfig));
  }
}
| 2,876 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/serde/HiveSerDeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.serde;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import com.google.common.io.Closer;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Properties;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.serde.HiveSerDeConverter;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.hadoop.OldApiWritableFileExtractor;
import org.apache.gobblin.source.extractor.hadoop.OldApiWritableFileSource;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.HadoopUtils;
import org.apache.gobblin.writer.Destination;
import org.apache.gobblin.writer.Destination.DestinationType;
import org.apache.gobblin.writer.HiveWritableHdfsDataWriter;
import org.apache.gobblin.writer.HiveWritableHdfsDataWriterBuilder;
import org.apache.gobblin.writer.WriterOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Writable;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Unit test for data ingestion using Hive SerDes.
 *
 * End-to-end flow: read Avro records via OldApiWritableFileSource/Extractor, convert
 * them with HiveSerDeConverter, and write ORC output with HiveWritableHdfsDataWriter.
 *
 * @author Ziyang Liu
 */
public class HiveSerDeTest {
// Filesystem (from default Hadoop Configuration) used to verify and clean up writer output.
private FileSystem fs;
@BeforeClass
public void setUp() throws IOException {
this.fs = FileSystem.get(new Configuration());
}
/**
 * This test uses Avro SerDe to deserialize data from Avro files, and use ORC SerDe
 * to serialize them into ORC files.
 */
@Test(groups = { "gobblin.serde" })
public void testAvroOrcSerDes()
throws IOException, DataRecordException, DataConversionException, URISyntaxException {
// Job/SerDe configuration comes from the serde.properties test resource.
Properties properties = new Properties();
properties.load(HiveSerDeTest.class.getClassLoader().getResourceAsStream("serde/serde.properties"));
SourceState sourceState = new SourceState(new State(properties), ImmutableList.<WorkUnitState> of());
// Point "avro.schema.url" at the local test schema file — presumably consumed by the
// Avro SerDe when deserializing; confirm against the SerDe configuration keys.
File schemaFile = new File(HiveSerDeTest.class.getClassLoader().getResource("serde/serde.avsc").toURI());
sourceState.setProp("avro.schema.url" , schemaFile.getAbsolutePath());
OldApiWritableFileSource source = new OldApiWritableFileSource();
List<WorkUnit> workUnits = source.getWorkunits(sourceState);
// The configured test input is expected to yield exactly one work unit.
Assert.assertEquals(workUnits.size(), 1);
WorkUnitState wus = new WorkUnitState(workUnits.get(0));
wus.addAll(sourceState);
// Closer closes extractor/converter/writer in the finally block and, via rethrow(),
// propagates the primary failure while suppressing close-time exceptions.
Closer closer = Closer.create();
HiveWritableHdfsDataWriter writer = null;
try {
OldApiWritableFileExtractor extractor = closer.register((OldApiWritableFileExtractor) source.getExtractor(wus));
HiveSerDeConverter converter = closer.register(new HiveSerDeConverter());
writer =
closer.register((HiveWritableHdfsDataWriter) new HiveWritableHdfsDataWriterBuilder<>().withBranches(1)
.withWriterId("0").writeTo(Destination.of(DestinationType.HDFS, sourceState))
.withAttemptId("0-0")
.writeInFormat(WriterOutputFormat.ORC).build());
Assert.assertTrue(writer.isSpeculativeAttemptSafe());
converter.init(wus);
Writable record;
// Pull records until the extractor is exhausted; each must convert to exactly one
// Writable, which is handed straight to the ORC writer.
while ((record = extractor.readRecord(null)) != null) {
Iterable<Writable> convertedRecordIterable = converter.convertRecordImpl(null, record, wus);
Assert.assertEquals(Iterators.size(convertedRecordIterable.iterator()), 1);
writer.write(convertedRecordIterable.iterator().next());
}
} catch (Throwable t) {
throw closer.rethrow(t);
} finally {
closer.close();
// NOTE(review): commit() runs after closer.close() has already closed the writer;
// presumably HiveWritableHdfsDataWriter supports commit-after-close — confirm.
if (writer != null) {
writer.commit();
}
// The committed output file must exist at the configured writer path, then the
// whole output directory is removed so reruns start clean.
Assert.assertTrue(this.fs.exists(new Path(sourceState.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR),
sourceState.getProp(ConfigurationKeys.WRITER_FILE_NAME))));
HadoopUtils.deletePath(this.fs, new Path(sourceState.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR)), true);
}
}
}
| 2,877 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/commit/SpeculativeAttemptAwareConstruct.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.commit;
import org.apache.gobblin.annotation.Alpha;
/**
 * Capability interface through which a Gobblin construct declares whether several
 * speculative attempts of it may safely run at the same time.
 *
 * <p>For instance, a {@link org.apache.gobblin.writer.DataWriter} implementing this
 * interface and answering {@code true} from {@link #isSpeculativeAttemptSafe()} promises
 * that concurrent attempts of the same writer will not conflict with one another.
 */
@Alpha
public interface SpeculativeAttemptAwareConstruct {

  /**
   * Reports whether multiple speculative attempts of this construct are safe.
   *
   * <p>To avoid inheritance pitfalls, the suggested implementation pattern is
   * {@code return this.getClass() == MyClass.class;}.
   *
   * @return {@code true} if concurrent speculative attempts cannot conflict;
   *         {@code false} otherwise
   */
  boolean isSpeculativeAttemptSafe();
}
| 2,878 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/commit/FsRenameCommitStep.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.commit;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Preconditions;
import org.apache.gobblin.annotation.Alpha;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.HadoopUtils;
import lombok.extern.slf4j.Slf4j;
/**
* A {@link CommitStep} for renaming files within a {@link FileSystem} or between {@link FileSystem}s.
*
* @author Ziyang Liu
*/
@Alpha
@Slf4j
public class FsRenameCommitStep extends CommitStepBase {

  private final Path srcPath;
  private final Path dstPath;
  // URIs are retained so the (non-serializable) FileSystem handles can be
  // re-created after this step is deserialized (see isCompleted()/execute()).
  private final String srcFsUri;
  private final String dstFsUri;
  private final boolean overwrite;

  private transient FileSystem srcFs;
  private transient FileSystem dstFs;

  private FsRenameCommitStep(Builder<? extends Builder<?>> builder) throws IOException {
    super(builder);

    this.srcPath = builder.srcPath;
    this.dstPath = builder.dstPath;
    // Fall back to the FS configured in the job props (or the local FS) when the
    // builder did not supply an explicit FileSystem.
    this.srcFs = builder.srcFs != null ? builder.srcFs
        : getFileSystem(this.props.getProp(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI));
    this.srcFsUri = this.srcFs.getUri().toString();
    this.dstFs = builder.dstFs != null ? builder.dstFs
        : getFileSystem(this.props.getProp(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI));
    this.dstFsUri = this.dstFs.getUri().toString();
    this.overwrite = builder.overwrite;
  }

  /** Builder for {@link FsRenameCommitStep}. Requires a source and a destination path. */
  public static class Builder<T extends Builder<?>> extends CommitStepBase.Builder<T> {

    private Path srcPath;
    private Path dstPath;
    private FileSystem srcFs;
    private FileSystem dstFs;
    private boolean overwrite;

    public Builder() {
      super();
    }

    public Builder(CommitSequence.Builder commitSequenceBuilder) {
      super(commitSequenceBuilder);
    }

    // NOTE(review): delegates unchanged to the base class; kept in case the
    // override narrows the return type for callers — confirm before removing.
    @Override
    public T withProps(State props) {
      return super.withProps(props);
    }

    /** Sets the path to be moved. */
    @SuppressWarnings("unchecked")
    public T from(Path srcPath) {
      this.srcPath = srcPath;
      return (T) this;
    }

    /** Sets the destination path. */
    @SuppressWarnings("unchecked")
    public T to(Path dstPath) {
      this.dstPath = dstPath;
      return (T) this;
    }

    /** Sets the source {@link FileSystem}; defaults to the FS from the job props. */
    @SuppressWarnings("unchecked")
    public T withSrcFs(FileSystem srcFs) {
      this.srcFs = srcFs;
      return (T) this;
    }

    /** Sets the destination {@link FileSystem}; defaults to the FS from the job props. */
    @SuppressWarnings("unchecked")
    public T withDstFs(FileSystem dstFs) {
      this.dstFs = dstFs;
      return (T) this;
    }

    /** Allows an existing destination path to be overwritten. */
    @SuppressWarnings("unchecked")
    public T overwrite() {
      this.overwrite = true;
      return (T) this;
    }

    /**
     * @throws NullPointerException if the source or destination path was not set
     */
    @Override
    public CommitStep build() throws IOException {
      Preconditions.checkNotNull(this.srcPath, "srcPath is required");
      Preconditions.checkNotNull(this.dstPath, "dstPath is required");
      return new FsRenameCommitStep(this);
    }
  }

  private FileSystem getFileSystem(String fsUri) throws IOException {
    return FileSystem.get(URI.create(fsUri), HadoopUtils.getConfFromState(this.props));
  }

  /** The step is considered done once the destination path exists. */
  @Override
  public boolean isCompleted() throws IOException {
    if (this.dstFs == null) {
      this.dstFs = getFileSystem(this.dstFsUri);
    }
    return this.dstFs.exists(this.dstPath);
  }

  /** Moves {@code srcPath} to {@code dstPath}, re-creating FS handles if needed. */
  @Override
  public void execute() throws IOException {
    if (this.srcFs == null) {
      this.srcFs = getFileSystem(this.srcFsUri);
    }
    if (this.dstFs == null) {
      this.dstFs = getFileSystem(this.dstFsUri);
    }
    // Parameterized logging: no eager String.format work when INFO is disabled.
    log.info("Moving {} to {}", this.srcPath, this.dstPath);
    HadoopUtils.movePath(this.srcFs, this.srcPath, this.dstFs, this.dstPath, this.overwrite, this.dstFs.getConf());
  }
}
| 2,879 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/fork/CopyableSchema.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import org.apache.avro.Schema;
/**
* A wrapper class for {@link org.apache.avro.Schema} that is also {@link Copyable}.
*
* @author Yinan Li
*/
public class CopyableSchema implements Copyable<Schema> {

  private final Schema schema;

  public CopyableSchema(Schema schema) {
    this.schema = schema;
  }

  /**
   * Produces an independent {@link Schema} by serializing the wrapped schema to
   * JSON and re-parsing it (validation disabled, matching the original schema as-is).
   */
  @Override
  public Schema copy()
      throws CopyNotSupportedException {
    String json = this.schema.toString();
    Schema.Parser parser = new Schema.Parser().setValidate(false);
    return parser.parse(json);
  }
}
| 2,880 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/fork/CopyableGenericRecord.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
/**
* A wrapper class for {@link org.apache.avro.generic.GenericRecord}
* that is also {@link Copyable}.
*
* @author Yinan Li
*/
public class CopyableGenericRecord implements Copyable<GenericRecord> {

  private final GenericRecord record;

  public CopyableGenericRecord(GenericRecord record) {
    this.record = record;
  }

  /**
   * Deep-copies the wrapped record. Only the concrete {@link GenericData.Record}
   * implementation supports deep copying.
   *
   * @throws CopyNotSupportedException if the wrapped record is not a {@link GenericData.Record}
   */
  @Override
  public GenericRecord copy()
      throws CopyNotSupportedException {
    if (this.record instanceof GenericData.Record) {
      // The boolean flag requests a deep copy of the original record.
      return new GenericData.Record((GenericData.Record) this.record, true);
    }
    throw new CopyNotSupportedException(
        "The record to make copy is not an instance of " + GenericData.Record.class.getName());
  }
}
| 2,881 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/fork/IdentityForkOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import java.io.IOException;
import java.util.List;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
/**
* An implementation of {@link ForkOperator} that simply copy the input schema
* and data record into each forked branch. This class is useful if a converted
* data record needs to be written to different destinations.
*
* @author Yinan Li
*/
public class IdentityForkOperator<S, D> implements ForkOperator<S, D> {

  // Both lists are cleared and refilled on each call rather than reallocated.
  private final List<Boolean> schemas = Lists.newArrayList();
  private final List<Boolean> records = Lists.newArrayList();

  @Override
  public void init(WorkUnitState workUnitState) {
    // No initialization required
  }

  /** Number of branches, taken from configuration (defaults to 1). */
  @Override
  public int getBranches(WorkUnitState workUnitState) {
    return workUnitState.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
  }

  /** Accepts the schema into every branch. */
  @Override
  public List<Boolean> forkSchema(WorkUnitState workUnitState, S input) {
    return refill(this.schemas, getBranches(workUnitState));
  }

  /** Accepts the data record into every branch. */
  @Override
  public List<Boolean> forkDataRecord(WorkUnitState workUnitState, D input) {
    return refill(this.records, getBranches(workUnitState));
  }

  /** Clears {@code target} and fills it with {@code count} TRUE entries. */
  private static List<Boolean> refill(List<Boolean> target, int count) {
    target.clear();
    int remaining = count;
    while (remaining > 0) {
      target.add(Boolean.TRUE);
      remaining--;
    }
    return target;
  }

  @Override
  public void close()
      throws IOException {
    // Nothing to close
  }
}
| 2,882 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/initializer/MultiInitializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.initializer;
import java.io.IOException;
import java.util.List;
import lombok.ToString;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Closer;
/**
* Wraps multiple writer initializer behind its interface. This is useful when there're more than one branch.
*/
@ToString
public class MultiInitializer implements Initializer {

  private final List<Initializer> initializers;
  private final Closer closer;

  /**
   * @param initializers initializers driven together as one unit; the list is
   *                     defensively copied and each member is registered for closing.
   */
  public MultiInitializer(List<? extends Initializer> initializers) {
    this.initializers = ImmutableList.copyOf(initializers);
    this.closer = Closer.create();
    this.initializers.forEach(this.closer::register);
  }

  /** Invokes {@code initialize()} on every underlying initializer, in order. */
  @Override
  public void initialize() {
    this.initializers.forEach(Initializer::initialize);
  }

  /** Closes all underlying initializers through the shared {@link Closer}. */
  @Override
  public void close() {
    try {
      this.closer.close();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
}
| 2,883 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelPolicyCheckerBuilderFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import org.apache.gobblin.configuration.State;
public class RowLevelPolicyCheckerBuilderFactory {

  /**
   * Creates a {@link RowLevelPolicyCheckerBuilder} for the given state and fork branch index.
   *
   * @param state job/task state carrying the row-level policy configuration
   * @param index fork branch index used to resolve branch-scoped property names
   * @return a new builder instance
   */
  public static RowLevelPolicyCheckerBuilder newPolicyCheckerBuilder(State state, int index) {
    return RowLevelPolicyCheckerBuilder.newBuilder(state, index);
  }
}
| 2,884 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelPolicyChecker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.gobblin.commit.SpeculativeAttemptAwareConstruct;
import org.apache.gobblin.stream.FlushControlMessage;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Strings;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.stream.ControlMessage;
import org.apache.gobblin.records.ControlMessageHandler;
import org.apache.gobblin.records.RecordStreamProcessor;
import org.apache.gobblin.records.RecordStreamWithMetadata;
import org.apache.gobblin.stream.RecordEnvelope;
import org.apache.gobblin.stream.StreamEntity;
import org.apache.gobblin.util.FinalState;
import org.apache.gobblin.util.HadoopUtils;
import io.reactivex.Flowable;
import javax.annotation.concurrent.ThreadSafe;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class RowLevelPolicyChecker<S, D> implements Closeable, FinalState, RecordStreamProcessor<S, S, D, D>,
    SpeculativeAttemptAwareConstruct {

  /**
   * Given the existence of writer object when the policy is set to {@link RowLevelPolicy.Type#ERR_FILE}, objects of
   * this class needs to be speculative-attempt-aware.
   *
   * Safe when no configured policy writes an err file, or when the user explicitly
   * opted in via {@link #ALLOW_SPECULATIVE_EXECUTION_WITH_ERR_FILE_POLICY}.
   */
  @Override
  public boolean isSpeculativeAttemptSafe() {
    return this.list.stream().noneMatch(x -> x.getType().equals(RowLevelPolicy.Type.ERR_FILE)) || this.allowSpeculativeExecWhenWriteErrFile;
  }

  // The row-level policies applied, in order, to every record.
  @Getter
  private final List<RowLevelPolicy> list;
  // Identifier appended to err-file names to disambiguate tasks; may be empty.
  private final String stateId;
  // FileSystem on which err files are written.
  private final FileSystem fs;
  // True while an err file is open; reset by close().
  private boolean errFileOpen;
  // Caps how many failing records are persisted to the err file.
  private final FrontLoadedSampler sampler;
  // Lazily created when the first sampled failure must be written out.
  private RowLevelErrFileWriter writer;
  // Aggregated (policy, result) counts across all processed records.
  @Getter
  private final RowLevelPolicyCheckResults results;

  /** Flag to determine if it is safe to enable speculative execution when policy is set to ERR_FILE
   * Users are suggested to turn this off since it could potentially run into HDFS file lease contention if multiple
   * speculative execution are appending to the same ERR_FILE.
   *
   * When there are ERR_FILE policy appears and users are enforcing to set it to true, RowPolicyChecker will create
   * different ERR_FILE with timestamp in name to avoid contention but there's no guarantee as
   * different containers' clocks are hard to coordinate.
   * */
  private boolean allowSpeculativeExecWhenWriteErrFile;

  static final String ALLOW_SPECULATIVE_EXECUTION_WITH_ERR_FILE_POLICY = "allowSpeculativeExecutionWithErrFilePolicy";

  public RowLevelPolicyChecker(List<RowLevelPolicy> list, String stateId, FileSystem fs) {
    this(list, stateId, fs, new State());
  }

  public RowLevelPolicyChecker(List<RowLevelPolicy> list, String stateId, FileSystem fs, State state) {
    this.list = list;
    this.stateId = stateId;
    this.fs = fs;
    this.errFileOpen = false;
    this.results = new RowLevelPolicyCheckResults();
    // 1.5 is the sampling decay factor; see FrontLoadedSampler below.
    this.sampler = new FrontLoadedSampler(state.getPropAsLong(ConfigurationKeys.ROW_LEVEL_ERR_FILE_RECORDS_PER_TASK,
        ConfigurationKeys.DEFAULT_ROW_LEVEL_ERR_FILE_RECORDS_PER_TASK), 1.5);
    // By default set to true as to maintain backward-compatibility.
    this.allowSpeculativeExecWhenWriteErrFile = state.getPropAsBoolean(ALLOW_SPECULATIVE_EXECUTION_WITH_ERR_FILE_POLICY, true);
  }

  /**
   * Runs every configured policy against {@code record}, recording each outcome in {@code results}.
   *
   * @return true if the record passed all policies; false as soon as one policy fails
   *         (remaining policies are not evaluated for that record)
   * @throws IOException if writing to the err file fails
   */
  public boolean executePolicies(Object record, RowLevelPolicyCheckResults results) throws IOException {
    for (RowLevelPolicy p : this.list) {
      RowLevelPolicy.Result result = p.executePolicy(record);
      results.put(p, result);
      if (!checkResult(result, p, record)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Handle the result of {@link RowLevelPolicy#executePolicy(Object)}.
   *
   * FAIL-type policies abort the task via RuntimeException; ERR_FILE-type policies
   * write a sampled subset of failing records to an err file.
   *
   * @return true if the record passed the policy, false otherwise
   */
  protected boolean checkResult(RowLevelPolicy.Result checkResult, RowLevelPolicy p, Object record) throws IOException {
    boolean result = true;
    if (checkResult.equals(RowLevelPolicy.Result.FAILED)) {
      if (p.getType().equals(RowLevelPolicy.Type.FAIL)) {
        throw new RuntimeException("RowLevelPolicy " + p + " failed on record " + record);
      } else if (p.getType().equals(RowLevelPolicy.Type.ERR_FILE)) {
        // Only a sampled subset of failures is persisted, to bound err-file size.
        if (this.sampler.acceptNext()) {
          if (!this.errFileOpen) {
            // Lazily open the err file on the first sampled failure.
            this.writer = new RowLevelErrFileWriter(this.fs);
            this.writer.open(getErrFilePath(p));
            this.writer.write(record);
          } else {
            this.writer.write(record);
          }
          this.errFileOpen = true;
        }
      }
      result = false;
    }
    return result;
  }

  // Builds the err-file path: sanitized policy name, optional state id, and — when
  // speculative execution is allowed — a timestamp to reduce file-lease contention.
  Path getErrFilePath(RowLevelPolicy policy) {
    String errFileName = HadoopUtils.sanitizePath(policy.toString(), "-");
    if (!Strings.isNullOrEmpty(this.stateId)) {
      errFileName += "-" + this.stateId;
    }
    if (allowSpeculativeExecWhenWriteErrFile) {
      errFileName += "-" + System.currentTimeMillis();
    }
    errFileName += ".err";
    return new Path(policy.getErrFileLocation(), errFileName);
  }

  /** Closes the err file if one is open; safe to call repeatedly. */
  @Override
  public void close() throws IOException {
    if (this.errFileOpen) {
      this.writer.close();
      this.errFileOpen = false;
    }
  }

  /**
   * Get final state for this object, obtained by merging the final states of the
   * {@link org.apache.gobblin.qualitychecker.row.RowLevelPolicy}s used by this object.
   * @return Merged {@link org.apache.gobblin.configuration.State} of final states for
   *         {@link org.apache.gobblin.qualitychecker.row.RowLevelPolicy} used by this checker.
   */
  @Override
  public State getFinalState() {
    State state = new State();
    for (RowLevelPolicy policy : this.list) {
      state.addAll(policy.getFinalState());
    }
    return state;
  }

  /**
   * Process the stream and drop any records that fail the quality check.
   * Control messages are forwarded to {@link #getMessageHandler()} and passed through;
   * dropped records are acked so upstream accounting stays consistent.
   */
  @Override
  public RecordStreamWithMetadata<D, S> processStream(RecordStreamWithMetadata<D, S> inputStream, WorkUnitState state) {
    Flowable<StreamEntity<D>> filteredStream =
        inputStream.getRecordStream().filter(r -> {
          if (r instanceof ControlMessage) {
            getMessageHandler().handleMessage((ControlMessage) r);
            return true;
          } else if (r instanceof RecordEnvelope) {
            boolean accept = executePolicies(((RecordEnvelope) r).getRecord(), this.results);
            if (!accept) {
              // Ack the rejected record so it is accounted for despite being dropped.
              r.ack();
            }
            return accept;
          } else {
            // Unknown entity types pass through untouched.
            return true;
          }
        });
    // Ensure the err file is closed when the stream terminates for any reason.
    filteredStream = filteredStream.doFinally(this::close);
    return inputStream.withRecordStream(filteredStream);
  }

  /**
   * @return a {@link ControlMessageHandler} that, on {@link FlushControlMessage},
   *         closes the open err file so a new one is started afterwards.
   */
  protected ControlMessageHandler getMessageHandler() {
    return new ControlMessageHandler() {
      @Override
      public void handleMessage(ControlMessage message) {
        if (message instanceof FlushControlMessage) {
          try {
            RowLevelPolicyChecker.this.close();
          } catch (IOException ioe) {
            log.error("Failed to close errFile", ioe);
          }
        }
      }
    };
  }

  /**
   * A sampler used to ensure the err file contains at most around {@link #targetRecordsAccepted}
   * records.
   *
   * Basically, we will write the first {@link #targetRecordsAccepted} records without sampling. After that we apply a
   * rapidly decaying sampling to make sure we write at most about 100 additional records, spread out through the
   * rest of the stream.
   */
  @ThreadSafe
  static class FrontLoadedSampler {

    private final long targetRecordsAccepted;
    /**
     * Specifies how sampling decays at the tail end of the stream (after the first {@link #targetRecordsAccepted} have
     * been accepted). We will accept at most roughly
     * {@link #targetRecordsAccepted} + 8/log_{10}({@link #decayFactor}) records total.
     */
    private final double decayFactor;
    // Total records offered so far (each acceptNext() call increments this).
    private final AtomicLong errorRecords = new AtomicLong();
    // Index of the next record that will be accepted.
    private final AtomicLong nextErrorRecordWritten = new AtomicLong();

    public FrontLoadedSampler(long targetRecordsAccepted, double decayFactor) {
      this.targetRecordsAccepted = targetRecordsAccepted;
      // Decay factors below 1 would accept more, not fewer, records over time.
      this.decayFactor = Math.max(1, decayFactor);
      if (this.targetRecordsAccepted <= 0) {
        // A non-positive target disables sampling entirely: never accept.
        this.nextErrorRecordWritten.set(Long.MAX_VALUE);
      }
    }

    /**
     * @return true if the caller should persist this record; thread-safe via CAS —
     *         exactly one thread wins the right to accept a given record index.
     */
    public boolean acceptNext() {
      long recordNum = this.errorRecords.getAndIncrement();
      while (recordNum >= this.nextErrorRecordWritten.get()) {
        if (this.nextErrorRecordWritten.compareAndSet(recordNum, computeNextErrorRecordWritten())) {
          return true;
        }
      }
      return false;
    }

    // Next acceptance index: consecutive while under target, then geometric growth.
    private long computeNextErrorRecordWritten() {
      long current = this.nextErrorRecordWritten.get();
      if (current < this.targetRecordsAccepted) {
        return current + 1;
      } else {
        return (long) (this.decayFactor * current) + 1;
      }
    }
  }
}
| 2,885 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelErrFileWriter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import java.io.BufferedWriter;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
* A writer to store records that don't pass
* the a row level policy check
*
* @author stakiar
*/
public class RowLevelErrFileWriter implements Closeable {

  private final FileSystem fs;
  private final Closer closer = Closer.create();
  private BufferedWriter writer;

  public RowLevelErrFileWriter(FileSystem fs) {
    this.fs = fs;
  }

  /**
   * Opens a buffered writer on {@code errFilePath}, creating parent directories as
   * needed. Appends when the file already exists, otherwise creates it.
   *
   * @param errFilePath path to write the file
   */
  public void open(Path errFilePath) throws IOException {
    this.fs.mkdirs(errFilePath.getParent());
    OutputStream os;
    if (this.fs.exists(errFilePath)) {
      os = this.fs.append(errFilePath);
    } else {
      os = this.fs.create(errFilePath);
    }
    this.closer.register(os);
    OutputStreamWriter encoded = new OutputStreamWriter(os, ConfigurationKeys.DEFAULT_CHARSET_ENCODING);
    this.writer = this.closer.register(new BufferedWriter(encoded));
  }

  /**
   * Writes the string representation of {@code record}.
   * NOTE(review): no separator is written between consecutive records — confirm
   * whether a trailing newline is intended before changing this.
   *
   * @param record the record to write
   */
  public void write(Object record) throws IOException {
    this.writer.write(record.toString());
  }

  /** Closes the underlying writer and stream. */
  @Override
  public void close() throws IOException {
    this.closer.close();
  }
}
| 2,886 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelPolicyCheckerBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import org.apache.gobblin.configuration.ConfigurationKeys;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.List;
import org.apache.gobblin.util.reflection.GobblinConstructorUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ForkOperatorUtils;
import org.apache.gobblin.util.WriterUtils;
public class RowLevelPolicyCheckerBuilder {

  public static final String ROW_LEVEL_POLICY_CHECKER_TYPE = "rowLevelPolicyCheckerType";
  public static final String DEFAULT_ROW_LEVEL_POLICY_CHECKER_TYPE = RowLevelPolicyChecker.class.getName();

  private static final Logger LOG = LoggerFactory.getLogger(RowLevelPolicyCheckerBuilder.class);

  private final State state;
  private final int index;

  public RowLevelPolicyCheckerBuilder(State state, int index) {
    this.state = state;
    this.index = index;
  }

  /** Static factory mirroring the constructor. */
  public static RowLevelPolicyCheckerBuilder newBuilder(State state, int index) {
    return new RowLevelPolicyCheckerBuilder(state, index);
  }

  /**
   * Instantiates every configured row-level policy for this fork branch by reflection.
   * Returns an empty list when the branch has no policy configuration.
   *
   * @throws Exception if the policy and type lists differ in length, or a policy
   *         class cannot be loaded/instantiated
   */
  @SuppressWarnings("unchecked")
  private List<RowLevelPolicy> createPolicyList()
      throws Exception {
    List<RowLevelPolicy> policyList = new ArrayList<>();

    String policiesKey =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, this.index);
    String policyTypesKey =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, this.index);

    if (!this.state.contains(policiesKey) || !this.state.contains(policyTypesKey)) {
      return policyList;
    }

    Splitter commaSplitter = Splitter.on(",").omitEmptyStrings().trimResults();
    List<String> policyNames = Lists.newArrayList(commaSplitter.split(this.state.getProp(policiesKey)));
    List<String> policyTypes = Lists.newArrayList(commaSplitter.split(this.state.getProp(policyTypesKey)));

    if (policyNames.size() != policyTypes.size()) {
      throw new Exception("Row Policies list and Row Policies list type are not the same length");
    }

    for (int i = 0; i < policyNames.size(); i++) {
      String className = policyNames.get(i);
      try {
        // Each policy must expose a (State, RowLevelPolicy.Type) constructor.
        Class<? extends RowLevelPolicy> policyClass = (Class<? extends RowLevelPolicy>) Class.forName(className);
        Constructor<? extends RowLevelPolicy> ctor =
            policyClass.getConstructor(State.class, RowLevelPolicy.Type.class);
        policyList.add(ctor.newInstance(this.state, RowLevelPolicy.Type.valueOf(policyTypes.get(i))));
      } catch (Exception e) {
        LOG.error(policiesKey + " contains a class " + className + " which doesn't extend RowLevelPolicy.", e);
        throw e;
      }
    }
    return policyList;
  }

  /**
   * Builds the checker, honoring an optional custom checker class configured via
   * {@link #ROW_LEVEL_POLICY_CHECKER_TYPE}.
   */
  public RowLevelPolicyChecker build()
      throws Exception {
    String checkerClass = DEFAULT_ROW_LEVEL_POLICY_CHECKER_TYPE;
    if (this.state.contains(ROW_LEVEL_POLICY_CHECKER_TYPE)) {
      checkerClass = this.state.getProp(ROW_LEVEL_POLICY_CHECKER_TYPE);
    }
    return GobblinConstructorUtils.invokeConstructor(RowLevelPolicyChecker.class, checkerClass, createPolicyList(),
        this.state.getId(), WriterUtils.getWriterFS(this.state, 1, 0), this.state);
  }
}
| 2,887 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelPolicyCheckResults.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.row;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.google.common.base.Joiner;
import com.google.common.collect.Maps;
/**
* Stores the results of a RowLevelPolicy
* @author stakiar
*/
public class RowLevelPolicyCheckResults {

  // (policy, result) -> number of records that produced that result. A concurrent
  // map because policies may be evaluated from multiple threads.
  Map<RowLevelPolicyResultPair, Long> results;

  public RowLevelPolicyCheckResults() {
    this.results = Maps.newConcurrentMap();
  }

  /**
   * Records one occurrence of {@code result} for {@code policy}.
   */
  public void put(RowLevelPolicy policy, RowLevelPolicy.Result result) {
    // merge() performs the read-modify-write atomically on the concurrent map.
    // The previous containsKey/get/put sequence was a check-then-act race that
    // could lose counts under concurrent access.
    this.results.merge(new RowLevelPolicyResultPair(policy, result), 1L, Long::sum);
  }

  /**
   * @return a newline-separated human-readable summary of all recorded results
   */
  public String getResults() {
    List<String> list = new ArrayList<>();
    Joiner joiner = Joiner.on("\n").skipNulls();
    for (Map.Entry<RowLevelPolicyResultPair, Long> entry : this.results.entrySet()) {
      list.add("RowLevelPolicy " + entry.getKey().getPolicy().toString() + " processed " + entry.getValue()
          + " record(s) with result " + entry.getKey().getResult());
    }
    return joiner.join(list);
  }

  /** Map key pairing a policy with one of its possible results. */
  public static class RowLevelPolicyResultPair {
    private RowLevelPolicy policy;
    private RowLevelPolicy.Result result;

    public RowLevelPolicyResultPair(RowLevelPolicy policy, RowLevelPolicy.Result result) {
      this.policy = policy;
      this.result = result;
    }

    public RowLevelPolicy getPolicy() {
      return this.policy;
    }

    public RowLevelPolicy.Result getResult() {
      return this.result;
    }

    // Equality is based on the policy's string form plus the result, matching hashCode below.
    @Override
    public boolean equals(Object o) {
      if (!(o instanceof RowLevelPolicyResultPair)) {
        return false;
      }
      RowLevelPolicyResultPair p = (RowLevelPolicyResultPair) o;
      return p.getPolicy().toString().equals(this.policy.toString()) && p.getResult().equals(this.result);
    }

    @Override
    public int hashCode() {
      return (this.policy.toString() + this.result).hashCode();
    }
  }
}
| 2,888 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/task/TaskLevelPolicyChecker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.task;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Runs a list of {@link TaskLevelPolicy} objects: executes each one and stores the
 * (result, policy-type) outcome in a {@link TaskLevelPolicyCheckResults}.
 */
public class TaskLevelPolicyChecker {

  private static final Logger LOG = LoggerFactory.getLogger(TaskLevelPolicyChecker.class);

  private final List<TaskLevelPolicy> list;

  /**
   * @param list the policies to execute, in order
   */
  public TaskLevelPolicyChecker(List<TaskLevelPolicy> list) {
    this.list = list;
  }

  /**
   * Executes every configured policy and records its outcome.
   *
   * <p>NOTE(review): the results map is keyed by {@link TaskLevelPolicy.Result}, so if two
   * policies produce the same result only the last policy's type is retained — confirm this
   * aggregation is intended.
   *
   * @return the aggregated results of all policy executions
   */
  public TaskLevelPolicyCheckResults executePolicies() {
    TaskLevelPolicyCheckResults results = new TaskLevelPolicyCheckResults();
    for (TaskLevelPolicy p : this.list) {
      TaskLevelPolicy.Result result = p.executePolicy();
      results.getPolicyResults().put(result, p.getType());
      // Parameterized logging avoids building the message string when INFO is disabled.
      LOG.info("TaskLevelPolicy {} of type {} executed with result {}", p, p.getType(), result);
    }
    return results;
  }
}
| 2,889 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/task/TaskLevelPolicyCheckResults.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.task;
import java.util.HashMap;
import java.util.Map;
/**
 * Holds the outcome of task-level policy checks: a mapping from each
 * {@link TaskLevelPolicy.Result} to the {@link TaskLevelPolicy.Type} of the policy
 * that produced it.
 */
public class TaskLevelPolicyCheckResults {

  private final Map<TaskLevelPolicy.Result, TaskLevelPolicy.Type> results = new HashMap<>();

  /**
   * @return the live (mutable) result-to-type map backing this object
   */
  public Map<TaskLevelPolicy.Result, TaskLevelPolicy.Type> getPolicyResults() {
    return this.results;
  }
}
| 2,890 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/task/TaskLevelPolicyCheckerBuilder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.task;
import org.apache.gobblin.configuration.ConfigurationKeys;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ForkOperatorUtils;
/**
 * Builds a {@link TaskLevelPolicyChecker} for one fork branch by instantiating every
 * {@link TaskLevelPolicy} named in the job state for that branch.
 */
public class TaskLevelPolicyCheckerBuilder {

  private static final Logger LOG = LoggerFactory.getLogger(TaskLevelPolicyCheckerBuilder.class);

  private final State state;
  private final int index;

  public TaskLevelPolicyCheckerBuilder(State state, int index) {
    this.state = state;
    this.index = index;
  }

  /**
   * Reads the branch-scoped policy-class and policy-type lists from the state and reflectively
   * instantiates each policy through its {@code (State, TaskLevelPolicy.Type)} constructor.
   *
   * @return the instantiated policies; empty when the state carries no policy configuration
   * @throws Exception when the two lists differ in length or a policy cannot be instantiated
   */
  @SuppressWarnings("unchecked")
  private List<TaskLevelPolicy> createPolicyList() throws Exception {
    List<TaskLevelPolicy> taskPolicies = new ArrayList<>();

    String policiesKey =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.TASK_LEVEL_POLICY_LIST, this.index);
    String policyTypesKey =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.TASK_LEVEL_POLICY_LIST_TYPE, this.index);

    // No configuration for this branch means no policies to run.
    if (!this.state.contains(policiesKey) || !this.state.contains(policyTypesKey)) {
      return taskPolicies;
    }

    Splitter commaSplitter = Splitter.on(",").omitEmptyStrings().trimResults();
    List<String> policyClassNames = Lists.newArrayList(commaSplitter.split(this.state.getProp(policiesKey)));
    List<String> policyTypeNames = Lists.newArrayList(commaSplitter.split(this.state.getProp(policyTypesKey)));

    if (policyClassNames.size() != policyTypeNames.size()) {
      throw new Exception("TaskLevelPolicy list and TaskLevelPolicies type list are not the same length");
    }

    for (int i = 0; i < policyClassNames.size(); i++) {
      try {
        Class<? extends TaskLevelPolicy> policyClass =
            (Class<? extends TaskLevelPolicy>) Class.forName(policyClassNames.get(i));
        Constructor<? extends TaskLevelPolicy> policyConstructor =
            policyClass.getConstructor(State.class, TaskLevelPolicy.Type.class);
        taskPolicies.add(policyConstructor.newInstance(this.state, TaskLevelPolicy.Type.valueOf(policyTypeNames.get(i))));
      } catch (Exception e) {
        LOG.error(policiesKey + " contains a class " + policyClassNames.get(i) + " which doesn't extend Policy.", e);
        throw e;
      }
    }

    return taskPolicies;
  }

  public static TaskLevelPolicyCheckerBuilder newBuilder(State state, int index) {
    return new TaskLevelPolicyCheckerBuilder(state, index);
  }

  public TaskLevelPolicyChecker build() throws Exception {
    return new TaskLevelPolicyChecker(createPolicyList());
  }
}
| 2,891 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/task/TaskLevelPolicyCheckerBuilderFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.qualitychecker.task;
import org.apache.gobblin.configuration.State;
/**
 * Static factory for {@link TaskLevelPolicyCheckerBuilder} instances.
 */
public class TaskLevelPolicyCheckerBuilderFactory {
  /**
   * Creates a builder for the given job/task state and fork-branch index.
   *
   * @param state the state holding the task-level policy configuration
   * @param index the fork branch index the checker is being built for
   * @return a new {@link TaskLevelPolicyCheckerBuilder}
   */
  public static TaskLevelPolicyCheckerBuilder newPolicyCheckerBuilder(State state, int index) {
    return TaskLevelPolicyCheckerBuilder.newBuilder(state, index);
  }
}
| 2,892 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/net/Request.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.net;
/**
 * The request adapter for any kind of request/response model.
 *
 * <p>Implementations wrap a transport-specific request object so callers can work with a
 * uniform interface while still reaching the underlying request when needed.
 *
 * @param <RQ> the actual type of raw request
 */
public interface Request<RQ> {
  /** @return the underlying transport-specific request object this adapter wraps */
  RQ getRawRequest();
}
| 2,893 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/EmptyIterable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter;
/**
 * Kept only to preserve the old package name; all behavior is inherited from the replacement.
 *
 * @deprecated Please use {@link org.apache.gobblin.util.EmptyIterable} instead.
 */
@Deprecated
public class EmptyIterable<T> extends org.apache.gobblin.util.EmptyIterable<T> {
}
| 2,894 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/GobblinMetricsPinotFlattenerConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.avro.util.Utf8;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.WorkUnitState;
import lombok.extern.slf4j.Slf4j;
/**
 * Flatten {@link org.apache.gobblin.metrics.MetricReport} for use by Pinot.
 * The output schema can be found at "gobblin-metrics/src/main/avro/FlatGobblinMetric.avsc".
 *
 * <p>Each input report containing N metrics is flattened into N output records, every one
 * carrying the report's tags (as {@code "key:value"} strings) and timestamp plus a single
 * metric name/value pair.
 */
@Slf4j
public class GobblinMetricsPinotFlattenerConverter extends Converter<Schema, Schema, GenericRecord, GenericRecord> {

  private final Schema schema;

  public GobblinMetricsPinotFlattenerConverter() throws IOException {
    try (InputStream is =
        GobblinMetricsPinotFlattenerConverter.class.getClassLoader().getResourceAsStream("FlatGobblinMetric.avsc")) {
      // Fail fast with a clear message instead of an NPE when the schema resource is missing.
      if (is == null) {
        throw new IOException("Could not find schema resource FlatGobblinMetric.avsc on the classpath.");
      }
      this.schema = new Schema.Parser().parse(is);
    }
  }

  /**
   * @return the flat Pinot schema loaded from FlatGobblinMetric.avsc, regardless of the input schema
   */
  @Override
  public Schema convertSchema(Schema inputSchema, WorkUnitState workUnit)
      throws SchemaConversionException {
    return this.schema;
  }

  /**
   * Emits one flat record per metric contained in the input metric report.
   *
   * @param outputSchema ignored; the schema loaded in the constructor is used instead
   * @param inputRecord a MetricReport record with "tags", "timestamp" and "metrics" fields
   */
  @SuppressWarnings("unchecked") // The MetricReport schema fixes the runtime types of "tags" and "metrics".
  @Override
  public Iterable<GenericRecord> convertRecord(Schema outputSchema, GenericRecord inputRecord, WorkUnitState workUnit)
      throws DataConversionException {
    GenericRecordBuilder baseBuilder = new GenericRecordBuilder(this.schema);

    // Flatten the tag map into "key:value" strings (plain loop; no intermediate Function object).
    Map<Utf8, Utf8> tags = (Map<Utf8, Utf8>) inputRecord.get("tags");
    List<String> tagList = Lists.newArrayListWithCapacity(tags.size());
    for (Map.Entry<Utf8, Utf8> tag : tags.entrySet()) {
      tagList.add(tag.getKey().toString() + ":" + tag.getValue().toString());
    }

    baseBuilder.set("tags", tagList);
    baseBuilder.set("timestamp", inputRecord.get("timestamp"));

    // Emit one output record per metric, sharing the common tag/timestamp base.
    List<GenericRecord> metrics = (List<GenericRecord>) inputRecord.get("metrics");
    List<GenericRecord> flatMetrics = Lists.newArrayListWithCapacity(metrics.size());
    for (GenericRecord metric : metrics) {
      GenericRecordBuilder thisMetric = new GenericRecordBuilder(baseBuilder);
      thisMetric.set("metricName", metric.get("name"));
      thisMetric.set("metricValue", metric.get("value"));
      flatMetrics.add(thisMetric.build());
    }
    return flatMetrics;
  }
}
| 2,895 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/StringSchemaInjector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * Injects the string schema specified by key {@link #SCHEMA_KEY}; records pass through unchanged.
 */
public class StringSchemaInjector<SI, DI> extends Converter<SI, String, DI, DI> {

  /** Configuration key whose value becomes the output schema of this converter. */
  public static final String SCHEMA_KEY = "gobblin.converter.schemaInjector.schema";

  private String schema;

  @Override
  public Converter<SI, String, DI, DI> init(WorkUnitState workUnit) {
    super.init(workUnit);
    // Include the missing key in the error so misconfiguration is diagnosable from the message.
    Preconditions.checkArgument(workUnit.contains(SCHEMA_KEY),
        "%s is a required property for StringSchemaInjector.", SCHEMA_KEY);
    this.schema = workUnit.getProp(SCHEMA_KEY);
    return this;
  }

  /**
   * @return the schema string configured via {@link #SCHEMA_KEY}, ignoring the input schema
   */
  @Override
  public String convertSchema(SI inputSchema, WorkUnitState workUnit)
      throws SchemaConversionException {
    return this.schema;
  }

  /** Passes the input record through unchanged. */
  @Override
  public Iterable<DI> convertRecord(String outputSchema, DI inputRecord, WorkUnitState workUnit)
      throws DataConversionException {
    return Lists.newArrayList(inputRecord);
  }
}
| 2,896 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/initializer/MultiConverterInitializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.initializer;
import java.util.List;
import lombok.ToString;
import org.apache.gobblin.initializer.Initializer;
import org.apache.gobblin.initializer.MultiInitializer;
/**
 * A {@link ConverterInitializer} that fans {@code initialize}/{@code close} out to a list of
 * child converter initializers via a {@link MultiInitializer}.
 */
@ToString
public class MultiConverterInitializer implements ConverterInitializer {

  // Composite delegate over all branch-level converter initializers. (Renamed from the
  // misspelled "intializer".)
  private final Initializer initializer;

  /**
   * @param converterInitializers the per-converter initializers to manage as one unit
   */
  public MultiConverterInitializer(List<ConverterInitializer> converterInitializers) {
    this.initializer = new MultiInitializer(converterInitializers);
  }

  /** Initializes every wrapped converter initializer. */
  @Override
  public void initialize() {
    this.initializer.initialize();
  }

  /** Closes every wrapped converter initializer. */
  @Override
  public void close() {
    this.initializer.close();
  }
}
| 2,897 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/initializer/ConverterInitializerFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.initializer;
import java.util.List;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.util.ForkOperatorUtils;
import org.apache.gobblin.source.workunit.WorkUnitStream;
/**
 * Creates the {@link ConverterInitializer} for a job: one initializer per configured converter
 * class per fork branch, combined into a {@link MultiConverterInitializer} when there is more
 * than one.
 */
public class ConverterInitializerFactory {

  // Fixed: the second chained call was a duplicated no-op omitEmptyStrings(); trimResults()
  // was clearly intended (matches the splitter used for other comma-separated class lists),
  // so "a.B, c.D" now parses the same as "a.B,c.D".
  private static final Splitter COMMA_SPLITTER = Splitter.on(',').omitEmptyStrings().trimResults();

  /**
   * Provides a ConverterInitializer based on the converters configured for each fork branch.
   * If there's more than one branch, it instantiates one ConverterInitializer per branch and
   * combines them into a MultiConverterInitializer.
   *
   * @param state the job state holding the converter configuration
   * @param workUnits the work units the converters will operate on
   * @return the combined ConverterInitializer covering all branches
   */
  public static ConverterInitializer newInstance(State state, WorkUnitStream workUnits) {
    int branches = state.getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);
    if (branches == 1) {
      return newInstance(state, workUnits, branches, 0);
    }
    List<ConverterInitializer> cis = Lists.newArrayList();
    for (int branchId = 0; branchId < branches; branchId++) {
      cis.add(newInstance(state, workUnits, branches, branchId));
    }
    return new MultiConverterInitializer(cis);
  }

  /**
   * Builds the initializer for a single branch by instantiating each configured converter class
   * and collecting its initializer.
   */
  private static ConverterInitializer newInstance(State state, WorkUnitStream workUnits, int branches,
      int branchId) {
    Preconditions.checkNotNull(state);
    String converterClassesParam =
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.CONVERTER_CLASSES_KEY, branches, branchId);
    List<String> converterClasses = COMMA_SPLITTER.splitToList(state.getProp(converterClassesParam, ""));
    if (converterClasses.isEmpty()) {
      return NoopConverterInitializer.INSTANCE;
    }
    List<ConverterInitializer> cis = Lists.newArrayList();
    for (String converterClass : converterClasses) {
      Converter converter;
      try {
        converter = (Converter) Class.forName(converterClass).newInstance();
      } catch (Exception e) {
        // Wrap reflection failures (missing class, no public no-arg ctor) as unchecked.
        throw new RuntimeException(e);
      }
      cis.add(converter.getInitializer(state, workUnits, branches, branchId));
    }
    return new MultiConverterInitializer(cis);
  }
}
| 2,898 |
0 | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter | Create_ds/gobblin/gobblin-core/src/main/java/org/apache/gobblin/converter/objectstore/ObjectStoreDeleteConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.converter.objectstore;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import org.apache.gobblin.annotation.Alpha;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SingleRecordIterable;
import org.apache.gobblin.util.AvroUtils;
import org.apache.gobblin.writer.objectstore.ObjectStoreDeleteOperation;
import org.apache.gobblin.writer.objectstore.ObjectStoreOperationBuilder;
/**
 * A converter that turns Avro {@link GenericRecord}s into {@link ObjectStoreDeleteOperation}s.
 * The record field holding the object id is configured through {@link #OBJECT_ID_FIELD}, a
 * required property.
 *
 * Supports objectIdField schema types string, int, long and bytes.
 */
@Alpha
public class ObjectStoreDeleteConverter extends ObjectStoreConverter<Schema, GenericRecord, ObjectStoreDeleteOperation> {

  @VisibleForTesting
  public static final String OBJECT_ID_FIELD = "gobblin.converter.objectstore.delete.objectIdField";

  private String objectIdField;

  @Override
  public ObjectStoreDeleteConverter init(WorkUnitState workUnit) {
    Preconditions.checkArgument(workUnit.contains(OBJECT_ID_FIELD),
        String.format("%s is a required property. ", OBJECT_ID_FIELD));
    this.objectIdField = workUnit.getProp(OBJECT_ID_FIELD);
    return this;
  }

  /**
   * Builds a delete operation keyed by the configured object-id field of the input record.
   *
   * @throws DataConversionException when the record has no value for the configured field
   */
  @Override
  public Iterable<ObjectStoreDeleteOperation> convertRecord(Class<?> outputSchema, GenericRecord inputRecord,
      WorkUnitState workUnit) throws DataConversionException {
    Optional<Object> fieldValue = AvroUtils.getFieldValue(inputRecord, this.objectIdField);
    if (!fieldValue.isPresent()) {
      throw new DataConversionException(String.format("Object Id field %s not found in record %s", this.objectIdField,
          inputRecord));
    }
    ObjectStoreDeleteOperation deleteOp =
        ObjectStoreOperationBuilder.deleteBuilder().withObjectId(asObjectIdBytes(fieldValue.get())).build();
    return new SingleRecordIterable<>(deleteOp);
  }

  /**
   * Encodes one object-id value as bytes: Utf8/String as UTF-8 text, Long/Integer as their
   * big-endian byte representation; anything else is assumed to already be a byte array.
   */
  private static byte[] asObjectIdBytes(Object value) {
    if (value instanceof Utf8) {
      return ((Utf8) value).getBytes();
    }
    if (value instanceof String) {
      return ((String) value).getBytes(Charsets.UTF_8);
    }
    if (value instanceof Long) {
      return Longs.toByteArray((Long) value);
    }
    if (value instanceof Integer) {
      return Ints.toByteArray((Integer) value);
    }
    return (byte[]) value;
  }
}
| 2,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.