index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy/row/DefaultRowSelectionPolicyFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.audit.values.policy.row;
import java.lang.reflect.InvocationTargetException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import com.google.common.base.Preconditions;
import com.typesafe.config.Config;
import org.apache.gobblin.audit.values.auditor.ValueAuditGenerator;
import org.apache.gobblin.audit.values.auditor.ValueAuditRuntimeMetadata;
import org.apache.gobblin.audit.values.policy.column.ColumnProjectionPolicy;
import org.apache.gobblin.util.ClassAliasResolver;
/**
 * Default factory class to create new {@link RowSelectionPolicy}s.
 *
 * <p>Thread-safe lazy singleton using the initialization-on-demand holder idiom:
 * obtain the instance via {@link #getInstance()}.
 */
@Slf4j
public class DefaultRowSelectionPolicyFactory {

  private static final String ROW_SELECTION_POLICY_CLASS_NAME_KEY = "class";

  // Resolves short aliases (e.g. "SelectAll") to fully qualified RowSelectionPolicy class names.
  private final ClassAliasResolver<RowSelectionPolicy> aliasResolver;

  private DefaultRowSelectionPolicyFactory() {
    this.aliasResolver = new ClassAliasResolver<>(RowSelectionPolicy.class);
  }

  /**
   * Create a new {@link RowSelectionPolicy} using the alias or canonical classname specified at
   * {@value #ROW_SELECTION_POLICY_CLASS_NAME_KEY} in the <code>config</code>.
   * The {@link RowSelectionPolicy} class MUST have an accessible constructor
   * <code>abc(Config config, TableMetadata tableMetadata, ColumnProjectionPolicy columnProjectionPolicy)</code>
   * <b>Note : must have the key {@value #ROW_SELECTION_POLICY_CLASS_NAME_KEY} set in <code>config</code> to create the {@link RowSelectionPolicy}</b>
   *
   * @param config job configs, must have the key {@value #ROW_SELECTION_POLICY_CLASS_NAME_KEY} set to create the {@link RowSelectionPolicy}
   * @param tableMetadata runtime table metadata
   * @param columnProjectionPolicy used by the {@link ValueAuditGenerator}
   *
   * @return a new instance of {@link RowSelectionPolicy}
   * @throws IllegalArgumentException if <code>config</code> is missing {@value #ROW_SELECTION_POLICY_CLASS_NAME_KEY}
   * @throws RuntimeException wrapping any reflective failure (class not found, no matching
   *         constructor, inaccessible or throwing constructor)
   */
  public RowSelectionPolicy create(Config config, ValueAuditRuntimeMetadata.TableMetadata tableMetadata, ColumnProjectionPolicy columnProjectionPolicy) {
    Preconditions.checkArgument(config.hasPath(ROW_SELECTION_POLICY_CLASS_NAME_KEY),
        "Missing required config key " + ROW_SELECTION_POLICY_CLASS_NAME_KEY);
    String policyClassName = config.getString(ROW_SELECTION_POLICY_CLASS_NAME_KEY);
    log.info("Using row selection class name/alias " + policyClassName);
    try {
      // ReflectiveOperationException covers ClassNotFoundException, NoSuchMethodException,
      // IllegalAccessException, InstantiationException and InvocationTargetException.
      return (RowSelectionPolicy) ConstructorUtils.invokeConstructor(
          Class.forName(this.aliasResolver.resolve(policyClassName)), config, tableMetadata, columnProjectionPolicy);
    } catch (ReflectiveOperationException e) {
      // Include the offending class name so misconfiguration is diagnosable from the message alone.
      throw new RuntimeException("Failed to instantiate RowSelectionPolicy " + policyClassName, e);
    }
  }

  /** Lazy holder: INSTANCE is created on first access to getInstance(), thread-safe via class init. */
  private static class DefaultRowSelectionPolicyFactoryHolder {
    private static final DefaultRowSelectionPolicyFactory INSTANCE = new DefaultRowSelectionPolicyFactory();
  }

  /** @return the singleton factory instance */
  public static DefaultRowSelectionPolicyFactory getInstance() {
    return DefaultRowSelectionPolicyFactoryHolder.INSTANCE;
  }
}
| 4,000 |
0 | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy/row/SelectAllRowSelectionPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.audit.values.policy.row;
import org.apache.avro.generic.GenericRecord;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.audit.values.auditor.ValueAuditRuntimeMetadata;
import org.apache.gobblin.audit.values.policy.column.ColumnProjectionPolicy;
/**
 * A {@link RowSelectionPolicy} that selects every row for auditing, unconditionally.
 */
@Alias(value = "SelectAll")
public class SelectAllRowSelectionPolicy extends AbstractRowSelectionPolicy {

  /**
   * @param config policy configuration (unused by this policy beyond the base class contract)
   * @param tableMetadata runtime metadata of the table being audited
   * @param columnProjectionPolicy projection policy shared with the audit generator
   */
  public SelectAllRowSelectionPolicy(Config config, ValueAuditRuntimeMetadata.TableMetadata tableMetadata, ColumnProjectionPolicy columnProjectionPolicy) {
    super(config, tableMetadata, columnProjectionPolicy);
  }

  /**
   * Always selects the row.
   *
   * @param genericRecord the candidate row (ignored)
   * @return <code>true</code> for every record
   */
  @Override
  public boolean shouldSelectRow(GenericRecord genericRecord) {
    // Unconditional: this policy audits the full table.
    return true;
  }
}
| 4,001 |
0 | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy | Create_ds/gobblin/gobblin-audit/src/main/java/org/apache/gobblin/audit/values/policy/row/AbstractRowSelectionPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.audit.values.policy.row;
import com.typesafe.config.Config;
import org.apache.gobblin.audit.values.auditor.ValueAuditGenerator;
import org.apache.gobblin.audit.values.auditor.ValueAuditRuntimeMetadata;
import org.apache.gobblin.audit.values.policy.column.ColumnProjectionPolicy;
/**
 * Base implementation of {@link RowSelectionPolicy} that holds the runtime
 * {@link ValueAuditRuntimeMetadata.TableMetadata} and the {@link ColumnProjectionPolicy}
 * used by the {@link ValueAuditGenerator}.
 * Concrete subclasses only need to implement
 * {@link RowSelectionPolicy#shouldSelectRow(org.apache.avro.generic.GenericRecord)}.
 */
public abstract class AbstractRowSelectionPolicy implements RowSelectionPolicy {

  protected final ValueAuditRuntimeMetadata.TableMetadata tableMetadata;
  protected final ColumnProjectionPolicy columnProjectionPolicy;

  /**
   * @param config policy configuration; part of the common constructor contract expected by the
   *        factory, but not retained by this base class
   * @param tableMetadata runtime metadata of the table being audited
   * @param columnProjectionPolicy projection policy shared with the audit generator
   */
  public AbstractRowSelectionPolicy(Config config, ValueAuditRuntimeMetadata.TableMetadata tableMetadata,
      ColumnProjectionPolicy columnProjectionPolicy) {
    this.columnProjectionPolicy = columnProjectionPolicy;
    this.tableMetadata = tableMetadata;
  }
}
| 4,002 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/TestFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import java.io.Closeable;
import java.io.IOException;
import java.util.Random;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import lombok.Data;
/**
 * A {@link SharedResourceFactory} used by broker unit tests. Produces {@link SharedResource}
 * instances that capture the key and config they were created with, and supports configuring
 * the auto-scope level via {@link #setAutoScopeLevel(Config, GobblinScopeTypes)}.
 */
public class TestFactory<S extends ScopeType<S>> implements SharedResourceFactory<TestFactory.SharedResource, TestResourceKey, S> {

  private static final Joiner JOINER = Joiner.on(".");
  private static final String AUTOSCOPE_AT = "autoscope.at";

  public static final String NAME = TestFactory.class.getSimpleName();

  /** Returns a copy of {@code config} with this factory's auto-scope level set to {@code level}. */
  public static Config setAutoScopeLevel(Config config, GobblinScopeTypes level) {
    String autoScopeKey = JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, NAME, AUTOSCOPE_AT);
    Config overlay = ConfigFactory.parseMap(ImmutableMap.of(autoScopeKey, level.name()));
    return overlay.withFallback(config);
  }

  @Override
  public String getName() {
    return NAME;
  }

  /** Wraps a freshly created {@link SharedResource} carrying the request's key and scoped config. */
  @Override
  public SharedResourceFactoryResponse<SharedResource>
  createResource(SharedResourcesBroker<S> broker, ScopedConfigView<S, TestResourceKey> config) {
    SharedResource resource = new SharedResource(config.getKey().getKey(), config.getConfig());
    return new ResourceInstance<>(resource);
  }

  /** Honors an explicitly configured auto-scope level; otherwise defaults to the broker's own scope. */
  @Override
  public S getAutoScope(SharedResourcesBroker<S> broker, ConfigView<S, TestResourceKey> config) {
    return config.getConfig().hasPath(AUTOSCOPE_AT)
        ? (S) GobblinScopeTypes.valueOf(config.getConfig().getString(AUTOSCOPE_AT))
        : broker.selfScope().getType();
  }

  /**
   * The shared object handed out by this factory. Each instance carries a random id so tests
   * can distinguish instances; close() is single-shot and fails loudly on a second call.
   */
  @Data
  public static class SharedResource implements Closeable {
    private final String key;
    private final Config config;
    private final long id = new Random().nextLong();
    private boolean closed = false;

    @Override
    public void close()
        throws IOException {
      // Intentionally strict: a double close indicates a broker lifecycle bug in the test.
      if (this.closed) {
        throw new RuntimeException("Already closed.");
      }
      this.closed = true;
    }
  }
}
| 4,003 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/GobblinBrokerCreationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.TaskScopeInstance;
/**
 * Tests for creation rules of {@link SharedResourcesBrokerImpl} hierarchies: valid
 * global -> job/container -> task chains, and the failure modes (sub-broker at a higher
 * scope, missing intermediate default scope, parents from different ancestor brokers).
 */
public class GobblinBrokerCreationTest {
@Test
public void testCreationOfBrokers() throws Exception {
// Correct creation behavior
Config config = ConfigFactory.empty();
// Build a global -> {job, container} -> task broker chain; the task broker has two parents.
SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
new SimpleScope<GobblinScopeTypes>(GobblinScopeTypes.GLOBAL, "myGlobalScope"));
SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
SharedResourcesBrokerImpl<GobblinScopeTypes>
containerBroker = topBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
SharedResourcesBrokerImpl<GobblinScopeTypes> taskBroker = jobBroker.newSubscopedBuilder(new TaskScopeInstance("taskabc"))
.withAdditionalParentBroker(containerBroker).build();
// The task broker reports its own scope and can resolve the container scope through its extra parent.
Assert.assertEquals(taskBroker.selfScope().getType(), GobblinScopeTypes.TASK);
Assert.assertEquals(((TaskScopeInstance) taskBroker.selfScope()).getTaskId(), "taskabc");
Assert.assertEquals(taskBroker.getScope(GobblinScopeTypes.CONTAINER).getType(), GobblinScopeTypes.CONTAINER);
Assert.assertEquals(((GobblinScopeInstance) taskBroker.getScope(GobblinScopeTypes.CONTAINER)).getScopeId(), "container");
}
@Test
public void testFailIfSubBrokerAtHigherScope() throws Exception {
Config config = ConfigFactory.empty();
SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
GobblinScopeTypes.GLOBAL.defaultScopeInstance());
SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
// A job-scoped broker must not be able to create a sub-broker at the higher INSTANCE scope.
try {
jobBroker.newSubscopedBuilder(new GobblinScopeInstance(GobblinScopeTypes.INSTANCE, "instance")).build();
Assert.fail();
} catch (IllegalArgumentException iae) {
// expected
}
}
@Test
public void testFailIfIntermediateScopeHasNoDefault() throws Exception {
Config config = ConfigFactory.empty();
SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
GobblinScopeTypes.GLOBAL.defaultScopeInstance());
// should throw an error if an intermediate scope does not have a default
try {
topBroker.newSubscopedBuilder(new TaskScopeInstance("taskxyz")).build();
Assert.fail();
} catch (IllegalArgumentException iae) {
// expected
}
}
@Test
public void testFailIfDifferentAncestors() throws Exception {
// Correct creation behavior
Config config = ConfigFactory.empty();
SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
GobblinScopeTypes.GLOBAL.defaultScopeInstance());
SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
// Second, unrelated top-level broker: its container broker shares no ancestor with jobBroker.
SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker2 = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
GobblinScopeTypes.GLOBAL.defaultScopeInstance());
SharedResourcesBrokerImpl<GobblinScopeTypes>
containerBroker = topBroker2.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
// Combining parents from different ancestor brokers must be rejected.
try {
jobBroker.newSubscopedBuilder(new TaskScopeInstance("taskxyz")).withAdditionalParentBroker(containerBroker).build();
Assert.fail();
} catch (IllegalArgumentException iae) {
// expected
}
}
}
| 4,004 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/DefaultGobblinBrokerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.util.Random;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.TaskScopeInstance;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import lombok.Data;
import lombok.Getter;
/**
 * Tests for the shared-resource behavior of {@link SharedResourcesBrokerImpl}: object sharing
 * across brokers and scopes, factory redirection, configuration injection, close-lifecycle
 * semantics, scoped views, explicit resource binding, and resource invalidation/expiry.
 */
public class DefaultGobblinBrokerTest {

  private static final Joiner JOINER = Joiner.on(".");

  @Test
  public void testSharedObjects() throws Exception {
    // Correct creation behavior
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes>
        containerBroker = topBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> taskBroker = jobBroker.newSubscopedBuilder(new TaskScopeInstance("taskabc"))
        .withAdditionalParentBroker(containerBroker).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> taskBroker2 = jobBroker.newSubscopedBuilder(new TaskScopeInstance("taskxyz"))
        .withAdditionalParentBroker(containerBroker).build();
    // create a shared resource
    TestFactory.SharedResource resource =
        taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB);
    Assert.assertEquals(resource.getKey(), "myKey");
    // using same broker with same scope and key returns same object
    Assert.assertEquals(taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB),
        resource);
    // using different broker with same scope and key returns same object
    Assert.assertEquals(taskBroker2.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB),
        resource);
    Assert.assertEquals(jobBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB),
        resource);
    // Using different key returns a different object
    Assert.assertNotEquals(taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("otherKey"), GobblinScopeTypes.JOB),
        resource);
    // Using different scope returns different object
    Assert.assertNotEquals(taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.TASK),
        resource);
    // Requesting unscoped resource returns different object
    Assert.assertNotEquals(taskBroker.getSharedResource(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey")),
        resource);
  }

  @Test
  public void testRedirect() throws Exception {
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    // create a shared resource through a factory that redirects creation to the root scope
    TestFactory.SharedResource resource =
        jobBroker.getSharedResourceAtScope(new TestFactoryWithRedirect<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB);
    Assert.assertEquals(resource.getKey(), "myKey");
    // Both the redirecting factory and the plain TestFactory resolve to the same root-scoped object.
    Assert.assertEquals(topBroker.getSharedResourceAtScope(new TestFactoryWithRedirect<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.GLOBAL),
        resource);
    Assert.assertEquals(topBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.GLOBAL),
        resource);
  }

  @Test
  public void testConfigurationInjection() throws Exception {
    String key = "myKey";
    // Overlapping config entries at factory, scope, key and scope+key levels: the most
    // specific (scope+key) entry is expected to win for "key2".
    Config config = ConfigFactory.parseMap(ImmutableMap.of(
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key1"), "value1",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key2"), "value2",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), "key2"), "value2scope",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, key, "key2"), "value2key",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), key, "key2"), "value2scopekey"
    ));
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes>
        containerBroker = topBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
    // create a shared resource
    TestFactory.SharedResource resource =
        containerBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.CONTAINER);
    Assert.assertEquals(resource.getConfig().getString("key1"), "value1");
    Assert.assertEquals(resource.getConfig().getString("key2"), "value2scopekey");
  }

  @Test
  public void testScoping() throws Exception {
    // Correct creation behavior
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    // Ancestor scopes (INSTANCE) resolve to default instances; the broker's own JOB scope
    // carries the job id it was built with.
    Assert.assertEquals(jobBroker.getScope(GobblinScopeTypes.INSTANCE).getType(), GobblinScopeTypes.INSTANCE);
    Assert.assertEquals(jobBroker.getScope(GobblinScopeTypes.INSTANCE).getClass(), GobblinScopeInstance.class);
    Assert.assertEquals(jobBroker.getScope(GobblinScopeTypes.INSTANCE), GobblinScopeTypes.INSTANCE.defaultScopeInstance());
    Assert.assertEquals(jobBroker.getScope(GobblinScopeTypes.JOB).getType(), GobblinScopeTypes.JOB);
    Assert.assertEquals(jobBroker.getScope(GobblinScopeTypes.JOB).getClass(), JobScopeInstance.class);
    Assert.assertEquals(((JobScopeInstance) jobBroker.getScope(GobblinScopeTypes.JOB)).getJobId(), "job123");
    try {
      jobBroker.getScope(GobblinScopeTypes.TASK);
      Assert.fail();
    } catch (NoSuchScopeException nsse) {
      // should throw no scope exception
    }
  }

  @Test
  public void testLifecycle() throws Exception {
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes>
        containerBroker = topBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> taskBroker = jobBroker.newSubscopedBuilder(new TaskScopeInstance("taskabc"))
        .withAdditionalParentBroker(containerBroker).build();
    // create a shared resource
    TestFactory.SharedResource jobResource =
        taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB);
    TestFactory.SharedResource taskResource =
        taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.TASK);
    Assert.assertFalse(jobResource.isClosed());
    Assert.assertFalse(taskResource.isClosed());
    taskBroker.close();
    // only resources at lower scopes than task should be closed
    Assert.assertFalse(jobResource.isClosed());
    Assert.assertTrue(taskResource.isClosed());
    // since taskResource has been closed, broker should return a new instance of the object
    TestFactory.SharedResource taskResource2 =
        taskBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.TASK);
    Assert.assertNotEquals(taskResource, taskResource2);
    topBroker.close();
    Assert.assertTrue(jobResource.isClosed());
    Assert.assertTrue(taskResource.isClosed());
  }

  @Test
  public void testScopedView() throws Exception {
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    // A scoped view re-targets resource resolution at the requested ancestor scope...
    SharedResourcesBrokerImpl<GobblinScopeTypes> instanceView = jobBroker.getScopedView(GobblinScopeTypes.INSTANCE);
    Assert.assertEquals(instanceView.selfScope().getType(), GobblinScopeTypes.INSTANCE);
    TestFactory.SharedResource resource =
        instanceView.getSharedResource(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"));
    TestFactory.SharedResource resource2 =
        jobBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.INSTANCE);
    Assert.assertEquals(resource, resource2);
    // ...but must not permit creating sub-brokers.
    try {
      instanceView.newSubscopedBuilder(new JobScopeInstance("otherJob", "job234"));
      Assert.fail();
    } catch (UnsupportedOperationException exc) {
      // Expected
    }
  }

  @Test
  public void testExplicitBinding() throws Exception {
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<SimpleScopeType> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        SimpleScopeType.GLOBAL.defaultScopeInstance());
    // Factory whose createResource always throws: the bound value must be served without
    // ever invoking the factory.
    SharedResourceFactory<Long, EmptyKey, SimpleScopeType> factory = new SharedResourceFactory<Long, EmptyKey, SimpleScopeType>() {
      @Override
      public String getName() {
        return "myTestFactory";
      }

      @Override
      public SharedResourceFactoryResponse<Long> createResource(SharedResourcesBroker<SimpleScopeType> broker,
          ScopedConfigView<SimpleScopeType, EmptyKey> config) throws NotConfiguredException {
        throw new UnsupportedOperationException();
      }

      @Override
      public SimpleScopeType getAutoScope(SharedResourcesBroker<SimpleScopeType> broker,
          ConfigView<SimpleScopeType, EmptyKey> config) {
        return broker.selfScope().getType();
      }
    };
    // 10L (uppercase suffix) and Long.valueOf instead of the deprecated new Long(...) constructor.
    topBroker.bindSharedResourceAtScope(factory, new EmptyKey(), SimpleScopeType.GLOBAL, 10L);
    Assert.assertEquals(topBroker.getSharedResource(factory, new EmptyKey()), Long.valueOf(10));
  }

  @Test
  public void testExpiringResource() throws Exception {
    Config config = ConfigFactory.empty();
    SharedResourcesBrokerImpl<SimpleScopeType> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        SimpleScopeType.GLOBAL.defaultScopeInstance());
    InvalidatableResourceFactory factory = new InvalidatableResourceFactory();
    long value = topBroker.getSharedResource(factory, new EmptyKey());
    Assert.assertEquals(topBroker.getSharedResource(factory, new EmptyKey()), Long.valueOf(value));
    // Invalidate the cached entry: the broker must create (and then cache) a fresh resource.
    factory.getLastResourceEntry().setValid(false);
    Assert.assertNotEquals(topBroker.getSharedResource(factory, new EmptyKey()), value);
    value = topBroker.getSharedResource(factory, new EmptyKey());
    Assert.assertEquals(topBroker.getSharedResource(factory, new EmptyKey()), Long.valueOf(value));
  }

  /** Resource entry whose validity tests can flip off to force re-creation. */
  @Data
  private static class MyResourceEntry<T> implements ResourceEntry<T> {
    private final T resource;
    boolean valid = true;

    @Override
    public void onInvalidate() {
    }
  }

  /** Factory producing random Long resources and exposing the last entry so tests can invalidate it. */
  private static class InvalidatableResourceFactory implements SharedResourceFactory<Long, EmptyKey, SimpleScopeType> {
    @Getter
    MyResourceEntry<Long> lastResourceEntry;

    @Override
    public String getName() {
      return "myTestFactory";
    }

    @Override
    public SharedResourceFactoryResponse<Long> createResource(SharedResourcesBroker<SimpleScopeType> broker,
        ScopedConfigView<SimpleScopeType, EmptyKey> config) throws NotConfiguredException {
      MyResourceEntry<Long> resourceEntry = new MyResourceEntry<>(new Random().nextLong());
      lastResourceEntry = resourceEntry;
      return resourceEntry;
    }

    @Override
    public SimpleScopeType getAutoScope(SharedResourcesBroker<SimpleScopeType> broker,
        ConfigView<SimpleScopeType, EmptyKey> config) {
      return broker.selfScope().getType();
    }
  }
}
| 4,005 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/TestFactoryWithRedirect.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
/**
 * A {@link TestFactory} variant that never creates the resource itself: it redirects creation
 * to the root scope, delegating to a plain {@link TestFactory}.
 */
public class TestFactoryWithRedirect<S extends ScopeType<S>> extends TestFactory<S> {

  /**
   * Returns a {@link ResourceCoordinate} pointing the broker at the root scope with a
   * delegate {@link TestFactory}, instead of a concrete resource.
   *
   * <p>Properly parameterized override (no raw types), which removes the unchecked cast on
   * the key; the cast on the root scope mirrors the project's scope-type API.
   */
  @Override
  public SharedResourceFactoryResponse<SharedResource> createResource(SharedResourcesBroker<S> broker,
      ScopedConfigView<S, TestResourceKey> config) {
    return new ResourceCoordinate<>(new TestFactory<S>(), config.getKey(), (S) config.getScope().rootScope());
  }
}
| 4,006 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/AutoscopedFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.TaskScopeInstance;
/**
 * Tests for broker auto-scoping: when a resource is requested without an explicit scope,
 * the broker resolves it at the factory's configured auto-scope level.
 */
public class AutoscopedFactoryTest {

  /** An autoscoped lookup must resolve to the configured auto-scope level (JOB here). */
  @Test
  public void testAutoscoping() throws Exception {
    Config config = TestFactory.setAutoScopeLevel(ConfigFactory.empty(), GobblinScopeTypes.JOB);

    // Build a GLOBAL -> {JOB, CONTAINER} -> TASK broker hierarchy, the task broker
    // having both the job and container brokers as parents.
    SharedResourcesBrokerImpl<GobblinScopeTypes> rootBroker =
        SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
            GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobLevelBroker =
        rootBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> containerLevelBroker =
        rootBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> taskLevelBroker =
        jobLevelBroker.newSubscopedBuilder(new TaskScopeInstance("taskabc"))
            .withAdditionalParentBroker(containerLevelBroker).build();

    TestFactory.SharedResource atJobScope =
        taskLevelBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.JOB);
    TestFactory.SharedResource atTaskScope =
        taskLevelBroker.getSharedResourceAtScope(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"), GobblinScopeTypes.TASK);
    TestFactory.SharedResource autoscoped =
        taskLevelBroker.getSharedResource(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"));

    // Auto-scope level is JOB, so the scopeless lookup must return the JOB-scoped instance.
    Assert.assertEquals(atJobScope, autoscoped);
    Assert.assertNotEquals(atTaskScope, autoscoped);
  }

  /**
   * Closing a broker must only close autoscoped resources owned by that broker's scope:
   * a JOB-autoscoped resource survives a task-broker close but not a job-broker close.
   */
  @Test
  public void testAutoscopedResourcesOnlyClosedInCorrectScope() throws Exception {
    Config config = TestFactory.setAutoScopeLevel(ConfigFactory.empty(), GobblinScopeTypes.JOB);

    SharedResourcesBrokerImpl<GobblinScopeTypes> rootBroker =
        SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
            GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobLevelBroker =
        rootBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> containerLevelBroker =
        rootBroker.newSubscopedBuilder(GobblinScopeTypes.CONTAINER.defaultScopeInstance()).build();
    SharedResourcesBrokerImpl<GobblinScopeTypes> taskLevelBroker =
        jobLevelBroker.newSubscopedBuilder(new TaskScopeInstance("taskabc"))
            .withAdditionalParentBroker(containerLevelBroker).build();

    TestFactory.SharedResource autoscoped =
        taskLevelBroker.getSharedResource(new TestFactory<GobblinScopeTypes>(), new TestResourceKey("myKey"));

    // The resource autoscopes at JOB level, so closing the task broker must not close it.
    taskLevelBroker.close();
    Assert.assertFalse(autoscoped.isClosed());

    // Closing the owning job-level broker does close it.
    jobLevelBroker.close();
    Assert.assertTrue(autoscoped.isClosed());
  }
}
| 4,007 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/TestResourceKey.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Trivial {@link SharedResourceKey} used by the broker unit tests: a simple wrapper
 * around a string. Two keys are equal iff their underlying strings are equal
 * (equals/hashCode generated by Lombok's {@code @Data}, which already includes
 * {@code @EqualsAndHashCode} — the previously duplicated annotation was redundant).
 */
@Data
public class TestResourceKey implements SharedResourceKey {
  private final String key;

  /** The key string itself doubles as this key's configuration sub-path. */
  @Override
  public String toConfigurationKey() {
    return this.key;
  }
}
| 4,008 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/SharedResourcesBrokerFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import lombok.RequiredArgsConstructor;
/**
 * Unit tests for {@link SharedResourcesBrokerFactory}: visibility and re-registration of the
 * process-wide implicit broker across threads, loading broker configuration from a classpath
 * resource, and extraction of broker config from the global and extra namespaces.
 *
 * NOTE(review): the implicit broker appears to be process-global state; these tests are
 * order-sensitive with respect to {@link SharedResourcesBrokerFactory#registerImplicitBroker}.
 */
public class SharedResourcesBrokerFactoryTest {
  // Implicit broker captured at class-load time, used to check getImplicitBroker() stability.
  private static final SharedResourcesBroker<?> IMPLICIT = SharedResourcesBrokerFactory.getImplicitBroker();
  /**
   * Verifies the implicit broker seen on this thread matches the one captured at class load,
   * and runs {@link ImplicitBrokerTest} on a worker thread to exercise cross-thread visibility
   * and re-registration.
   */
  @Test
  public void testImplicitBroker() {
    Assert.assertEquals(SharedResourcesBrokerFactory.getImplicitBroker(), IMPLICIT);
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    Future<?> future = executorService.submit(new ImplicitBrokerTest());
    try {
      // Propagates any assertion error raised inside the worker thread.
      future.get();
    } catch (ExecutionException | InterruptedException ee) {
      throw new RuntimeException(ee);
    }
    executorService.shutdownNow();
  }
  /**
   * A broker conf file named via {@code BROKER_CONF_FILE_KEY} is loaded from the classpath
   * and its entries become visible through the broker's config view.
   */
  @Test
  public void testLoadingOfClasspath() {
    Config config =
        ConfigFactory.parseMap(ImmutableMap.of(SharedResourcesBrokerFactory.BROKER_CONF_FILE_KEY, "/broker/testBroker.conf"));
    SharedResourcesBrokerImpl<SimpleScopeType> broker =
        SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config, SimpleScopeType.GLOBAL.defaultScopeInstance());
    ConfigView configView = broker.getConfigView(null, null, "factory");
    Assert.assertTrue(configView.getConfig().hasPath("testKey"));
    Assert.assertEquals(configView.getConfig().getString("testKey"), "testValue");
  }
  /**
   * Broker config is assembled from the default "gobblin.broker" namespace plus any extra
   * namespaces listed under "gobblin.brokerNamespaces", with earlier namespaces winning
   * when keys collide.
   */
  @Test
  public void testGetBrokerConfig() {
    Map<String, String> srcConfigMap = new HashMap<>();
    srcConfigMap.put("gobblin.broker.key1", "value1");
    // Test global namespace, "gobblin.broker"
    Config brokerConfig = SharedResourcesBrokerFactory.getBrokerConfig(ConfigFactory.parseMap(srcConfigMap));
    Config expectedConfig = ConfigFactory.parseMap(ImmutableMap.of("key1", "value1"));
    Assert.assertEquals(brokerConfig, expectedConfig);
    // Test extra namespace, "gobblin.shared"
    srcConfigMap.put("gobblin.shared.key2", "value2");
    srcConfigMap.put("gobblin.brokerNamespaces", "gobblin.shared");
    brokerConfig = SharedResourcesBrokerFactory.getBrokerConfig(ConfigFactory.parseMap(srcConfigMap));
    expectedConfig = ConfigFactory.parseMap(ImmutableMap.of("key1", "value1","key2", "value2"));
    Assert.assertEquals(brokerConfig, expectedConfig);
    // Test a list of extra namespaces, configurations are respected in order
    srcConfigMap.put("gobblin.shared.key2", "value2");
    srcConfigMap.put("gobblin.shared.key3", "value3");
    srcConfigMap.put("gobblin.shared2.key3", "value3x");
    srcConfigMap.put("gobblin.shared2.key4", "value4");
    srcConfigMap.put("gobblin.brokerNamespaces", "gobblin.shared, gobblin.shared2");
    brokerConfig = SharedResourcesBrokerFactory.getBrokerConfig(ConfigFactory.parseMap(srcConfigMap));
    // "key3" resolves to "value3" (from gobblin.shared), not "value3x": earlier namespace wins.
    expectedConfig = ConfigFactory.parseMap(ImmutableMap.of("key1", "value1", "key2", "value2",
        "key3", "value3", "key4", "value4"));
    Assert.assertEquals(brokerConfig, expectedConfig);
  }
  /**
   * Worker-thread body: asserts it observes the class-load implicit broker, registers a fresh
   * top-level broker as the implicit one, then verifies a nested worker thread observes the
   * newly registered broker.
   */
  public static class ImplicitBrokerTest implements Runnable {
    @Override
    public void run() {
      Assert.assertEquals(SharedResourcesBrokerFactory.getImplicitBroker(), IMPLICIT);
      SharedResourcesBroker<SimpleScopeType> broker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(
          ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());
      Assert.assertNotEquals(SharedResourcesBrokerFactory.getImplicitBroker(), broker);
      SharedResourcesBrokerFactory.registerImplicitBroker(broker);
      Assert.assertEquals(SharedResourcesBrokerFactory.getImplicitBroker(), broker);
      ExecutorService executorService = Executors.newSingleThreadExecutor();
      Future<?> future = executorService.submit(new InnerImplicitBrokerTest(broker));
      try {
        future.get();
      } catch (ExecutionException | InterruptedException ee) {
        throw new RuntimeException(ee);
      }
      executorService.shutdownNow();
    }
  }
  /** Asserts that the given broker is the implicit broker as seen from yet another thread. */
  @RequiredArgsConstructor
  public static class InnerImplicitBrokerTest implements Runnable {
    private final SharedResourcesBroker<?> expectedBroker;
    @Override
    public void run() {
      Assert.assertEquals(this.expectedBroker, SharedResourcesBrokerFactory.getImplicitBroker());
    }
  }
}
| 4,009 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/GobblinBrokerConfTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
/**
 * Tests for broker configuration resolution. Config entries can be qualified by factory,
 * scope, resource key, or scope+key; the broker must inject each entry into the matching
 * sub-view and resolve the merged view to the most specific value. Sub-broker overrides
 * must only take effect at the sub-broker's own scope and below.
 */
public class GobblinBrokerConfTest {
  // Joins path segments with '.' to build fully-qualified broker config keys.
  private static final Joiner JOINER = Joiner.on(".");
  /** Each qualification level (factory / scoped / keyed / scoped+keyed) lands in its own view. */
  @Test
  public void testCorrectConfigInjection() {
    String key = "myKey";
    Config config = ConfigFactory.parseMap(ImmutableMap.of(
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key1"), "value1",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key2"), "value2",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), "key2"), "value2scope",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, key, "key2"), "value2key",
        JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), key, "key2"), "value2scopekey"
    ));
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    KeyedScopedConfigViewImpl<GobblinScopeTypes, TestResourceKey> configView =
        topBroker.getConfigView(GobblinScopeTypes.CONTAINER, new TestResourceKey(key), TestFactory.NAME);
    Assert.assertEquals(configView.getScope(), GobblinScopeTypes.CONTAINER);
    Assert.assertEquals(configView.getKey().toConfigurationKey(), key);
    // Each sub-view exposes exactly its own qualification level.
    Assert.assertEquals(configView.getKeyedConfig().getString("key2"), "value2key");
    Assert.assertEquals(configView.getScopedConfig().getString("key2"), "value2scope");
    Assert.assertEquals(configView.getKeyedScopedConfig().getString("key2"), "value2scopekey");
    Assert.assertEquals(configView.getFactorySpecificConfig().getString("key1"), "value1");
    Assert.assertEquals(configView.getFactorySpecificConfig().getString("key2"), "value2");
    // The merged view resolves the most specific entry (scope+key), falling back otherwise.
    Assert.assertEquals(configView.getConfig().getString("key2"), "value2scopekey");
    Assert.assertEquals(configView.getConfig().getString("key1"), "value1");
    // No TASK-scoped entry exists, so the keyed (but unscoped) value wins at TASK scope.
    configView =
        topBroker.getConfigView(GobblinScopeTypes.TASK, new TestResourceKey(key), TestFactory.NAME);
    Assert.assertEquals(configView.getConfig().getString("key2"), "value2key");
  }
  /** Sub-broker overrides apply at the sub-broker's scope (JOB) but not at unrelated scopes. */
  @Test
  public void testOverrides() {
    String key = "myKey";
    // Correct creation behavior
    Config config = ConfigFactory.parseMap(ImmutableMap.<String, String>builder()
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key1"), "value1")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key2"), "value2")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), "key2"), "value2scope")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, key, "key2"), "value2key")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), key, "key2"), "value2scopekey")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.JOB.name(), "key2"), "value2scope")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.JOB.name(), key, "key2"), "value2scopekey")
        .build());
    SharedResourcesBrokerImpl<GobblinScopeTypes> topBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
        GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    // Same keys with "_o"-suffixed values, installed as overrides on the job sub-broker only.
    Config overrides = ConfigFactory.parseMap(ImmutableMap.<String, String>builder()
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key1"), "value1_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, "key2"), "value2_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), "key2"), "value2scope_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, key, "key2"), "value2key_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.CONTAINER.name(), key, "key2"), "value2scopekey_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.JOB.name(), "key2"), "value2scope_o")
        .put(JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, TestFactory.NAME, GobblinScopeTypes.JOB.name(), key, "key2"), "value2scopekey_o")
        .build());
    SharedResourcesBrokerImpl<GobblinScopeTypes> jobBroker =
        topBroker.newSubscopedBuilder(new JobScopeInstance("myJob", "job123")).
        withOverridingConfig(overrides).build();
    // At CONTAINER scope (not under the job sub-broker) the original values are visible.
    ScopedConfigView<GobblinScopeTypes, TestResourceKey> configView =
        jobBroker.getConfigView(GobblinScopeTypes.CONTAINER, new TestResourceKey(key), TestFactory.NAME);
    Assert.assertEquals(configView.getConfig().getString("key1"), "value1");
    Assert.assertEquals(configView.getConfig().getString("key2"), "value2scopekey");
    // At JOB scope the override wins for the scoped+keyed entry.
    configView =
        jobBroker.getConfigView(GobblinScopeTypes.JOB, new TestResourceKey(key), TestFactory.NAME);
    Assert.assertEquals(configView.getConfig().getString("key1"), "value1");
    Assert.assertEquals(configView.getConfig().getString("key2"), "value2scopekey_o");
  }
}
| 4,010 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/broker/KeyedScopedConfigViewImplTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
/**
 * Unit test for {@link KeyedScopedConfigViewImpl}: config entries qualified by key, scope,
 * or scope+key must surface in the corresponding sub-view, and the merged view must resolve
 * to the most specific value available.
 */
public class KeyedScopedConfigViewImplTest {

  private static final Joiner DOT_JOINER = Joiner.on(".");

  @Test
  public void test() {
    String resourceKey = "myKey";

    // One entry at every level of specificity: factory-wide, keyed, scoped, and scoped+keyed.
    Config rawConfig = ConfigFactory.parseMap(ImmutableMap.<String, String>builder()
        .put("key1", "value1")
        .put("key2", "value2")
        .put(DOT_JOINER.join(resourceKey, "key2"), "value2key")
        .put(DOT_JOINER.join(GobblinScopeTypes.JOB.name(), "key2"), "value2scope")
        .put(DOT_JOINER.join(GobblinScopeTypes.JOB.name(), resourceKey, "key2"), "value2scopekey")
        .build());

    KeyedScopedConfigViewImpl<GobblinScopeTypes, TestResourceKey> view =
        new KeyedScopedConfigViewImpl<>(GobblinScopeTypes.JOB, new TestResourceKey(resourceKey), TestFactory.NAME, rawConfig);

    Assert.assertEquals(view.getScope(), GobblinScopeTypes.JOB);
    Assert.assertEquals(view.getKey().toConfigurationKey(), resourceKey);

    // Each sub-view exposes exactly its own qualification level.
    Assert.assertEquals(view.getFactorySpecificConfig().getString("key1"), "value1");
    Assert.assertEquals(view.getFactorySpecificConfig().getString("key2"), "value2");
    Assert.assertEquals(view.getKeyedConfig().getString("key2"), "value2key");
    Assert.assertEquals(view.getScopedConfig().getString("key2"), "value2scope");
    Assert.assertEquals(view.getKeyedScopedConfig().getString("key2"), "value2scopekey");

    // The merged view resolves "key2" to the most specific (scope+key) value.
    Assert.assertEquals(view.getConfig().getString("key2"), "value2scopekey");
    Assert.assertEquals(view.getConfig().getString("key1"), "value1");
  }
}
| 4,011 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/ForkOperatorUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * Unit tests for {@link ForkOperatorUtils}: property-name and path derivation for fork
 * branches.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.util"})
public class ForkOperatorUtilsTest {

  private static final String FORK_BRANCH_NAME_0 = "fork_foo_0";
  private static final String FORK_BRANCH_NAME_1 = "fork_foo_1";
  private static final String PROPERTY_FOO = "foo";
  private static final String PATH_FOO = "foo";

  @Test
  public void testGetPropertyNameForBranch() {
    String base = PROPERTY_FOO;

    // Branch index < 0 means "not forked": the property name passes through unchanged.
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, -1), base);
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 0), base + ".0");
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 1), base + ".1");

    // With an explicit branch count: a single branch keeps the plain name, while two or
    // more branches append the branch index.
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 0, 0), base);
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 1, 0), base);
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 2, 0), base + ".0");
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(base, 2, 1), base + ".1");
  }

  @Test
  public void testGetPathForBranch() {
    State state = new State();
    state.setProp(ConfigurationKeys.FORK_BRANCH_NAME_KEY + ".0", FORK_BRANCH_NAME_0);
    state.setProp(ConfigurationKeys.FORK_BRANCH_NAME_KEY + ".1", FORK_BRANCH_NAME_1);

    // Each branch gets its configured branch name appended as a sub-directory.
    String branch0Path = ForkOperatorUtils.getPathForBranch(state, PATH_FOO, 2, 0);
    String branch1Path = ForkOperatorUtils.getPathForBranch(state, PATH_FOO, 2, 1);
    Assert.assertEquals(branch0Path, PATH_FOO + "/" + FORK_BRANCH_NAME_0);
    Assert.assertEquals(branch1Path, PATH_FOO + "/" + FORK_BRANCH_NAME_1);
  }

  /**
   * Test for {@link ForkOperatorUtils#getPropertyNameForBranch(WorkUnitState, String)}.
   */
  @Test
  public void testGetPropertyNameForBranchWithWorkUnitState() {
    WorkUnitState workUnitState = new WorkUnitState();
    workUnitState.setProp(PROPERTY_FOO, PATH_FOO);

    // No fork id key set: the original property name is preserved.
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(workUnitState, PROPERTY_FOO), PROPERTY_FOO);

    // Fork id of -1 also preserves the original property name.
    workUnitState.setProp(ConfigurationKeys.FORK_BRANCH_ID_KEY, -1);
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(workUnitState, PROPERTY_FOO), PROPERTY_FOO);

    // Fork id of 0 appends ".0" to the property name.
    workUnitState.setProp(ConfigurationKeys.FORK_BRANCH_ID_KEY, 0);
    Assert.assertEquals(ForkOperatorUtils.getPropertyNameForBranch(workUnitState, PROPERTY_FOO), PROPERTY_FOO + ".0");
  }
}
| 4,012 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/ClustersNamesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link ClustersNames}. Relies on the ClustersNames.properties resource
 * for the registered URL-to-name mappings.
 */
public class ClustersNamesTest {

  private final ClustersNames resolver = ClustersNames.getInstance();

  @Test
  public void testRegisteredUrls() {
    // URLs explicitly registered in ClustersNames.properties resolve to their logical names.
    Assert.assertEquals(resolver.getClusterName("http://cluster1-rm.some.company.com"), "cluster1");
    Assert.assertEquals(resolver.getClusterName("http://cluster2-rm.some.company.com:12345"), "cluster2");
    Assert.assertEquals(resolver.getClusterName("http://coloc1-some-identifier.some.company.com:8032"),
        "cluster-no-scheme-with-port");
    Assert.assertEquals(resolver.getClusterName("https://coloc1-some-identifier.some.company.com:8032"),
        "cluster-no-scheme-with-port");
  }

  @Test
  public void testHostNameWithoutScheme() {
    // An unregistered bare host resolves to itself; registered hosts resolve without a scheme.
    Assert.assertEquals(resolver.getClusterName("cluster1-rm.some.company.com"),
        "cluster1-rm.some.company.com");
    Assert.assertEquals(resolver.getClusterName("cluster-host-name-4.some.company.com"), "cluster4");
    Assert.assertEquals(resolver.getClusterName("coloc1-some-identifier.some.company.com:8032"),
        "cluster-no-scheme-with-port");
  }

  @Test
  public void testUnregisteredUrl() {
    // Unregistered URLs fall back to the host name.
    Assert.assertEquals(resolver.getClusterName("http://nonexistent-cluster-rm.some.company.com:12345"),
        "nonexistent-cluster-rm.some.company.com");
  }

  @Test
  public void testPortSpecificOverrides() {
    // The same host may map to different cluster names depending on the port.
    Assert.assertEquals(resolver.getClusterName("http://cluster-host-name-4.some.company.com/"), "cluster4");
    Assert.assertEquals(resolver.getClusterName("http://cluster-host-name-4.some.company.com:12345"), "cluster4");
    Assert.assertEquals(resolver.getClusterName("http://cluster-host-name-4.some.company.com:789"),
        "cluster4-custom-port");
  }

  @Test
  public void testLocalPaths() {
    // file: URIs resolve to "localhost".
    Assert.assertEquals(resolver.getClusterName("file:///"), "localhost");
    Assert.assertEquals(resolver.getClusterName("file:/home/test"), "localhost");
  }

  @Test
  public void testEmptyNames() {
    Assert.assertEquals(resolver.getClusterName(""), "");
    // The String cast disambiguates the overload for a null argument.
    Assert.assertNull(resolver.getClusterName((String) null));
  }

  @Test
  public void testInvalidUrls() {
    // Characters that cannot appear in a valid URL are replaced with underscores.
    Assert.assertEquals(resolver.getClusterName("uri:fancy-uri"), "uri_fancy-uri");
    Assert.assertEquals(resolver.getClusterName("test/path"), "test_path");
    Assert.assertEquals(resolver.getClusterName("http://host/?s=^test"), "http___host__s__test");
  }
}
| 4,013 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/LoggingUncaughtExceptionHandlerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
/**
* Unit tests for {@link LoggingUncaughtExceptionHandler}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.util"})
public class LoggingUncaughtExceptionHandlerTest {
@Test(expectedExceptions = RuntimeException.class)
public void testUncaughtException() {
Logger logger = Mockito.mock(Logger.class);
Thread thread = new Thread();
thread.setName("foo");
RuntimeException runtimeException = new RuntimeException();
String errorMessage = String.format("Thread %s threw an uncaught exception: %s", thread, runtimeException);
Mockito.doThrow(runtimeException).when(logger).error(errorMessage, runtimeException);
Thread.UncaughtExceptionHandler uncaughtExceptionHandler = new LoggingUncaughtExceptionHandler(Optional.of(logger));
uncaughtExceptionHandler.uncaughtException(thread, runtimeException);
}
}
| 4,014 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/FileListUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Sets;
/**
* Unit tests for the job configuration file monitor in {@link org.apache.gobblin.util.FileListUtils}.
*/
@Test(groups = { "gobblin.util" })
public class FileListUtilsTest {
private static final String FILE_UTILS_TEST_DIR = "gobblin-utility/src/test/resources/";
private static final String TEST_FILE_NAME1 = "test1";
private static final String TEST_FILE_NAME2 = "test2";
@Test
public void testListFilesRecursively() throws IOException {
FileSystem localFs = FileSystem.getLocal(new Configuration());
Path baseDir = new Path(FILE_UTILS_TEST_DIR, "fileListTestDir1");
try {
if (localFs.exists(baseDir)) {
localFs.delete(baseDir, true);
}
localFs.mkdirs(baseDir);
localFs.create(new Path(baseDir, TEST_FILE_NAME1));
Path subDir = new Path(baseDir, "subDir");
localFs.mkdirs(subDir);
localFs.create(new Path(subDir, TEST_FILE_NAME2));
List<FileStatus> testFiles = FileListUtils.listFilesRecursively(localFs, baseDir);
Assert.assertEquals(2, testFiles.size());
Set<String> fileNames = Sets.newHashSet();
for (FileStatus testFileStatus : testFiles) {
fileNames.add(testFileStatus.getPath().getName());
}
Assert.assertTrue(fileNames.contains(TEST_FILE_NAME1) && fileNames.contains(TEST_FILE_NAME2));
} finally {
localFs.delete(baseDir, true);
}
}
@Test
public void testListPathsRecursively() throws IOException {
FileSystem localFs = FileSystem.getLocal(new Configuration());
Path baseDir = new Path(FILE_UTILS_TEST_DIR, "fileListTestDir2");
try {
if (localFs.exists(baseDir)) {
localFs.delete(baseDir, true);
}
localFs.mkdirs(baseDir);
localFs.create(new Path(baseDir, TEST_FILE_NAME1));
Path subDir = new Path(baseDir, "subDir");
localFs.mkdirs(subDir);
localFs.create(new Path(subDir, TEST_FILE_NAME2));
List<FileStatus> testFiles = FileListUtils.listPathsRecursively(localFs, baseDir, new PathFilter() {
@Override
public boolean accept(Path path) {
return true;
}
});
Assert.assertEquals(4, testFiles.size());
Set<String> fileNames = Sets.newHashSet();
for (FileStatus testFileStatus : testFiles) {
fileNames.add(testFileStatus.getPath().getName());
}
Set<String> expectedFileNames = Sets.newHashSet();
expectedFileNames.add(baseDir.getName());
expectedFileNames.add(subDir.getName());
expectedFileNames.add(TEST_FILE_NAME1);
expectedFileNames.add(TEST_FILE_NAME2);
Assert.assertEquals(fileNames, expectedFileNames);
} finally {
localFs.delete(baseDir, true);
}
}
/**
 * Verifies that listMostNestedPathRecursively returns leaf entries only: the two
 * files plus the two empty directories, but not the intermediate directories.
 */
@Test
public void testListMostNestedPathRecursively() throws IOException {
  FileSystem fs = FileSystem.getLocal(new Configuration());
  Path root = new Path(FILE_UTILS_TEST_DIR, "fileListTestDir3");
  String emptyDir1 = "emptyDir1";
  String emptyDir2 = "emptyDir2";
  try {
    // Start from a clean directory in case a previous run left state behind.
    if (fs.exists(root)) {
      fs.delete(root, true);
    }
    fs.mkdirs(root);
    fs.create(new Path(root, TEST_FILE_NAME1));
    fs.mkdirs(new Path(root, emptyDir1));
    Path nested = new Path(root, "subDir");
    fs.mkdirs(nested);
    fs.create(new Path(nested, TEST_FILE_NAME2));
    fs.mkdirs(new Path(nested, emptyDir2));

    List<FileStatus> listed = FileListUtils.listMostNestedPathRecursively(fs, root);
    Assert.assertEquals(4, listed.size());

    Set<String> actualNames = Sets.newHashSet();
    for (FileStatus status : listed) {
      actualNames.add(status.getPath().getName());
    }
    Set<String> expectedNames = Sets.newHashSet(emptyDir1, emptyDir2, TEST_FILE_NAME1, TEST_FILE_NAME2);
    Assert.assertEquals(actualNames, expectedNames);
  } finally {
    fs.delete(root, true);
  }
}
/**
 * Exercises {@link FileListUtils#listFilesRecursively} against a progressively deeper
 * directory tree, both with the no-op filter and with an {@code .avro}-extension filter.
 *
 * Fixed: the method was missing its {@code @Test} annotation, so TestNG silently skipped
 * it; also removed a stray {@code System.out.println} debug statement.
 */
@Test
public void testListAllFiles() throws IOException {
  FileSystem localFs = FileSystem.getLocal(new Configuration());
  Path baseDir = new Path(FILE_UTILS_TEST_DIR, "listAllFiles");
  try {
    if (localFs.exists(baseDir)) {
      localFs.delete(baseDir, true);
    }
    localFs.mkdirs(baseDir);
    // Empty root directory.
    List<FileStatus> testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER);
    Assert.assertEquals(testFiles.size(), 0);
    // With two avro files (1.avro, 2.avro).
    Path file1 = new Path(baseDir, "1.avro");
    localFs.create(file1);
    Path file2 = new Path(baseDir, "2.avro");
    localFs.create(file2);
    testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER);
    Assert.assertEquals(testFiles.size(), 2);
    // With an avro schema file (part.avsc): the no-op filter sees it, the .avro filter does not.
    Path avsc = new Path(baseDir, "part.avsc");
    localFs.create(avsc);
    testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER);
    Assert.assertEquals(testFiles.size(), 3);
    testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, (path) -> path.getName().endsWith(".avro"));
    Assert.assertEquals(testFiles.size(), 2);
    // A complicated hierarchy
    // baseDir ____ 1.avro
    //         |____ 2.avro
    //         |____ part.avsc
    //         |____ subDir ____ 3.avro
    //                      |____ subDir2 ____ 4.avro
    //                                    |____ part2.avsc
    Path subDir = new Path(baseDir, "subDir");
    localFs.mkdirs(subDir);
    Path file3 = new Path(subDir, "3.avro");
    localFs.create(file3);
    Path subDir2 = new Path(subDir, "subDir2");
    localFs.mkdirs(subDir2);
    Path file4 = new Path(subDir2, "4.avro");
    localFs.create(file4);
    Path avsc2 = new Path(subDir2, "part2.avsc");
    localFs.create(avsc2);
    testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, (path) -> path.getName().endsWith(".avro"));
    Assert.assertEquals(testFiles.size(), 4);
    testFiles = FileListUtils.listFilesRecursively(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER);
    Assert.assertEquals(testFiles.size(), 6);
  } finally {
    localFs.delete(baseDir, true);
  }
}
/**
 * Exercises {@link FileListUtils#listFilesToCopyAtPath}, checking that empty directories
 * are reported only when {@code includeEmptyDirectories} is true, and that real files
 * take precedence once present.
 *
 * Fixed: the method was missing its {@code @Test} annotation, so TestNG silently skipped it.
 */
@Test
public void testListFilesToCopyAtPath() throws IOException {
  FileSystem localFs = FileSystem.getLocal(new Configuration());
  Path baseDir = new Path(FILE_UTILS_TEST_DIR, "fileListTestDir4");
  try {
    if (localFs.exists(baseDir)) {
      localFs.delete(baseDir, true);
    }
    localFs.mkdirs(baseDir);
    // Empty root directory: reported as a single (empty-dir) entry.
    List<FileStatus> testFiles = FileListUtils.listFilesToCopyAtPath(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER, true);
    Assert.assertEquals(testFiles.size(), 1);
    Assert.assertEquals(testFiles.get(0).getPath().getName(), baseDir.getName());
    // With an empty sub directory: only the deepest empty dir is reported.
    Path subDir = new Path(baseDir, "subDir");
    localFs.mkdirs(subDir);
    testFiles = FileListUtils.listFilesToCopyAtPath(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER, true);
    Assert.assertEquals(testFiles.size(), 1);
    Assert.assertEquals(testFiles.get(0).getPath().getName(), subDir.getName());
    // Disable include empty directories: nothing to copy.
    testFiles = FileListUtils.listFilesToCopyAtPath(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER, false);
    Assert.assertEquals(testFiles.size(), 0);
    // With file subDir/test1.
    Path test1Path = new Path(subDir, TEST_FILE_NAME1);
    localFs.create(test1Path);
    testFiles = FileListUtils.listFilesToCopyAtPath(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER, true);
    Assert.assertEquals(testFiles.size(), 1);
    Assert.assertEquals(testFiles.get(0).getPath().getName(), test1Path.getName());
    // With file subDir/test2.
    Path test2Path = new Path(subDir, TEST_FILE_NAME2);
    localFs.create(test2Path);
    testFiles = FileListUtils.listFilesToCopyAtPath(localFs, baseDir, FileListUtils.NO_OP_PATH_FILTER, true);
    Assert.assertEquals(testFiles.size(), 2);
    Set<String> fileNames = Sets.newHashSet();
    for (FileStatus testFileStatus : testFiles) {
      fileNames.add(testFileStatus.getPath().getName());
    }
    Assert.assertTrue(fileNames.contains(TEST_FILE_NAME1) && fileNames.contains(TEST_FILE_NAME2));
  } finally {
    localFs.delete(baseDir, true);
  }
}
/**
 * Verifies that getAnyNonHiddenFile skips empty directories and "_"-prefixed (hidden)
 * directories, and returns the lone file living under a visible, non-empty directory.
 */
@Test
public void testGetAnyNonHiddenFile() throws IOException {
  final String file1 = "test1";
  FileSystem fs = FileSystem.getLocal(new Configuration());
  Path root = new Path(FILE_UTILS_TEST_DIR, "anyFileDir");
  try {
    // Start from a clean directory in case a previous run left state behind.
    if (fs.exists(root)) {
      fs.delete(root, true);
    }
    fs.mkdirs(root);
    // An empty directory and a hidden directory (with a file inside) must both be skipped.
    fs.mkdirs(new Path(root, "emptySubDir"));
    Path hiddenDir = new Path(root, "_hidden");
    fs.mkdirs(hiddenDir);
    fs.create(new Path(hiddenDir, file1));
    // The only eligible file lives here.
    Path dataDir = new Path(root, "dataDir");
    fs.mkdirs(dataDir);
    File dataFile = new File(dataDir.toString(), file1);
    fs.create(new Path(dataDir, file1));

    FileStatus found = FileListUtils.getAnyNonHiddenFile(fs, root);
    Assert.assertEquals(found.getPath().toString(), dataFile.toURI().toString());
  } finally {
    fs.delete(root, true);
  }
}
}
| 4,015 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/ExecutorsUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
/**
* Unit tests for {@link ExecutorsUtils}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.util"})
public class ExecutorsUtilsTest {

  /**
   * Creates a thread via {@link ExecutorsUtils#newThreadFactory(Optional)} whose task throws,
   * and expects the RuntimeException to propagate to TestNG.
   *
   * NOTE(review): {@code thread.run()} is invoked directly (not {@code start()}), so the task
   * executes synchronously on the test thread and the thrown exception reaches TestNG without
   * involving any uncaught-exception handler; the mocked-logger stubbing appears to be
   * defensive rather than load-bearing — confirm intent before changing.
   */
  @Test(expectedExceptions = RuntimeException.class)
  public void testNewThreadFactory() throws InterruptedException {
    Logger logger = Mockito.mock(Logger.class);
    ThreadFactory threadFactory = ExecutorsUtils.newThreadFactory(Optional.of(logger));
    final RuntimeException runtimeException = new RuntimeException();
    Thread thread = threadFactory.newThread(new Runnable() {
      @Override
      public void run() {
        throw runtimeException;
      }
    });
    thread.setName("foo");
    String errorMessage = String.format("Thread %s threw an uncaught exception: %s", thread, runtimeException);
    Mockito.doThrow(runtimeException).when(logger).error(errorMessage, runtimeException);
    // Runs the task on the current thread; the throw satisfies expectedExceptions above.
    thread.run();
  }

  /**
   * Test to verify that {@link ExecutorsUtils#parallelize(List, Function, int, int, Optional)} returns the result in
   * the same order as the input
   *
   */
  @Test
  public void testParallelize() throws Exception {
    List<Integer> nums = ImmutableList.of(3, 5, 10, 5, 20);
    final int factor = 5;
    // Pure function applied concurrently; order of results must match order of inputs.
    Function<Integer, String> multiply = new Function<Integer, String>() {
      @Override
      public String apply(Integer input) {
        return Integer.toString(input * factor);
      }
    };
    List<String> result = ExecutorsUtils.parallelize(nums, multiply, 2, 60, Optional.<Logger> absent());
    Assert.assertEquals(Arrays.asList("15", "25", "50", "25", "100"), result);
  }

  /**
   * Test to verify that {@link ExecutorsUtils#parallelize(List, Function, int, int, Optional)} throws
   * {@link ExecutionException} when any of the threads throw and exception
   */
  @Test(expectedExceptions = ExecutionException.class)
  public void testParallelizeException() throws Exception {
    List<Integer> nums = ImmutableList.of(3, 5);
    final int factor = 5;
    // The input value 3 triggers a failure in one worker; the whole call must fail.
    Function<Integer, String> exceptionFunction = new Function<Integer, String>() {
      @Override
      public String apply(Integer input) {
        if (input == 3) {
          throw new RuntimeException("testParallelizeException thrown for testing");
        }
        return Integer.toString(input * factor);
      }
    };
    ExecutorsUtils.parallelize(nums, exceptionFunction, 2, 1, Optional.<Logger> absent());
  }

  /**
   * Test to verify that {@link ExecutorsUtils#parallelize(List, Function, int, int, Optional)} throws
   * {@link ExecutionException} when any of the threads timesout.
   */
  @Test(expectedExceptions = ExecutionException.class)
  public void testParallelizeTimeout() throws Exception {
    List<Integer> nums = ImmutableList.of(3, 5);
    final int factor = 5;
    // One worker sleeps for 2s while the overall timeout is 1s, forcing a timeout failure.
    Function<Integer, String> sleepAndMultiply = new Function<Integer, String>() {
      @Override
      public String apply(Integer input) {
        try {
          if (input == 5) {
            TimeUnit.SECONDS.sleep(2);
          }
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
        return Integer.toString(input * factor);
      }
    };
    ExecutorsUtils.parallelize(nums, sleepAndMultiply, 2, 1, Optional.<Logger> absent());
  }
}
| 4,016 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/DatePartitionTypeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.Assert;
import org.testng.annotations.Test;
public class DatePartitionTypeTest {

  /** Each partition granularity must report its span in milliseconds. */
  @Test
  public void testGetMillis() {
    Assert.assertEquals(DatePartitionType.MINUTE.getUnitMilliseconds(), 60 * 1000L);
    Assert.assertEquals(DatePartitionType.HOUR.getUnitMilliseconds(), 3600 * 1000L);
  }
}
| 4,017 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/JobLauncherUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Unit tests for {@link JobLauncherUtils}.
*
* @author Yinan Li
*/
/**
 * Unit tests for {@link JobLauncherUtils}.
 *
 * @author Yinan Li
 */
@Test(groups = { "gobblin.util" })
public class JobLauncherUtilsTest {

  private static final String JOB_NAME = "foo";
  // Generated job ids follow the pattern job_<jobName>_<sequence>.
  private static final Pattern PATTERN = Pattern.compile("job_" + JOB_NAME + "_\\d+");
  // Shared between testNewJobId and the tests that depend on it (see dependsOnMethods).
  private String jobId;

  /** Verifies a freshly generated job id matches the expected naming pattern. */
  @Test
  public void testNewJobId() {
    this.jobId = JobLauncherUtils.newJobId(JOB_NAME);
    Assert.assertTrue(PATTERN.matcher(this.jobId).matches());
  }

  /** Task ids are derived from the job id by swapping the "job" prefix and appending the index. */
  @Test(dependsOnMethods = "testNewJobId")
  public void testNewTaskId() {
    Assert.assertEquals(JobLauncherUtils.newTaskId(this.jobId, 0), this.jobId.replace("job", "task") + "_0");
    Assert.assertEquals(JobLauncherUtils.newTaskId(this.jobId, 1), this.jobId.replace("job", "task") + "_1");
  }

  /** Multi-task ids use the "multitask" prefix in place of "job". */
  @Test(dependsOnMethods = "testNewJobId")
  public void testNewMultiTaskId() {
    Assert.assertEquals(JobLauncherUtils.newMultiTaskId(this.jobId, 0), this.jobId.replace("job", "multitask") + "_0");
    Assert.assertEquals(JobLauncherUtils.newMultiTaskId(this.jobId, 1), this.jobId.replace("job", "multitask") + "_1");
  }

  /**
   * Flattening a mix of plain {@link WorkUnit}s and {@link MultiWorkUnit}s yields every
   * nested work unit as a flat list.
   *
   * Fixed: the second batch of work units was mistakenly added to {@code multiWorkUnit1}
   * (copy-paste bug), leaving {@code multiWorkUnit2} empty; the total of 9 was only reached
   * coincidentally. Each multi-work-unit now contributes its own 3 work units.
   */
  @Test
  public void testFlattenWorkUnits() {
    List<WorkUnit> workUnitsOnly =
        Arrays.asList(WorkUnit.createEmpty(), WorkUnit.createEmpty(), WorkUnit.createEmpty());
    Assert.assertEquals(JobLauncherUtils.flattenWorkUnits(workUnitsOnly).size(), 3);

    MultiWorkUnit multiWorkUnit1 = MultiWorkUnit.createEmpty();
    multiWorkUnit1.addWorkUnits(Arrays.asList(WorkUnit.createEmpty(), WorkUnit.createEmpty(), WorkUnit.createEmpty()));
    MultiWorkUnit multiWorkUnit2 = MultiWorkUnit.createEmpty();
    multiWorkUnit2.addWorkUnits(Arrays.asList(WorkUnit.createEmpty(), WorkUnit.createEmpty(), WorkUnit.createEmpty()));

    // 3 plain + 3 from each multi-work-unit = 9 flattened work units.
    List<WorkUnit> workUnitsAndMultiWorkUnits = Arrays.asList(WorkUnit.createEmpty(), WorkUnit.createEmpty(),
        WorkUnit.createEmpty(), multiWorkUnit1, multiWorkUnit2);
    Assert.assertEquals(JobLauncherUtils.flattenWorkUnits(workUnitsAndMultiWorkUnits).size(), 9);
  }

  /**
   * Verifies cleanTaskStagingData removes the per-branch staging and output directories
   * when an explicit writer file path is configured for each fork branch.
   */
  @Test
  public void testDeleteStagingData() throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path rootDir = new Path("gobblin-test/job-launcher-utils-test");
    Path writerStagingDir0 = new Path(rootDir, "staging/fork_0");
    Path writerStagingDir1 = new Path(rootDir, "staging/fork_1");
    Path writerOutputDir0 = new Path(rootDir, "output/fork_0");
    Path writerOutputDir1 = new Path(rootDir, "output/fork_1");
    String writerPath0 = "test0";
    String writerPath1 = "test1";
    try {
      WorkUnitState state = new WorkUnitState();
      state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, "2");
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, 2, 0),
          ConfigurationKeys.LOCAL_FS_URI);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, 2, 1),
          ConfigurationKeys.LOCAL_FS_URI);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_STAGING_DIR, 2, 0),
          writerStagingDir0.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_STAGING_DIR, 2, 1),
          writerStagingDir1.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_OUTPUT_DIR, 2, 0),
          writerOutputDir0.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_OUTPUT_DIR, 2, 1),
          writerOutputDir1.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PATH, 2, 0), writerPath0);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_PATH, 2, 1), writerPath1);

      // Materialize all four directories so the cleanup has something to remove.
      Path writerStagingPath0 = new Path(writerStagingDir0, writerPath0);
      fs.mkdirs(writerStagingPath0);
      Path writerStagingPath1 = new Path(writerStagingDir1, writerPath1);
      fs.mkdirs(writerStagingPath1);
      Path writerOutputPath0 = new Path(writerOutputDir0, writerPath0);
      fs.mkdirs(writerOutputPath0);
      Path writerOutputPath1 = new Path(writerOutputDir1, writerPath1);
      fs.mkdirs(writerOutputPath1);

      JobLauncherUtils.cleanTaskStagingData(state, LoggerFactory.getLogger(JobLauncherUtilsTest.class));

      Assert.assertFalse(fs.exists(writerStagingPath0));
      Assert.assertFalse(fs.exists(writerStagingPath1));
      Assert.assertFalse(fs.exists(writerOutputPath0));
      Assert.assertFalse(fs.exists(writerOutputPath1));
    } finally {
      fs.delete(rootDir, true);
    }
  }

  /**
   * Same as {@link #testDeleteStagingData()} but without an explicit writer file path,
   * so the per-branch paths are derived from the extract's output file path.
   */
  @Test
  public void testDeleteStagingDataWithOutWriterFilePath() throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    String branchName0 = "fork_0";
    String branchName1 = "fork_1";
    String namespace = "gobblin.test";
    String tableName = "test-table";
    Path rootDir = new Path("gobblin-test/job-launcher-utils-test");
    Path writerStagingDir0 = new Path(rootDir, "staging" + Path.SEPARATOR + branchName0);
    Path writerStagingDir1 = new Path(rootDir, "staging" + Path.SEPARATOR + branchName1);
    Path writerOutputDir0 = new Path(rootDir, "output" + Path.SEPARATOR + branchName0);
    Path writerOutputDir1 = new Path(rootDir, "output" + Path.SEPARATOR + branchName1);
    try {
      SourceState sourceState = new SourceState();
      WorkUnitState state =
          new WorkUnitState(WorkUnit.create(new Extract(sourceState, TableType.APPEND_ONLY, namespace, tableName)));
      state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, "2");
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.FORK_BRANCH_NAME_KEY, 2, 0),
          branchName0);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.FORK_BRANCH_NAME_KEY, 2, 1),
          branchName1);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, 2, 0),
          ConfigurationKeys.LOCAL_FS_URI);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, 2, 1),
          ConfigurationKeys.LOCAL_FS_URI);
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_STAGING_DIR, 2, 0),
          writerStagingDir0.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_STAGING_DIR, 2, 1),
          writerStagingDir1.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_OUTPUT_DIR, 2, 0),
          writerOutputDir0.toString());
      state.setProp(ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_OUTPUT_DIR, 2, 1),
          writerOutputDir1.toString());

      // Derive each branch's path from the extract output path, mirroring writer behavior.
      Path writerStagingPath0 = new Path(writerStagingDir0,
          ForkOperatorUtils.getPathForBranch(state, state.getExtract().getOutputFilePath(), 2, 0));
      fs.mkdirs(writerStagingPath0);
      Path writerStagingPath1 = new Path(writerStagingDir1,
          ForkOperatorUtils.getPathForBranch(state, state.getExtract().getOutputFilePath(), 2, 1));
      fs.mkdirs(writerStagingPath1);
      Path writerOutputPath0 = new Path(writerOutputDir0,
          ForkOperatorUtils.getPathForBranch(state, state.getExtract().getOutputFilePath(), 2, 0));
      fs.mkdirs(writerOutputPath0);
      Path writerOutputPath1 = new Path(writerOutputDir1,
          ForkOperatorUtils.getPathForBranch(state, state.getExtract().getOutputFilePath(), 2, 1));
      fs.mkdirs(writerOutputPath1);

      JobLauncherUtils.cleanTaskStagingData(state, LoggerFactory.getLogger(JobLauncherUtilsTest.class));

      Assert.assertFalse(fs.exists(writerStagingPath0));
      Assert.assertFalse(fs.exists(writerStagingPath1));
      Assert.assertFalse(fs.exists(writerOutputPath0));
      Assert.assertFalse(fs.exists(writerOutputPath1));
    } finally {
      fs.delete(rootDir, true);
    }
  }
}
| 4,018 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/WriterUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import java.io.IOException;
import org.apache.avro.file.CodecFactory;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.source.workunit.Extract.TableType;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
/**
* Tests for {@link WriterUtils}.
*/
/**
 * Tests for {@link WriterUtils}.
 */
@Test(groups = { "gobblin.util" })
public class WriterUtilsTest {

  public static final Path TEST_WRITER_STAGING_DIR = new Path("gobblin-test/writer-staging/");
  public static final Path TEST_WRITER_OUTPUT_DIR = new Path("gobblin-test/writer-output/");
  public static final Path TEST_WRITER_FILE_PATH = new Path("writer/file/path/");
  public static final Path TEST_DATA_PUBLISHER_FINAL_DIR = new Path("writer/final/dir/");

  /**
   * The staging dir is resolved from un-suffixed keys for a single branch, and from
   * ".0"/".1"-suffixed keys when multiple fork branches are configured.
   */
  @Test
  public void testGetWriterDir() {
    State state = new State();
    // Single branch (numBranches=0): plain, un-suffixed property keys apply.
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR, TEST_WRITER_STAGING_DIR);
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, TEST_WRITER_OUTPUT_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getWriterStagingDir(state, 0, 0), new Path(TEST_WRITER_STAGING_DIR,
        TEST_WRITER_FILE_PATH));
    // Two branches, branch 0: keys carry the ".0" suffix.
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR + ".0", TEST_WRITER_STAGING_DIR);
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR + ".0", TEST_WRITER_OUTPUT_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH + ".0", TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getWriterStagingDir(state, 2, 0), new Path(TEST_WRITER_STAGING_DIR,
        TEST_WRITER_FILE_PATH));
    // Two branches, branch 1: keys carry the ".1" suffix.
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR + ".1", TEST_WRITER_STAGING_DIR);
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR + ".1", TEST_WRITER_OUTPUT_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH + ".1", TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getWriterStagingDir(state, 2, 1), new Path(TEST_WRITER_STAGING_DIR,
        TEST_WRITER_FILE_PATH));
  }

  /**
   * The publisher final dir follows the same branch-suffix resolution rules as the
   * staging dir above.
   */
  @Test
  public void testGetDataPublisherFinalOutputDir() {
    State state = new State();
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, TEST_DATA_PUBLISHER_FINAL_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getDataPublisherFinalDir(state, 0, 0), new Path(TEST_DATA_PUBLISHER_FINAL_DIR,
        TEST_WRITER_FILE_PATH));
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + ".0", TEST_DATA_PUBLISHER_FINAL_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH + ".0", TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getDataPublisherFinalDir(state, 2, 0), new Path(TEST_DATA_PUBLISHER_FINAL_DIR,
        TEST_WRITER_FILE_PATH));
    state.setProp(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR + ".1", TEST_DATA_PUBLISHER_FINAL_DIR);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH + ".1", TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getDataPublisherFinalDir(state, 2, 1), new Path(TEST_DATA_PUBLISHER_FINAL_DIR,
        TEST_WRITER_FILE_PATH));
  }

  /**
   * An explicit writer file path wins; otherwise WRITER_FILE_PATH_TYPE selects between
   * table-name-only and namespace/table-derived paths.
   */
  @Test
  public void testGetWriterFilePath() {
    Extract extract = new Extract(TableType.SNAPSHOT_ONLY, "org.apache.gobblin.dbNamespace", "tableName");
    WorkUnit state = WorkUnit.create(extract);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 0, 0), TEST_WRITER_FILE_PATH);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH + ".0", TEST_WRITER_FILE_PATH);
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 1, 1), TEST_WRITER_FILE_PATH);
    // Without an explicit path, "tablename" uses just the table name...
    state.removeProp(ConfigurationKeys.WRITER_FILE_PATH);
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH_TYPE, "tablename");
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 0, 0), new Path("tableName"));
    // ...while "namespace_table" expands the dotted namespace into path segments.
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH_TYPE, "namespace_table");
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 0, 0),
        new Path("org/apache/gobblin/dbNamespace/tableName"));
  }

  /**
   * Without any explicit configuration the path falls back to the extract's output file
   * path, plus a fork-branch suffix when multiple branches exist.
   */
  @Test
  public void testGetDefaultWriterFilePath() {
    String namespace = "gobblin.test";
    String tableName = "test-table";
    SourceState sourceState = new SourceState();
    WorkUnit state = WorkUnit.create(new Extract(sourceState, TableType.APPEND_ONLY, namespace, tableName));
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 0, 0), new Path(state.getExtract().getOutputFilePath()));
    Assert.assertEquals(WriterUtils.getWriterFilePath(state, 2, 0), new Path(state.getExtract().getOutputFilePath(),
        ConfigurationKeys.DEFAULT_FORK_BRANCH_NAME + "0"));
  }

  /**
   * mkdirsWithRecursivePermissionWithRetry applies the requested permission to every
   * directory it creates, but leaves pre-existing ancestors untouched.
   *
   * NOTE(review): the assertions depend on the order in which directories are created,
   * so the two mkdirs calls below must not be reordered.
   */
  @Test
  public void testMkdirsPermissionsSet() throws IOException {
    Path testRoot = new Path("/tmp");
    Path testParent = new Path(testRoot, "mkdirs-1");
    Path testChild = new Path(testParent, "mkdirs-2/mkdirs-3/mkdirs-4");
    Config retryConfig = ConfigFactory.empty();
    FileSystem fs = FileSystem.getLocal(new Configuration());
    try {
      fs.delete(testParent, true);
      FsPermission all = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL);
      WriterUtils.mkdirsWithRecursivePermissionWithRetry(fs, testParent, all, retryConfig);
      Assert.assertTrue(fs.exists(testParent));
      Assert.assertEquals(fs.getFileStatus(testParent).getPermission(), all);
      FsPermission restricted = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.EXECUTE);
      WriterUtils.mkdirsWithRecursivePermissionWithRetry(fs, testChild, restricted, retryConfig);
      Assert.assertTrue(fs.exists(testChild));
      // created parent permission remains unchanged but uncreated parents and new dir set to restricted
      Assert.assertEquals(fs.getFileStatus(testParent).getPermission(), all);
      Assert.assertEquals(fs.getFileStatus(testChild.getParent().getParent()).getPermission(), restricted);
      Assert.assertEquals(fs.getFileStatus(testChild.getParent()).getPermission(), restricted);
      Assert.assertEquals(fs.getFileStatus(testChild).getPermission(), restricted);
    } finally {
      fs.delete(testParent, true);
    }
  }

  /**
   * Same default-path fallback as {@link #testGetDefaultWriterFilePath()}, exercised
   * through a {@link WorkUnitState} wrapper instead of the bare {@link WorkUnit}.
   */
  @Test
  public void testGetDefaultWriterFilePathWithWorkUnitState() {
    String namespace = "gobblin.test";
    String tableName = "test-table";
    SourceState sourceState = new SourceState();
    WorkUnit workUnit = WorkUnit.create(new Extract(sourceState, TableType.APPEND_ONLY, namespace, tableName));
    WorkUnitState workUnitState = new WorkUnitState(workUnit);
    Assert.assertEquals(WriterUtils.getWriterFilePath(workUnitState, 0, 0), new Path(workUnitState.getExtract()
        .getOutputFilePath()));
    Assert.assertEquals(WriterUtils.getWriterFilePath(workUnitState, 2, 0), new Path(workUnitState.getExtract()
        .getOutputFilePath(), ConfigurationKeys.DEFAULT_FORK_BRANCH_NAME + "0"));
  }

  /** Codec names are matched case-insensitively. */
  @Test
  public void testGetCodecFactoryIgnoresCase() {
    CodecFactory codecFactory = WriterUtils.getCodecFactory(Optional.of("SNAPPY"), Optional.<String>absent());
    Assert.assertEquals(codecFactory.toString(), "snappy");
    codecFactory = WriterUtils.getCodecFactory(Optional.of("snappy"), Optional.<String>absent());
    Assert.assertEquals(codecFactory.toString(), "snappy");
  }
}
| 4,019 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/PortUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.io.IOException;
/**
 * Unit tests for {@link PortUtils} port-token substitution.
 *
 * Token forms exercised: {@code ${PORT_n}} (exact port), {@code ${PORT_?n}} / {@code ${PORT_n?}}
 * (bounded search), and {@code ${PORT_?}} (random port). The {@link PortUtils.PortLocator} is
 * mocked so no real sockets are opened.
 *
 * Fixed: removed an unused local variable in {@link #testReplaceRandomPortToken()}.
 */
public class PortUtilsTest {

  /** A literal {@code ${PORT_1025}} token resolves to exactly that port. */
  @Test
  public void testReplaceAbsolutePortToken() throws Exception {
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    Mockito.when(portLocator.specific(1025)).thenReturn(1025);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_1025}");
    Assert.assertEquals(actual, "-Dvar1=1025");
  }

  /** If the requested exact port cannot be acquired, substitution fails loudly. */
  @Test(expectedExceptions = RuntimeException.class)
  public void testFailIfCannotReplaceAbsolutePortToken() throws Exception {
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    Mockito.when(portLocator.specific(1025)).thenThrow(new IOException());
    PortUtils portUtils = new PortUtils(portLocator);
    portUtils.replacePortTokens("-Dvar1=${PORT_1025}");
  }

  /** With no lower bound, the search starts at MINIMUM_PORT and advances past busy ports. */
  @Test
  public void testReplaceUnboundMinimumPortToken() throws Exception {
    int expectedPort = PortUtils.MINIMUM_PORT + 1;
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    // MINIMUM_PORT is "busy"; the next port is free.
    Mockito.when(portLocator.specific(PortUtils.MINIMUM_PORT)).thenThrow(new IOException());
    Mockito.when(portLocator.specific(expectedPort)).thenReturn(expectedPort);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_?1026}");
    Assert.assertEquals(actual, "-Dvar1=1026");
  }

  /** With no upper bound, the search advances upward from the given lower bound. */
  @Test
  public void testReplaceUnboundMaximumPortToken() throws Exception {
    int expectedPort = PortUtils.MINIMUM_PORT + 1;
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    Mockito.when(portLocator.specific(PortUtils.MINIMUM_PORT)).thenThrow(new IOException());
    Mockito.when(portLocator.specific(expectedPort)).thenReturn(expectedPort);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_1025?}");
    Assert.assertEquals(actual, "-Dvar1=1026");
  }

  /** A bare {@code ${PORT_?}} token takes whatever the locator's random() returns. */
  @Test
  public void testReplaceRandomPortToken() throws Exception {
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    Mockito.when(portLocator.random()).thenReturn(1027);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_?}");
    Assert.assertEquals(actual, "-Dvar1=1027");
  }

  /** Identical tokens in one string must resolve to the same (cached) port. */
  @Test
  public void testReplaceDuplicateTokensGetSamePort() throws Exception {
    final int expectedPort = PortUtils.MINIMUM_PORT + 1;
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    // random() returns expectedPort only on its first call; any further call would yield a
    // different value, proving the first assignment is reused for the duplicate token.
    Mockito.when(portLocator.random()).thenAnswer(new Answer<Integer>() {
      public int callCount;

      @Override
      public Integer answer(InvocationOnMock invocation) throws Throwable {
        if (this.callCount++ == 0) {
          return expectedPort;
        }
        return expectedPort + callCount++;
      }
    });
    Mockito.when(portLocator.specific(expectedPort)).thenReturn(expectedPort);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_?} -Dvar2=${PORT_?}");
    Assert.assertEquals(actual, "-Dvar1=1026 -Dvar2=1026");
  }

  /** Distinct bounded tokens skip ports already handed out earlier in the same string. */
  @Test
  public void testReplacePortTokensKeepsTrackOfAssignedPorts() throws Exception {
    int expectedPort1 = PortUtils.MINIMUM_PORT + 1;
    int expectedPort2 = PortUtils.MINIMUM_PORT + 2;
    PortUtils.PortLocator portLocator = Mockito.mock(PortUtils.PortLocator.class);
    Mockito.when(portLocator.specific(PortUtils.MINIMUM_PORT)).thenThrow(new IOException());
    Mockito.when(portLocator.specific(expectedPort1)).thenReturn(expectedPort1);
    Mockito.when(portLocator.specific(expectedPort2)).thenReturn(expectedPort2);
    PortUtils portUtils = new PortUtils(portLocator);
    String actual = portUtils.replacePortTokens("-Dvar1=${PORT_1026?} -Dvar2=${PORT_1025?}");
    Assert.assertEquals(actual, "-Dvar1=1026 -Dvar2=1027");
  }
}
| 4,020 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/SystemPropertiesWrapperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.annotations.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Smoke tests for {@code SystemPropertiesWrapper}. The JVM's actual home
 * directory and classpath vary per environment, so each test only checks
 * the value is non-empty and prints it for inspection.
 */
public class SystemPropertiesWrapperTest {

  final SystemPropertiesWrapper propertiesWrapper = new SystemPropertiesWrapper();

  /** {@code getJavaHome()} must return a non-empty path; the exact value is environment-specific. */
  @Test
  public void testGetJavaHome() {
    final String javaHome = this.propertiesWrapper.getJavaHome();
    // The concrete JRE home cannot be asserted portably; print it for
    // debugging/demonstration and only verify it is populated.
    System.out.println(javaHome);
    assertThat(javaHome).isNotEmpty();
  }

  /** {@code getJavaClassPath()} must return a non-empty string; the exact value is environment-specific. */
  @Test
  public void testGetJavaClassPath() {
    final String javaClassPath = this.propertiesWrapper.getJavaClassPath();
    // The launching classpath cannot be asserted portably; print it for
    // debugging/demonstration and only verify it is populated.
    System.out.println(javaClassPath);
    assertThat(javaClassPath).isNotEmpty();
  }
}
| 4,021 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/PropertiesUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
/**
 * Unit tests for {@code PropertiesUtils}: prefix-based extraction (with and
 * without prefix stripping), comma-separated list parsing, and value listing
 * by key prefix.
 */
public class PropertiesUtilsTest {

  /** Extraction by prefix keeps only matching keys, unmodified. */
  @Test
  public void testExtractPropertiesWithPrefix() {
    Properties properties = new Properties();
    properties.setProperty("k1.kk1", "v1");
    properties.setProperty("k1.kk2", "v2");
    properties.setProperty("k2.kk", "v3");
    // First prefix
    Properties extractedPropertiesK1 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k1"));
    Assert.assertEquals(extractedPropertiesK1.getProperty("k1.kk1"), "v1");
    Assert.assertEquals(extractedPropertiesK1.getProperty("k1.kk2"), "v2");
    Assert.assertFalse(extractedPropertiesK1.containsKey("k2.kk"));
    // Second prefix
    Properties extractedPropertiesK2 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k2"));
    Assert.assertFalse(extractedPropertiesK2.containsKey("k1.kk1"));
    Assert.assertFalse(extractedPropertiesK2.containsKey("k1.kk2"));
    Assert.assertEquals(extractedPropertiesK2.getProperty("k2.kk"), "v3");
    // Missing prefix: nothing should be extracted.
    Properties extractedPropertiesK3 = PropertiesUtils.extractPropertiesWithPrefix(properties, Optional.of("k3"));
    Assert.assertFalse(extractedPropertiesK3.containsKey("k1.kk1"));
    // Fixed copy-paste duplicate: this line previously re-checked "k1.kk1".
    Assert.assertFalse(extractedPropertiesK3.containsKey("k1.kk2"));
    Assert.assertFalse(extractedPropertiesK3.containsKey("k2.kk"));
  }

  /** Extraction by prefix with stripping removes the given prefix string from matching keys. */
  @Test
  public void testExtractPropertiesWithPrefixAfterRemovingPrefix() {
    Properties properties = new Properties();
    properties.setProperty("k1.kk1", "v1");
    properties.setProperty("k1.kk2", "v2");
    properties.setProperty("k2.kk", "v3");
    // First prefix: "k1." (including the dot) is stripped from extracted keys.
    Properties extractedPropertiesK1 = PropertiesUtils.extractPropertiesWithPrefixAfterRemovingPrefix(properties, "k1.");
    Assert.assertEquals(extractedPropertiesK1.getProperty("kk1"), "v1");
    Assert.assertEquals(extractedPropertiesK1.getProperty("kk2"), "v2");
    Assert.assertFalse(extractedPropertiesK1.containsKey("k2.kk"));
    // Second prefix: only "k2" is stripped, so the leading dot remains in the key.
    Properties extractedPropertiesK2 = PropertiesUtils.extractPropertiesWithPrefixAfterRemovingPrefix(properties, "k2");
    Assert.assertFalse(extractedPropertiesK2.containsKey("k1.kk1"));
    Assert.assertFalse(extractedPropertiesK2.containsKey("k1.kk2"));
    Assert.assertEquals(extractedPropertiesK2.getProperty(".kk"), "v3");
    // Missing prefix: nothing should be extracted.
    Properties extractedPropertiesK3 = PropertiesUtils.extractPropertiesWithPrefixAfterRemovingPrefix(properties, "k3");
    Assert.assertFalse(extractedPropertiesK3.containsKey("k1.kk1"));
    // Fixed copy-paste duplicate: this line previously re-checked "k1.kk1".
    Assert.assertFalse(extractedPropertiesK3.containsKey("k1.kk2"));
    Assert.assertFalse(extractedPropertiesK3.containsKey("k2.kk"));
  }

  /** Comma-separated values are split and trimmed; a missing key yields the default. */
  @Test
  public void testGetStringList() {
    Properties properties = new Properties();
    properties.put("key", "1,2, 3");
    // values as comma separated strings
    Assert.assertEquals(PropertiesUtils.getPropAsList(properties, "key"), ImmutableList.of("1", "2", "3"));
    Assert.assertEquals(PropertiesUtils.getPropAsList(properties, "key2", "default"), ImmutableList.of("default"));
  }

  /** Value listing by key prefix is case-sensitive ("K3" must be excluded for prefix "k"). */
  @Test
  public void testGetValuesAsList() {
    Properties properties = new Properties();
    properties.put("k1", "v1");
    properties.put("k2", "v2");
    properties.put("k3", "v2");
    properties.put("K3", "v4");
    Assert.assertEqualsNoOrder(PropertiesUtils.getValuesAsList(properties, Optional.of("k")).toArray(), new String[]{"v1", "v2", "v2"});
  }
}
| 4,022 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/FileUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.testng.annotations.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@code FileUtils}: saving a string to a local file, and
 * path-containment checks ({@code isSubPath}) for both {@link java.io.File}
 * and Hadoop {@code Path} arguments, including ".." normalization.
 */
public class FileUtilsTest {

  /** Saving a string writes it (with a trailing newline) to the destination file. */
  @Test
  public void testSaveToFile()
      throws IOException {
    FileUtils utils = new FileUtils();
    Path destPath = Paths.get("fileUtilTest.txt");
    utils.saveToFile("foo", destPath);
    assertThat(destPath).exists().isReadable().hasContent("foo\n");
    // Clean up the file created in the working directory.
    Files.deleteIfExists(destPath);
  }

  /** {@code isSubPath(File, File)} must normalize ".." segments before comparing. */
  @Test
  public void testIsSubFile() throws IOException {
    File parentPath = new File("/tmp/foo/bar");
    // Fixed inverted AssertJ usage: the value under test belongs inside assertThat(),
    // otherwise failure messages report expected/actual backwards.
    File childPath = new File("/tmp/foo/../tar/file.txt");
    // Normalizes to /tmp/tar/file.txt, which escapes the parent.
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isFalse();
    childPath = new File("/tmp/foo/tar/../bar/file.txt");
    // Normalizes to /tmp/foo/bar/file.txt, inside the parent.
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isTrue();
    childPath = new File("/tmp/foo/bar/car/file.txt");
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isTrue();
  }

  /** {@code isSubPath(Path, Path)} (Hadoop paths) mirrors the File-based behavior. */
  @Test
  public void testIsSubPath() throws IOException {
    org.apache.hadoop.fs.Path parentPath = new org.apache.hadoop.fs.Path("/tmp/foo/bar");
    org.apache.hadoop.fs.Path childPath = new org.apache.hadoop.fs.Path("/tmp/foo/../tar/file.txt");
    // Normalizes to /tmp/tar/file.txt, which escapes the parent.
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isFalse();
    childPath = new org.apache.hadoop.fs.Path("/tmp/foo/tar/../bar/file.txt");
    // Normalizes to /tmp/foo/bar/file.txt, inside the parent.
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isTrue();
    childPath = new org.apache.hadoop.fs.Path("/tmp/foo/bar/car/file.txt");
    assertThat(FileUtils.isSubPath(parentPath, childPath)).isTrue();
  }
}
| 4,023 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/HeapDumpForTaskUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
@Test(groups = { "gobblin.util" })
public class HeapDumpForTaskUtilsTest {

  private FileSystem fs;

  private static final String TEST_DIR = "dumpScript";
  private static final String SCRIPT_NAME = "dump.sh";

  @BeforeClass
  public void setUp() throws IOException {
    this.fs = FileSystem.getLocal(new Configuration());
    this.fs.mkdirs(new Path(TEST_DIR));
  }

  /**
   * Verifies that {@code generateDumpScript} creates both the dump script and its
   * "dumps" output directory, and that the script body uploads the heap dump via
   * whichever of HADOOP_PREFIX / HADOOP_HOME is set.
   */
  @Test
  public void testGenerateDumpScript() throws IOException {
    Path dumpScript = new Path(TEST_DIR, SCRIPT_NAME);
    HeapDumpForTaskUtils.generateDumpScript(dumpScript, this.fs, "test.hprof", "chmod 777 ");
    // Use assertTrue rather than assertEquals(true, ...) for boolean checks.
    Assert.assertTrue(this.fs.exists(dumpScript));
    Assert.assertTrue(this.fs.exists(new Path(dumpScript.getParent(), "dumps")));
    Closer closer = Closer.create();
    try {
      BufferedReader scriptReader =
          closer.register(new BufferedReader(new InputStreamReader(this.fs.open(dumpScript))));
      // TestNG's Assert.assertEquals takes (actual, expected); the original had the
      // arguments reversed, which produced misleading failure messages.
      Assert.assertEquals(scriptReader.readLine(), "#!/bin/sh");
      Assert.assertEquals(scriptReader.readLine(), "if [ -n \"$HADOOP_PREFIX\" ]; then");
      Assert.assertEquals(scriptReader.readLine(),
          "  ${HADOOP_PREFIX}/bin/hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof");
      Assert.assertEquals(scriptReader.readLine(), "else");
      Assert.assertEquals(scriptReader.readLine(),
          "  ${HADOOP_HOME}/bin/hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof");
      Assert.assertEquals(scriptReader.readLine(), "fi");
    } catch (Throwable t) {
      // Guava Closer idiom: record the primary failure so close() does not mask it.
      closer.rethrow(t);
    } finally {
      closer.close();
    }
  }

  @AfterClass
  public void tearDown() throws IOException {
    fs.delete(new Path(TEST_DIR), true);
    fs.close();
  }
}
| 4,024 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/PullFileLoaderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
 * Tests for {@code PullFileLoader}, covering .pull (Java properties) and .conf
 * (HOCON) job files loaded individually and recursively, with and without
 * system properties and ancestor "global" properties merged in.
 *
 * Fixture layout lives under the test resource directory "pullFileLoaderTest";
 * expected key/value pairs below mirror that fixture.
 */
public class PullFileLoaderTest {

  // Root of the fixture directory on the local filesystem.
  private final Path basePath;
  private final PullFileLoader loader;

  public PullFileLoaderTest() throws Exception {
    this.basePath = new Path(this.getClass().getClassLoader().getResource("pullFileLoaderTest").getFile());
    this.loader = new PullFileLoader(this.basePath, FileSystem.getLocal(new Configuration()),
        PullFileLoader.DEFAULT_JAVA_PROPS_PULL_FILE_EXTENSIONS, PullFileLoader.DEFAULT_HOCON_PULL_FILE_EXTENSIONS);
  }

  /** Loads single pull/conf files with no sysProps and no global props merged in. */
  @Test
  public void testSimpleJobLoading() throws Exception {
    Path path;
    Config pullFile;

    path = new Path(this.basePath, "ajob.pull");
    pullFile = loader.loadPullFile(path, ConfigFactory.empty(), false);
    Assert.assertEquals(pullFile.getString("key2"), "aValue");
    Assert.assertEquals(pullFile.getString("key10"), "aValue");
    // The loader records the originating file path in the config.
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    // Entry-set size is pinned to catch keys leaking in from elsewhere.
    Assert.assertEquals(pullFile.entrySet().size(), 3);

    path = new Path(this.basePath, "dir1/job.pull");
    pullFile = loader.loadPullFile(path, ConfigFactory.empty(), false);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 3);

    path = new Path(this.basePath, "dir1/job.conf");
    pullFile = loader.loadPullFile(path, ConfigFactory.empty(), false);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key10"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 4);
  }

  /** System properties act as fallback: pull-file values win; sysProps fill missing keys. */
  @Test
  public void testJobLoadingWithSysProps() throws Exception {
    Path path;
    Config pullFile;

    Properties sysProps = new Properties();
    sysProps.put("key1", "sysProps1");

    path = new Path(this.basePath, "ajob.pull");
    pullFile = loader.loadPullFile(path, ConfigUtils.propertiesToConfig(sysProps), false);
    // ajob.pull does not define key1, so the sysProps value shows through.
    Assert.assertEquals(pullFile.getString("key1"), "sysProps1");
    Assert.assertEquals(pullFile.getString("key2"), "aValue");
    Assert.assertEquals(pullFile.getString("key10"), "aValue");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 4);

    path = new Path(this.basePath, "dir1/job.pull");
    pullFile = loader.loadPullFile(path, ConfigUtils.propertiesToConfig(sysProps), false);
    // job.pull defines key1 itself, overriding the sysProps value.
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 3);

    path = new Path(this.basePath, "dir1/job.conf");
    pullFile = loader.loadPullFile(path, ConfigFactory.empty(), false);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key10"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 4);
  }

  /** Recursive loading finds every pull/conf file under the base path. */
  @Test
  public void testRecursiveJobLoading() throws Exception {
    Path path;
    Config pullFile;

    Properties sysProps = new Properties();
    sysProps.put("key1", "sysProps1");

    Collection<Config> configs =
        loader.loadPullFilesRecursively(this.basePath, ConfigUtils.propertiesToConfig(sysProps), false);
    // Only 4 files should generate configs (ajob.pull, bjob.pull, dir1/job.pull, dir1/job.conf)
    Assert.assertEquals(configs.size(), 4);

    path = new Path(this.basePath, "ajob.pull");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "sysProps1");
    Assert.assertEquals(pullFile.getString("key2"), "aValue");
    Assert.assertEquals(pullFile.getString("key10"), "aValue");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 4);

    path = new Path(this.basePath, "dir1/job.pull");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 3);

    path = new Path(this.basePath, "dir1/job.conf");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key10"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 4);
  }

  /**
   * Tests to verify job written first to the job catalog is picked up first.
   *
   * NOTE(review): this @Test method is package-private; TestNG typically runs
   * only public test methods — confirm it actually executes.
   * NOTE(review): Thread.sleep(1000) per file (5s total) is used to guarantee
   * distinct modification times; consider setting mtimes explicitly instead.
   * @throws Exception
   */
  @Test void testJobLoadingOrder() throws Exception {
    Properties sysProps = new Properties();
    FileSystem fs = FileSystem.getLocal(new Configuration());
    File tmpDir = Files.createTempDir();
    tmpDir.deleteOnExit();
    Path localBasePath = new Path(tmpDir.getAbsolutePath(), "PullFileLoaderTestDir");
    fs.mkdirs(localBasePath);

    // Write job5 first and job1 last, sleeping between writes so each file
    // gets a strictly later modification time than the previous one.
    for (int i=5; i>0; i--) {
      String job = localBasePath.toString() + "/job" + i + ".conf";
      PrintWriter writer = new PrintWriter(job, "UTF-8");
      writer.println("key=job" + i + "_val");
      writer.close();
      Thread.sleep(1000);
    }

    List<Config> configs =
        loader.loadPullFilesRecursively(localBasePath, ConfigUtils.propertiesToConfig(sysProps), false);

    // Expect configs in write order: job5 (oldest) through job1 (newest).
    int i = 5;
    for (Config config : configs) {
      Assert.assertEquals(config.getString("key"), "job" + i + "_val");
      i--;
    }
  }

  /** Global props from ancestor directories are merged under the pull file's own values. */
  @Test
  public void testJobLoadingWithSysPropsAndGlobalProps() throws Exception {
    Path path;
    Config pullFile;

    Properties sysProps = new Properties();
    sysProps.put("key1", "sysProps1");

    path = new Path(this.basePath, "ajob.pull");
    pullFile = loader.loadPullFile(path, ConfigUtils.propertiesToConfig(sysProps), true);
    // Root-level global props override sysProps for key1.
    Assert.assertEquals(pullFile.getString("key1"), "rootValue1");
    Assert.assertEquals(pullFile.getString("key2"), "aValue");
    Assert.assertEquals(pullFile.getString("key10"), "aValue");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 5);

    path = new Path(this.basePath, "dir1/job.pull");
    pullFile = loader.loadPullFile(path, ConfigUtils.propertiesToConfig(sysProps), true);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key2a"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    // dir1's own global props contribute key4.
    Assert.assertEquals(pullFile.getString("key4"), "dir1Value4");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 6);

    path = new Path(this.basePath, "dir1/job.conf");
    pullFile = loader.loadPullFile(path, ConfigFactory.empty(), true);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    // HOCON substitution resolves key2/key2a against the dir1 global value.
    Assert.assertEquals(pullFile.getString("key2"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key2a"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    Assert.assertEquals(pullFile.getString("key4"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key10"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 7);
  }

  /** Recursive loading with global props behaves like per-file loading with global props. */
  @Test
  public void testRecursiveJobLoadingWithAndGlobalProps() throws Exception {
    Path path;
    Config pullFile;

    Properties sysProps = new Properties();
    sysProps.put("key1", "sysProps1");

    Collection<Config> configs =
        loader.loadPullFilesRecursively(this.basePath, ConfigUtils.propertiesToConfig(sysProps), true);

    path = new Path(this.basePath, "ajob.pull");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "rootValue1");
    Assert.assertEquals(pullFile.getString("key2"), "aValue");
    Assert.assertEquals(pullFile.getString("key10"), "aValue");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 5);

    path = new Path(this.basePath, "dir1/job.pull");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key2a"), "jobValue2");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    Assert.assertEquals(pullFile.getString("key4"), "dir1Value4");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 6);

    path = new Path(this.basePath, "dir1/job.conf");
    pullFile = pullFileFromPath(configs, path);
    Assert.assertEquals(pullFile.getString("key1"), "jobValue1,jobValue2,jobValue3");
    Assert.assertEquals(pullFile.getString("key2"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key2a"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key3"), "rootValue3");
    Assert.assertEquals(pullFile.getString("key4"), "dir1Value4");
    Assert.assertEquals(pullFile.getString("key10"), "jobValue2");
    Assert.assertEquals(pullFile.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY), path.toString());
    Assert.assertEquals(pullFile.entrySet().size(), 7);
  }

  /** With delimiter parsing enabled, JSON-valued properties can be reused via substitution. */
  @Test
  public void testJsonPropertyReuseJobLoading() throws Exception {
    Path path;
    Config pullFile;

    path = new Path(this.basePath, "bjob.pull");
    Config cfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(PullFileLoader.PROPERTY_DELIMITER_PARSING_ENABLED_KEY, true)
        .build());
    pullFile = loader.loadPullFile(path, cfg, false);
    Assert.assertEquals(pullFile.getString("json.property.key"), pullFile.getString("json.property.key1"));
  }

  /**
   * Finds the config originating from {@code path} within {@code configs}.
   * @throws IOException if no config in the collection came from that path
   */
  private Config pullFileFromPath(Collection<Config> configs, Path path) throws IOException {
    for (Config config : configs) {
      if (config.getString(ConfigurationKeys.JOB_CONFIG_FILE_PATH_KEY).equals(path.toString())) {
        return config;
      }
    }
    throw new IOException("Not found.");
  }

  /** A malformed HOCON file must surface as an IOException naming the file and line number. */
  @Test
  public void testExceptionWrapping() throws Exception {
    Path path = new Path(this.basePath, "dir2/badjob.conf");
    try {
      Config pullFile = loader.loadPullFile(path, ConfigFactory.empty(), true);
      Assert.fail("Should throw exception");
    } catch (IOException ie) {
      String message = ie.getMessage();
      Assert.assertEquals(message, "Failed to parse config file " + path.toString() + " at lineNo:18");
    }
  }
}
| 4,025 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/AvroSchemaUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import com.google.common.collect.Lists;
import com.linkedin.avroutil1.compatibility.AvroCompatibilityHelper;
import java.io.IOException;
import java.util.List;
import org.apache.avro.Schema;
import org.testng.Assert;
import org.testng.annotations.Test;
public class AvroSchemaUtilsTest {
@Test
public void testGetValueAsInteger() throws IOException {
Schema schema = readSchemaFromJsonFile("props/schema_with_logical_field.json");
Schema.Field field = schema.getField("logicalFieldDecimal");
Schema fieldSchema = field.schema();
Assert.assertEquals(AvroSchemaUtils.getValueAsInteger(fieldSchema, "precision").intValue(), 4);
Assert.assertEquals(AvroSchemaUtils.getValueAsInteger(fieldSchema, "scale").intValue(), 2);
}
@Test
public void testCopySchemaProperties() throws IOException {
Schema fromSchema = Schema.createRecord("name", "", "namespace", false);
fromSchema.addProp("prop1", "val1");
fromSchema.addProp("prop2", "val2");
List<Schema.Field> fieldList = Lists.newArrayList();
Schema.Field field1 =
AvroCompatibilityHelper.createSchemaField("key", Schema.create(Schema.Type.LONG), "", 0L);
fieldList.add(field1);
Schema.Field field2 =
AvroCompatibilityHelper.createSchemaField("double", Schema.create(Schema.Type.DOUBLE), "", 0.0);
fieldList.add(field2);
fromSchema.setFields(Lists.newArrayList(fieldList));
Schema toSchema = readSchemaFromJsonFile("props/schema_without_props.json");
AvroSchemaUtils.copySchemaProperties(fromSchema, toSchema);
Assert.assertEquals(fromSchema.toString(), toSchema.toString());
for(Schema.Field field : toSchema.getFields()) {
Schema.Field oldField = fromSchema.getField(field.name());
Assert.assertEquals(field.toString(), oldField.toString());
}
Assert.assertTrue(fromSchema.getObjectProps().equals(toSchema.getObjectProps()));
}
@Test
public void testCopySchemaPropertiesWithAdditionalProps() throws IOException {
Schema fromSchema = readSchemaFromJsonFile("props/schema_with_props.json");
fromSchema.addProp("prop3", "val3");
fromSchema.addProp("prop4", "val4");
Schema toSchema = readSchemaFromJsonFile("props/schema_without_props.json");
AvroSchemaUtils.copySchemaProperties(fromSchema, toSchema);
Assert.assertEquals(fromSchema.toString(), toSchema.toString());
for(Schema.Field field : toSchema.getFields()) {
Schema.Field oldField = fromSchema.getField(field.name());
Assert.assertEquals(field.toString(), oldField.toString());
}
Assert.assertTrue(fromSchema.getObjectProps().equals(toSchema.getObjectProps()));
}
@Test
public void testCopyFieldProperties() throws IOException {
Schema fromSchema = Schema.createRecord("name", "", "namespace", false);
fromSchema.addProp("prop1", "val1");
fromSchema.addProp("prop2", "val2");
List<Schema.Field> fieldList = Lists.newArrayList();
Schema.Field field1 =
AvroCompatibilityHelper.createSchemaField("key", Schema.create(Schema.Type.LONG), "", 0L);
field1.addProp("primaryKey", "true");
fieldList.add(field1);
Schema.Field field2 =
AvroCompatibilityHelper.createSchemaField("double", Schema.create(Schema.Type.DOUBLE), "", 0.0);
fieldList.add(field2);
fromSchema.setFields(Lists.newArrayList(fieldList));
Schema toSchema = readSchemaFromJsonFile("props/schema_without_field_props.json");
AvroSchemaUtils.copyFieldProperties(fromSchema.getField("key"), toSchema.getField("key"));
Assert.assertEquals(fromSchema.toString(), toSchema.toString());
for(Schema.Field field : toSchema.getFields()) {
Schema.Field oldField = fromSchema.getField(field.name());
Assert.assertEquals(field.toString(), oldField.toString());
}
Assert.assertTrue(fromSchema.getObjectProps().equals(toSchema.getObjectProps()));
}
@Test
public void testCopyFieldPropertiesWithAdditionalProps() throws IOException {
Schema fromSchema = readSchemaFromJsonFile("props/schema_with_field_props.json");
Schema.Field keyField = fromSchema.getField("key");
keyField.addProp("key1", "val1");
Schema.Field doubleField = fromSchema.getField("double");
doubleField.addProp("key1", "val1");
doubleField.addProp("key2", "val2");
Schema toSchema = readSchemaFromJsonFile("props/schema_without_field_props.json");
Schema.Field toKeyField = toSchema.getField("key");
Schema.Field toDoubleField = toSchema.getField("double");
AvroSchemaUtils.copyFieldProperties(keyField, toKeyField);
AvroSchemaUtils.copyFieldProperties(doubleField, toDoubleField);
Assert.assertEquals(fromSchema.toString(), toSchema.toString());
for(Schema.Field field : toSchema.getFields()) {
Schema.Field oldField = fromSchema.getField(field.name());
Assert.assertEquals(field.toString(), oldField.toString());
}
Assert.assertTrue(fromSchema.getObjectProps().equals(toSchema.getObjectProps()));
}
/** Parses an Avro {@link Schema} from a JSON resource file found on the test classpath. */
private static Schema readSchemaFromJsonFile(String filename) throws IOException {
  Schema.Parser parser = new Schema.Parser();
  return parser.parse(AvroSchemaUtilsTest.class.getClassLoader().getResourceAsStream(filename));
}
}
| 4,026 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/AvroFlattenerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.IOException;
import org.apache.avro.Schema;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.linkedin.avroutil1.compatibility.AvroCompatibilityHelper;
public class AvroFlattenerTest {

  /** Parses an Avro {@link Schema} from a JSON resource under the {@code flattenAvro/} directory. */
  private static Schema readSchemaFromJsonFile(String filename) throws IOException {
    Schema.Parser parser = new Schema.Parser();
    return parser.parse(AvroFlattenerTest.class.getClassLoader().getResourceAsStream("flattenAvro/" + filename));
  }

  /**
   * Flattens {@code <scenario>_original.json} (with Hive-style flattening disabled) and asserts
   * the result equals {@code <scenario>_flattened.json}.
   */
  private static void assertFlattensTo(String scenario) throws IOException {
    Schema original = readSchemaFromJsonFile(scenario + "_original.json");
    Schema expected = readSchemaFromJsonFile(scenario + "_flattened.json");
    Assert.assertEquals(new AvroFlattener().flatten(original, false), expected);
  }

  /** Record nested directly inside another record: R1 { R2 }. */
  @Test
  public void testRecordWithinRecord() throws IOException {
    assertFlattensTo("recordWithinRecord");
  }

  /** Record nested two levels deep: R1 { R2 { R3 } }. */
  @Test
  public void testRecordWithinRecordWithinRecord() throws IOException {
    assertFlattensTo("recordWithinRecordWithinRecord");
  }

  /** Record inside an option (union of [null, R2]) inside a record. */
  @Test
  public void testRecordWithinOptionWithinRecord() throws IOException {
    assertFlattensTo("recordWithinOptionWithinRecord");
  }

  /** Record inside a union of [R2, null] inside a record. */
  @Test
  public void testRecordWithinUnionWithinRecord() throws IOException {
    assertFlattensTo("recordWithinUnionWithinRecord");
  }

  /** Option nested within an option within a record; compared via toString like the fixture expects. */
  @Test
  public void testOptionWithinOptionWithinRecord() throws IOException {
    Schema original = readSchemaFromJsonFile("optionWithinOptionWithinRecord_original.json");
    Schema expected = readSchemaFromJsonFile("optionWithinOptionWithinRecord_flattened.json");
    Assert.assertEquals(new AvroFlattener().flatten(original, false).toString(), expected.toString());
  }

  /** Record within array within array: no flattening is expected. */
  @Test
  public void testRecordWithinArrayWithinArray() throws IOException {
    assertFlattensTo("recordWithinArrayWithinArray");
  }

  /** Array within record within array within record: no flattening is expected. */
  @Test
  public void testArrayWithinRecordWithinArrayWithinRecord() throws IOException {
    assertFlattensTo("arrayWithinRecordWithinArrayWithinRecord");
  }

  /** Record within map within map: no flattening is expected. */
  @Test
  public void testRecordWithinMapWithinMap() throws IOException {
    assertFlattensTo("recordWithinMapWithinMap");
  }

  /**
   * A non-null default inside an option inside a record must survive flattening: the flattened
   * field keeps its original default value ("defaultString1" per the fixture).
   */
  @Test
  public void testNonNullDefaultWithinOptionWithinRecord() throws IOException {
    Schema original = readSchemaFromJsonFile("nonNullDefaultWithinOptionWithinRecord_original.json");
    Schema expected = readSchemaFromJsonFile("nonNullDefaultWithinOptionWithinRecord_flattened.json");
    Schema flattened = new AvroFlattener().flatten(original, false);
    Assert.assertEquals(AvroCompatibilityHelper.getSpecificDefaultValue(
        flattened.getField("parentFieldUnion__unionRecordMemberFieldUnion__superNestedFieldString1")).toString(),
        "defaultString1");
    Assert.assertEquals(flattened.toString(), expected.toString());
  }
}
| 4,027 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/AvroUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import com.linkedin.avroutil1.compatibility.AvroCompatibilityHelper;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.mapred.FsInput;
import org.apache.avro.util.internal.JacksonUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class AvroUtilsTest {
private static final String AVRO_DIR = "gobblin-utility/src/test/resources/avroDirParent/";
// Verifies AvroUtils.checkReaderWriterCompatibility. writerSchema1 differs from the reader
// only in namespace and the avro.java.string annotations, so it is compatible only when the
// third argument tells the check to ignore namespaces. writerSchema2 renames "name" to
// "name2", so it is incompatible regardless of the namespace flag.
@Test
public void testSchemaCompatiability() {
Schema readerSchema = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GobblinTrackingEvent_GaaS2\",\"namespace\":\"gobblin.metrics\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\",\"doc\":\"Time at which event was created.\",\"default\":0},{\"name\":\"namespace\",\"type\":[{\"type\":\"string\",\"avro.java.string\":\"String\"},\"null\"],\"doc\":\"Namespace used for filtering of events.\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"doc\":\"Event name.\"},{\"name\":\"metadata\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"avro.java.string\":\"String\"},\"doc\":\"Event metadata.\",\"default\":{}}]}");
Schema writerSchema1 = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GobblinTrackingEvent\",\"namespace\":\"org.apache.gobblin.metrics\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\",\"doc\":\"Time at which event was created.\",\"default\":0},{\"name\":\"namespace\",\"type\":[\"string\",\"null\"],\"doc\":\"Namespace used for filtering of events.\"},{\"name\":\"name\",\"type\":\"string\",\"doc\":\"Event name.\"},{\"name\":\"metadata\",\"type\":{\"type\":\"map\",\"values\":\"string\"},\"doc\":\"Event metadata.\",\"default\":{}}]}");
Schema writerSchema2 = new Schema.Parser().parse("{\"type\":\"record\",\"name\":\"GobblinTrackingEvent\",\"namespace\":\"org.apache.gobblin.metrics\",\"fields\":[{\"name\":\"timestamp\",\"type\":\"long\",\"doc\":\"Time at which event was created.\",\"default\":0},{\"name\":\"namespace\",\"type\":[\"string\",\"null\"],\"doc\":\"Namespace used for filtering of events.\"},{\"name\":\"name2\",\"type\":\"string\",\"doc\":\"Event name.\"},{\"name\":\"metadata\",\"type\":{\"type\":\"map\",\"values\":\"string\"},\"doc\":\"Event metadata.\",\"default\":{}}]}");
// Same field layout, different namespace: compatible only when ignoring namespaces.
Assert.assertTrue(AvroUtils.checkReaderWriterCompatibility(readerSchema, writerSchema1, true));
Assert.assertFalse(AvroUtils.checkReaderWriterCompatibility(readerSchema, writerSchema1, false));
// Renamed field: incompatible even when ignoring namespaces.
Assert.assertFalse(AvroUtils.checkReaderWriterCompatibility(readerSchema, writerSchema2, true));
}
/** The schema of a directory of Avro files should be resolvable via the local filesystem. */
@Test
public void testGetDirectorySchema() throws IOException {
  Configuration conf = new Configuration();
  conf.set("fs.default.name", "file:///");
  conf.set("mapred.job.tracker", "local");
  Assert.assertNotNull(AvroUtils.getDirectorySchema(new Path(AVRO_DIR), conf, true));
}
/**
 * Test nullifying fields for non-union types, including array.
 * A field present in the old schema but absent from the new one should reappear in the merged
 * schema as a nullable union ["null", originalType] with a null default.
 */
@Test
public void testNullifyFieldForNonUnionSchemaMerge() {
// Case 1: a primitive int field ("number") is removed in the new schema.
Schema oldSchema1 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, " + "{\"name\": \"number\", \"type\": \"int\"}" + "]}");
Schema newSchema1 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
// Expected: "number" re-added as ["null", "int"] with default null.
Schema expectedOutputSchema1 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, " + "{\"name\": \"number\", \"type\": [\"null\", \"int\"],\"default\":null}]}"
+ "]}");
Assert.assertEquals(expectedOutputSchema1, AvroUtils.nullifyFieldsForSchemaMerge(oldSchema1, newSchema1));
// Case 2: an array-typed field is removed in the new schema.
Schema oldSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": {\"type\": \"array\", \"items\": \"string\"}}" + "]}");
Schema newSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
// Expected: the array type wrapped as ["null", array] with default null.
Schema expectedOutputSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [\"null\", {\"type\": \"array\", \"items\": \"string\"}],\"default\":null}]}" + "]}");
Assert.assertEquals(expectedOutputSchema2, AvroUtils.nullifyFieldsForSchemaMerge(oldSchema2, newSchema2));
}
/**
 * Test nullifying fields for union type. One case does not have "null", and the other case already has a default "null".
 * In both cases the removed field should come back as a union whose first branch is "null",
 * with a null default; existing union members are preserved.
 */
@Test
public void testNullifyFieldForUnionSchemaMerge() {
// Case 1: removed field is a union WITHOUT "null"; "null" must be prepended.
Schema oldSchema1 =
new Schema.Parser()
.parse("{\"type\":\"record\", \"name\":\"test\", "
+ "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [{\"type\": \"string\"}, {\"type\": \"array\", \"items\": \"string\"}]}"
+ "]}");
Schema newSchema1 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
Schema expectedOutputSchema1 =
new Schema.Parser()
.parse("{\"type\":\"record\", \"name\":\"test\", "
+ "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [\"null\", {\"type\": \"string\"}, {\"type\": \"array\", \"items\": \"string\"}], \"default\": null}]}"
+ "]}");
// toString comparison keeps the assertion insensitive to Schema.equals nuances across Avro versions.
Assert.assertEquals(expectedOutputSchema1.toString(), AvroUtils.nullifyFieldsForSchemaMerge(oldSchema1, newSchema1).toString());
// Case 2: removed field is a union that ALREADY starts with "null"; shape is kept, default null added.
Schema oldSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [\"null\", {\"type\": \"array\", \"items\": \"string\"}]}" + "]}");
Schema newSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
Schema expectedOutputSchema2 =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [\"null\", {\"type\": \"array\", \"items\": \"string\"}], \"default\": null}" + "]}");
Assert.assertEquals(expectedOutputSchema2.toString(), AvroUtils.nullifyFieldsForSchemaMerge(oldSchema2, newSchema2).toString());
}
/**
 * Test nullifying fields when more than one field is removed in the new schema.
 * Each removed field ("color" and "number") should independently become a nullable union
 * with a null default in the merged output.
 */
@Test
public void testNullifyFieldForMultipleFieldsRemoved() {
Schema oldSchema =
new Schema.Parser()
.parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, " + "{\"name\": \"color\", \"type\": \"string\"}, "
+ "{\"name\": \"number\", \"type\": [{\"type\": \"string\"}, {\"type\": \"array\", \"items\": \"string\"}]}"
+ "]}");
// New schema keeps only "name"; both "color" and "number" are dropped.
Schema newSchema =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
Schema expectedOutputSchema =
new Schema.Parser()
.parse("{\"type\":\"record\", \"name\":\"test\", "
+ "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, "
+ "{\"name\": \"color\", \"type\": [\"null\", \"string\"], \"default\": null}, "
+ "{\"name\": \"number\", \"type\": [\"null\", {\"type\": \"string\"}, {\"type\": \"array\", \"items\": \"string\"}], \"default\": null}]}"
+ "]}");
Assert.assertEquals(expectedOutputSchema.toString(), AvroUtils.nullifyFieldsForSchemaMerge(oldSchema, newSchema).toString());
}
/**
 * When the old schema is not a record type, no nullify-merge is possible: the new schema
 * should be returned unchanged.
 */
@Test
public void testNullifyFieldWhenOldSchemaNotRecord() {
  Schema oldSchema = new Schema.Parser().parse("{\"type\": \"array\", \"items\": \"string\"}");
  Schema newSchema =
      new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
          + "{\"name\": \"name\", \"type\": \"string\"}" + "]}");
  Assert.assertEquals(AvroUtils.nullifyFieldsForSchemaMerge(oldSchema, newSchema), newSchema);
}
/** Renaming a schema should preserve its fields and be reversible by switching the name back. */
@Test
public void testSwitchName() {
  final String originalName = "originalName";
  final String newName = "newName";
  Schema original = SchemaBuilder.record(originalName).fields()
      .requiredDouble("double").optionalFloat("float").endRecord();

  Schema renamed = AvroUtils.switchName(original, newName);
  Assert.assertEquals(renamed.getName(), newName);
  for (Schema.Field field : renamed.getFields()) {
    Assert.assertEquals(field, original.getField(field.name()));
  }
  Assert.assertEquals(newName, AvroUtils.switchName(original, newName).getName());
  // Round trip: switching back to the original name reproduces the original schema.
  Assert.assertEquals(original,
      AvroUtils.switchName(AvroUtils.switchName(original, newName), original.getName()));
}
/**
 * Switching a schema's namespace should rewrite only the namespace, preserving the record
 * name, fields (including per-field props and docs) and schema-level props.
 */
@Test
public void testSwitchNamespace() {
  String originalNamespace = "originalNamespace";
  String originalName = "originalName";
  String newNamespace = "newNamespace";
  Schema schema = Schema.createRecord(originalName, "", originalNamespace, false);
  schema.addProp("prop1", "val1");
  schema.addProp("prop2", "val2");
  List<Schema.Field> fieldList = Lists.newArrayList();
  Schema.Field field1 =
      AvroCompatibilityHelper.createSchemaField("key", Schema.create(Schema.Type.LONG), "", 0L);
  field1.addProp("primaryKey", "true");
  fieldList.add(field1);
  Schema.Field field2 =
      AvroCompatibilityHelper.createSchemaField("double", Schema.create(Schema.Type.DOUBLE), "", 0.0);
  fieldList.add(field2);
  schema.setFields(Lists.newArrayList(fieldList));

  Map<String, String> map = Maps.newHashMap();
  map.put(originalNamespace, newNamespace);
  Schema newSchema = AvroUtils.switchNamespace(schema, map);

  Assert.assertEquals(newSchema.getNamespace(), newNamespace);
  Assert.assertEquals(newSchema.getName(), originalName);
  for (Schema.Field field : newSchema.getFields()) {
    // Field-level props and docs must survive the namespace switch; compare serialized form.
    Assert.assertEquals(field.toString(), schema.getField(field.name()).toString());
  }
  // assertEquals (rather than assertTrue(a.equals(b))) reports the differing props on failure.
  // Debug System.out.println calls and the unused boxed Boolean were removed.
  Assert.assertEquals(newSchema.getObjectProps(), schema.getObjectProps());
}
// serializeAsPath: path-hostile characters in field values (':', '\\', ' ', and optionally '/')
// are replaced with '_'. Based on the assertions below: the second argument controls whether
// components are emitted as "name=value", and the third controls whether '/' inside values is
// also sanitized (exact parameter names live in AvroUtils — confirm there).
@Test public void testSerializeAsPath() throws Exception {
Schema schema =
new Schema.Parser().parse("{\"type\":\"record\", \"name\":\"test\", " + "\"fields\":["
+ "{\"name\": \"name\", \"type\": \"string\"}, " + "{\"name\": \"title\", \"type\": \"string\"}" + "]}");
GenericRecord partition = new GenericData.Record(schema);
// Value deliberately contains '/', ':', '\', and a space.
partition.put("name", "a/b:c\\d e");
partition.put("title", "title");
Assert.assertEquals(AvroUtils.serializeAsPath(partition, true, true), new Path("name=a_b_c_d_e/title=title"));
Assert.assertEquals(AvroUtils.serializeAsPath(partition, false, true), new Path("a_b_c_d_e/title"));
// With the last flag false, '/' is kept as a path separator while other characters are replaced.
Assert.assertEquals(AvroUtils.serializeAsPath(partition, false, false), new Path("a/b_c_d_e/title"));
}
// sanitizeSchemaString escapes characters that would break a schema string when embedded in a
// quoted context: per the assertions below, ';', '\'', '"', and '\' each gain a preceding
// backslash. Length checks pin the exact number of escape characters inserted.
@Test
public void testStringEscaping() {
String invalidString = "foo;foo'bar";
String expectedString = "foo\\;foo\\'bar";
String actualString = AvroUtils.sanitizeSchemaString(invalidString);
Assert.assertEquals(actualString, expectedString);
// Verify that there's only one slash being added.
Assert.assertEquals(actualString.length(), invalidString.length() + 2);
// An instance of invalid string that contains a slash followed by a quote, both of which should be escaped.
String invalidStringWithSlash = "abc\\\"";
// Should have a slash before the actual slash, and a slash before the actual quote.
actualString = AvroUtils.sanitizeSchemaString(invalidStringWithSlash);
// Meaning for each slash:
// first two: escape in java and the actual escape the output string
// second pair: escape in java and the actual slash
// third pair: escape for the actual quote
// last pair: java escape slash with the actual quote.
expectedString = "abc\\\\\\\"";
Assert.assertEquals(actualString, expectedString);
Assert.assertEquals(actualString.length(), invalidStringWithSlash.length() + 2);
// Two literal backslashes ("\\d+" as runtime text) become four: each is escaped once.
String stringWithBackslash = "\\\\d+";
actualString = AvroUtils.sanitizeSchemaString(stringWithBackslash);
Assert.assertEquals(actualString, "\\\\\\\\d+");
Assert.assertEquals(actualString.length(), stringWithBackslash.length() + 2);
}
/**
 * Reads all records from the Avro data file at {@code path}.
 *
 * @param path filesystem path to an Avro data file
 * @return all records from the file, in file order
 * @throws IOException if the file cannot be opened or read
 */
public static List<GenericRecord> getRecordFromFile(String path)
    throws IOException {
  Configuration config = new Configuration();
  SeekableInput input = new FsInput(new Path(path), config);
  DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
  List<GenericRecord> records = new ArrayList<>();
  // try-with-resources: the original closed the reader only on the success path, leaking it
  // (and the underlying input) if iteration threw.
  try (FileReader<GenericRecord> fileReader = DataFileReader.openReader(input, datumReader)) {
    for (GenericRecord datum : fileReader) {
      records.add(datum);
    }
  }
  return records;
}
/**
 * This is a test to validate support of maps in {@link org.apache.gobblin.util.AvroUtils#getFieldValue(GenericRecord, String)}
 * and {@link org.apache.gobblin.util.AvroUtils#getFieldSchema(Schema, String)}:
 * the middle component of the dotted location ("stringKey") is a map key.
 * @throws IOException
 */
@Test
public void testGetObjectFromMap()
    throws IOException {
  final String TEST_FIELD_LOCATION = "Map.stringKey.Field";
  // AVRO_DIR is a static constant; don't qualify it with "this".
  String avroFilePath = AVRO_DIR + "avroDir/avroUtilsTestFile.avro";
  GenericRecord record = getRecordFromFile(avroFilePath).get(0);
  Assert.assertEquals(AvroUtils.getFieldValue(record, TEST_FIELD_LOCATION).get().toString(), "stringValue2");
  Assert.assertEquals(AvroUtils.getFieldSchema(record.getSchema(), TEST_FIELD_LOCATION).get().getType(),
      Schema.Type.STRING);
}
/**
 * In case of complex data types in union {@link AvroUtils#getFieldSchema(Schema, String)} should throw {@link AvroRuntimeException}
 * @throws IOException
 */
@Test(expectedExceptions = AvroRuntimeException.class)
public void testComplexTypesInUnionNotSupported()
    throws IOException {
  final String TEST_LOCATION = "TestUnionObject.RecordInUnion";
  // AVRO_DIR is a static constant; don't qualify it with "this".
  String avroFilePath = AVRO_DIR + "avroDir/avroUtilsTestFile.avro";
  GenericRecord record = getRecordFromFile(avroFilePath).get(0);
  // Expected to throw: the union at this location contains a record type.
  AvroUtils.getFieldSchema(record.getSchema(), TEST_LOCATION);
}
/** Field lookup should traverse a [null, record] union as if it were the record itself. */
@Test
public void testUnionWithNull() {
  Schema nestedRecord = SchemaBuilder.record("nested").fields()
      .requiredDouble("double").requiredString("string").endRecord();
  Schema union = SchemaBuilder.unionOf().nullType().and().type(nestedRecord).endUnion();
  Schema schema = SchemaBuilder.record("record").fields().name("union").type(union).noDefault().endRecord();

  // Schema traversal: "union.double" resolves through the union to the nested double field.
  Assert.assertEquals(AvroUtils.getFieldSchema(schema, "union.double").get().getType(), Schema.Type.DOUBLE);

  // Value traversal: populate a record and read a nested field through the union.
  GenericRecord nested = new GenericData.Record(nestedRecord);
  nested.put("double", 10);
  nested.put("string", "testString");
  GenericRecord record = new GenericData.Record(schema);
  record.put("union", nested);
  Assert.assertEquals(AvroUtils.getFieldValue(record, "union.string").get().toString(), "testString");
}
/** Decorating a schema with one primitive field keeps the old fields and appends the new one. */
@Test
public void testDecorateSchemaWithSingleField() {
  Schema inputRecord = SchemaBuilder.record("test").fields()
      .requiredInt("numeric1").requiredString("string1").endRecord();
  Schema fieldSchema = SchemaBuilder.builder().intType();
  Schema.Field field = new Schema.Field("newField", fieldSchema, "", null);
  Schema outputRecord = AvroUtils.decorateRecordSchema(inputRecord, Collections.singletonList(field));
  checkFieldsMatch(inputRecord, outputRecord);
  Schema.Field added = outputRecord.getField("newField");
  Assert.assertNotNull(added);
  Assert.assertEquals(added.schema(), fieldSchema);
}
/** Asserts that every field of {@code inputRecord} appears unchanged in {@code outputRecord}. */
private void checkFieldsMatch(Schema inputRecord, Schema outputRecord) {
  for (Schema.Field f : inputRecord.getFields()) {
    Assert.assertEquals(f, outputRecord.getField(f.name()));
  }
}
// decorateRecordSchema must carry over string-valued props at both the schema level
// ("topLevelProp") and the field level ("innerProp" on "integer1").
@Test
public void testDecorateSchemaWithStringProperties() {
Schema inputRecord = SchemaBuilder.record("test").fields()
.name("integer1")
.prop("innerProp", "innerVal")
.type().intBuilder().endInt().noDefault()
.requiredString("string1")
.endRecord();
inputRecord.addProp("topLevelProp", "topLevelVal");
Schema.Field additionalField = getTestInnerRecordField();
Schema outputSchema = AvroUtils.decorateRecordSchema(inputRecord, Collections.singletonList(additionalField));
// Original fields survive unchanged, and both levels of props are preserved.
checkFieldsMatch(inputRecord, outputSchema);
Assert.assertEquals(outputSchema.getProp("topLevelProp"), "topLevelVal");
Assert.assertEquals(outputSchema.getField("integer1").getProp("innerProp"), "innerVal");
}
// Same as testDecorateSchemaWithStringProperties, but the schema-level prop is a JSON
// object rather than a plain string; it must round-trip through decorateRecordSchema intact.
@Test
public void testDecorateSchemaWithObjectProperties() throws IOException {
String customPropertyString = "{\"custom\": {\"prop1\": \"val1\"}}";
JsonNode customPropertyValue = new ObjectMapper().readTree(customPropertyString);
Schema inputRecord = SchemaBuilder.record("test").fields()
.name("integer1")
.prop("innerProp", "innerVal")
.type().intBuilder().endInt().noDefault()
.requiredString("string1")
.endRecord();
inputRecord.addProp("topLevelProp", customPropertyValue);
Schema.Field additionalField = getTestInnerRecordField();
Schema outputSchema = AvroUtils.decorateRecordSchema(inputRecord, Collections.singletonList(additionalField));
checkFieldsMatch(inputRecord, outputSchema);
// Compare the object prop via the input schema's own rendering of it.
Assert.assertEquals(outputSchema.getProp("topLevelProp"), inputRecord.getProp("topLevelProp"));
Assert.assertEquals(outputSchema.getField("integer1").getProp("innerProp"), "innerVal");
}
/** Builds the "innerRecord" field (an int + string record) shared by the decorate-schema tests. */
private Schema.Field getTestInnerRecordField() {
  Schema fieldSchema = SchemaBuilder.record("innerRecord").fields()
      .requiredInt("innerInt").requiredString("innerString").endRecord();
  return new Schema.Field("innerRecord", fieldSchema, "", null);
}
/** Decorating a schema with a record-typed field keeps the old fields and appends the record. */
@Test
public void testDecorateSchemaWithSingleRecord() {
  Schema inputRecord = SchemaBuilder.record("test").fields()
      .requiredInt("numeric1").requiredString("string1").endRecord();
  Schema fieldSchema = SchemaBuilder.record("innerRecord").fields()
      .requiredInt("innerInt").requiredString("innerString").endRecord();
  Schema.Field field = new Schema.Field("innerRecord", fieldSchema, "", null);
  Schema outputRecord = AvroUtils.decorateRecordSchema(inputRecord, Collections.singletonList(field));
  checkFieldsMatch(inputRecord, outputRecord);
  Schema.Field added = outputRecord.getField("innerRecord");
  Assert.assertNotNull(added);
  Assert.assertEquals(added.schema(), fieldSchema);
}
// decorateRecord should merge the original record's values with the supplied new-field values
// under the decorated schema; all original values remain readable.
@Test
public void testDecorateRecordWithPrimitiveField() {
Schema inputRecordSchema = SchemaBuilder.record("test").fields()
.name("integer1")
.prop("innerProp", "innerVal")
.type().intBuilder().endInt().noDefault()
.requiredString("string1")
.endRecord();
GenericRecord inputRecord = new GenericData.Record(inputRecordSchema);
inputRecord.put("integer1", 10);
inputRecord.put("string1", "hello");
// Decorated schema adds one int field; the value for it is supplied via the newFields map.
Schema outputRecordSchema = AvroUtils.decorateRecordSchema(inputRecordSchema, Collections.singletonList(new Schema.Field("newField", SchemaBuilder.builder().intType(), "test field", null)));
Map<String, Object> newFields = new HashMap<>();
newFields.put("newField", 5);
GenericRecord outputRecord = AvroUtils.decorateRecord(inputRecord, newFields, outputRecordSchema);
Assert.assertEquals(outputRecord.get("newField"), 5);
Assert.assertEquals(outputRecord.get("integer1"), 10);
Assert.assertEquals(outputRecord.get("string1"), "hello");
}
// decorateRecord with a record-typed (nested) added field: the nested value must be readable
// from the decorated record AND survive an Avro binary serialize/deserialize round trip.
@Test
public void testDecorateRecordWithNestedField() throws IOException {
Schema inputRecordSchema = SchemaBuilder.record("test").fields()
.name("integer1")
.prop("innerProp", "innerVal")
.type().intBuilder().endInt().noDefault()
.requiredString("string1")
.endRecord();
GenericRecord inputRecord = new GenericData.Record(inputRecordSchema);
inputRecord.put("integer1", 10);
inputRecord.put("string1", "hello");
// The added field is itself a record ("metadata" with source + timestamp).
Schema nestedFieldSchema = SchemaBuilder.builder().record("metadata")
.fields()
.requiredString("source")
.requiredLong("timestamp")
.endRecord();
Schema.Field nestedField = new Schema.Field("metadata", nestedFieldSchema, "I am a nested field", null);
Schema outputRecordSchema = AvroUtils.decorateRecordSchema(inputRecordSchema, Collections.singletonList(nestedField));
Map<String, Object> newFields = new HashMap<>();
GenericData.Record metadataRecord = new GenericData.Record(nestedFieldSchema);
metadataRecord.put("source", "oracle");
metadataRecord.put("timestamp", 1234L);
newFields.put("metadata", metadataRecord);
GenericRecord outputRecord = AvroUtils.decorateRecord(inputRecord, newFields, outputRecordSchema);
Assert.assertEquals(outputRecord.get("integer1"), 10);
Assert.assertEquals(outputRecord.get("string1"), "hello");
Assert.assertEquals(outputRecord.get("metadata"), metadataRecord);
// Test that serializing and deserializing this record works.
GenericDatumWriter writer = new GenericDatumWriter(outputRecordSchema);
ByteArrayOutputStream baos = new ByteArrayOutputStream(1000);
Encoder binaryEncoder = EncoderFactory.get().binaryEncoder(baos, null);
writer.write(outputRecord, binaryEncoder);
binaryEncoder.flush();
baos.close();
ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
Decoder binaryDecoder = DecoderFactory.get().binaryDecoder(bais, null);
GenericDatumReader reader = new GenericDatumReader(outputRecordSchema);
GenericRecord deserialized = (GenericRecord) reader.read(null, binaryDecoder);
Assert.assertEquals(deserialized.get("integer1"), 10);
Assert.assertEquals(deserialized.get("string1").toString(), "hello"); //extra toString: avro returns Utf8
Assert.assertEquals(deserialized.get("metadata"), metadataRecord);
}
// overrideNameAndNamespace on a record: with an empty override map the name changes but the
// namespace is kept; with a mapping for the input namespace, both are rewritten. Field values
// must be preserved in either case.
@Test
public void overrideNameAndNamespaceTest() throws IOException{
String inputName = "input_name";
String inputNamespace = "input_namespace";
String outputName = "output_name";
String outputNamespace = "output_namespace";
Schema inputRecordSchema = SchemaBuilder.record(inputName).namespace(inputNamespace).fields()
.name("integer1")
.type().intBuilder().endInt().noDefault()
.endRecord();
GenericRecord inputRecord = new GenericData.Record(inputRecordSchema);
inputRecord.put("integer1", 10);
// Case 1: empty override map — only the name changes.
GenericRecord outputRecord = AvroUtils.overrideNameAndNamespace(inputRecord, outputName, Optional.of(Collections.EMPTY_MAP));
Assert.assertEquals(outputRecord.getSchema().getName(), outputName);
Assert.assertEquals(outputRecord.getSchema().getNamespace(), inputNamespace);
Assert.assertEquals(outputRecord.get("integer1"), 10);
// Case 2: map input namespace -> output namespace — both name and namespace change.
Map<String,String> namespaceOverrideMap = new HashMap<>();
namespaceOverrideMap.put(inputNamespace,outputNamespace);
outputRecord = AvroUtils.overrideNameAndNamespace(inputRecord, outputName, Optional.of(namespaceOverrideMap));
Assert.assertEquals(outputRecord.getSchema().getName(), outputName);
Assert.assertEquals(outputRecord.getSchema().getNamespace(), outputNamespace);
Assert.assertEquals(outputRecord.get("integer1"), 10);
}
/** Overriding name and namespace on a bare schema should apply the namespace mapping. */
@Test
public void overrideSchemaNameAndNamespaceTest() {
  String inputName = "input_name";
  String inputNamespace = "input_namespace";
  String outputName = "output_name";
  String outputNamespace = "output_namespace";
  Schema inputSchema = SchemaBuilder.record(inputName).namespace(inputNamespace).fields()
      .name("integer1").type().intBuilder().endInt().noDefault()
      .endRecord();
  Map<String, String> namespaceOverrides = new HashMap<>();
  namespaceOverrides.put(inputNamespace, outputNamespace);
  Schema result = AvroUtils.overrideNameAndNamespace(inputSchema, outputName, Optional.of(namespaceOverrides));
  Assert.assertEquals(result.getName(), outputName);
  Assert.assertEquals(result.getNamespace(), outputNamespace);
}
@Test
public void testisSchemaRecursive()
    throws IOException {
  // Only the "norecursion" fixture is expected to be free of recursive references.
  for (String scenario : new String[]{"norecursion", "simple", "union", "multiple", "nested", "array", "map"}) {
    System.out.println("Processing scenario for " + scenario);
    Schema fixtureSchema = new Schema.Parser()
        .parse(getClass().getClassLoader().getResourceAsStream("recursive_schemas/recursive_" + scenario + ".avsc"));
    boolean recursive = AvroUtils.isSchemaRecursive(fixtureSchema, Optional.of(log));
    if (scenario.equals("norecursion")) {
      Assert.assertFalse(recursive, "Schema for scenario " + scenario + " should not be recursive");
    } else {
      Assert.assertTrue(recursive, "Schema for scenario " + scenario + " should be recursive");
    }
  }
}
/**
 * Verifies {@link AvroUtils#dropRecursiveFields} against fixture schemas: every recursive
 * field must be removed, the transformed schema must match a hand-written "_solution" schema,
 * and the reported recursive field names must match the expected list embedded in the fixture
 * itself (its "recursive_fields" schema property).
 */
@Test
public void testDropRecursiveSchema()
    throws IOException {
  for (String scenario : new String[]{"norecursion", "simple", "union", "multiple", "nested", "array", "map"}) {
    System.out.println("Processing scenario for " + scenario);
    Schema inputSchema = new Schema.Parser().parse(getClass().getClassLoader()
        .getResourceAsStream("recursive_schemas/recursive_" + scenario + ".avsc"));
    Schema solutionSchema = new Schema.Parser().parse(getClass().getClassLoader()
        .getResourceAsStream("recursive_schemas/recursive_" + scenario + "_solution.avsc"));
    // get the answer from the input schema (test author needs to provide this)
    // Avro 1.9 compatible change - replaced deprecated public api getJsonProps with getObjectProps
    // Use internal JacksonUtils to convert object to the corresponding JsonNode (ArrayNode)
    ArrayNode foo = (ArrayNode) JacksonUtils.toJsonNode(inputSchema.getObjectProp(
        "recursive_fields"));
    HashSet<String> answers = new HashSet<>();
    for (JsonNode fieldsWithRecursion: foo) {
      answers.add(fieldsWithRecursion.asText());
    }
    Pair<Schema, List<AvroUtils.SchemaEntry>> results = AvroUtils.dropRecursiveFields(inputSchema);
    List<AvroUtils.SchemaEntry> fieldsWithRecursion = results.getSecond();
    Schema transformedSchema = results.getFirst();
    // Prove that fields with recursion are no longer present
    for (String answer: answers) {
      Assert.assertFalse(AvroUtils.getField(transformedSchema, answer).isPresent());
    }
    // Additionally compare schema with solution schema
    Assert.assertEquals(solutionSchema, transformedSchema,"Transformed schema differs from solution schema for scenario " + scenario);
    Set<String> recursiveFieldNames = fieldsWithRecursion.stream().map(se -> se.fieldName).collect(Collectors.toSet());
    Assert.assertEquals(recursiveFieldNames, answers,
        "Found recursive fields differ from answers listed in the schema for scenario " + scenario);
  }
}
}
| 4,028 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/RatedControlledFileSystemTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.Meter;
import com.google.common.math.DoubleMath;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.util.limiter.Limiter;
import org.apache.gobblin.util.limiter.RateBasedLimiter;
/**
 * Unit tests for {@link RateControlledFileSystem}.
 */
@Slf4j
@Test(groups = { "gobblin.util" })
public class RatedControlledFileSystemTest {
  // NOTE(review): this field is never used within the class; retained as-is.
  private static final Random RANDOM = new Random();
  private RateControlledFileSystem rateControlledFs;

  /** Test double that substitutes an externally supplied {@link Limiter} for the built-in one. */
  private class TestRateControlledFileSystem extends RateControlledFileSystem {
    private final Limiter limiter;
    public TestRateControlledFileSystem(FileSystem fs, long limitPerSecond, Limiter limiter) {
      super(fs, limitPerSecond);
      this.limiter = limiter;
    }
    @Override
    protected Limiter getRateLimiter() {
      return this.limiter;
    }
  }

  /** Wraps the local file system with a 20-operations-per-second rate limit. */
  @BeforeClass
  public void setUp() throws IOException, ExecutionException {
    Limiter limiter = new RateBasedLimiter(20);
    this.rateControlledFs = new TestRateControlledFileSystem(FileSystem.getLocal(new Configuration()), 20, limiter);
    this.rateControlledFs.startRateControl();
  }

  /** Issues 500 exists() calls and checks the observed mean rate stays near the configured limit. */
  @Test
  public void testFsOperation() throws IOException {
    Meter meter = new Meter();
    Path fakePath = new Path("fakePath");
    for (int i = 0; i < 500; i++) {
      Assert.assertFalse(this.rateControlledFs.exists(fakePath));
      meter.mark();
    }
    // Assert a fuzzy equal with 10% tolerance (20 +/- 2 ops/sec).
    Assert.assertTrue(DoubleMath.fuzzyEquals(meter.getMeanRate(), 20d, 20d * 0.10));
  }

  @AfterClass
  public void tearDown() throws IOException {
    this.rateControlledFs.close();
  }
}
| 4,029 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/TestImmutableProperties.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
/**
 * Test {@link ImmutableProperties}.
 */
public class TestImmutableProperties {

  ImmutableProperties props;

  /** Wraps a three-entry {@link Properties} instance in the immutable view under test. */
  @BeforeTest
  public void setUp() {
    Properties backing = new Properties();
    backing.setProperty("a", "1");
    backing.setProperty("b", "2");
    backing.setProperty("c", "3");
    props = new ImmutableProperties(backing);
  }

  /** Read accessors work, including defaults for both present and absent keys. */
  @Test
  public void testGetMethods() {
    Assert.assertEquals(props.getProperty("a"), "1");
    Assert.assertEquals(props.get("b"), "2");
    // The default is ignored when the key exists...
    Assert.assertEquals(props.getProperty("c", "4"), "3");
    // ...and returned when it does not.
    Assert.assertEquals(props.getProperty("d", "default"), "default");
  }

  /** Every mutation attempt must fail with {@link UnsupportedOperationException}. */
  @Test(expectedExceptions = UnsupportedOperationException.class)
  public void testSetMethods() {
    props.setProperty("a", "2");
  }

  @Test(expectedExceptions = UnsupportedOperationException.class)
  public void testPutMethods() {
    props.put("b", "3");
  }

  @Test(expectedExceptions = UnsupportedOperationException.class)
  public void testRemoveMethods() {
    props.remove("c");
  }
}
| 4,030 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/SerializationUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * Unit tests for {@link SerializationUtils}.
 *
 * @author Yinan Li
 */
@Test(groups = { "gobblin.util" })
public class SerializationUtilsTest {

  private FileSystem fs;
  private Path outputPath;

  /** Builds a two-property {@link WorkUnit} fixture. */
  private static WorkUnit buildWorkUnit(String fooValue, String intKey, int intValue) {
    WorkUnit workUnit = WorkUnit.createEmpty();
    workUnit.setProp("foo", fooValue);
    workUnit.setProp(intKey, intValue);
    return workUnit;
  }

  @BeforeClass
  public void setUp() throws IOException {
    this.fs = FileSystem.getLocal(new Configuration());
    this.outputPath = new Path(SerializationUtilsTest.class.getSimpleName());
  }

  /** Writes two work units to disk; round-tripping is verified by the dependent test. */
  @Test
  public void testSerializeState() throws IOException {
    SerializationUtils.serializeState(this.fs, new Path(this.outputPath, "wu1"), buildWorkUnit("bar", "a", 10));
    SerializationUtils.serializeState(this.fs, new Path(this.outputPath, "wu2"), buildWorkUnit("baz", "b", 20));
  }

  /** Reads both work units back and checks every property survived serialization. */
  @Test(dependsOnMethods = "testSerializeState")
  public void testDeserializeState() throws IOException {
    WorkUnit first = WorkUnit.createEmpty();
    WorkUnit second = WorkUnit.createEmpty();
    SerializationUtils.deserializeState(this.fs, new Path(this.outputPath, "wu1"), first);
    SerializationUtils.deserializeState(this.fs, new Path(this.outputPath, "wu2"), second);
    Assert.assertEquals(first.getPropertyNames().size(), 2);
    Assert.assertEquals(first.getProp("foo"), "bar");
    Assert.assertEquals(first.getPropAsInt("a"), 10);
    Assert.assertEquals(second.getPropertyNames().size(), 2);
    Assert.assertEquals(second.getProp("foo"), "baz");
    Assert.assertEquals(second.getPropAsInt("b"), 20);
  }

  /** Removes the test output directory. */
  @AfterClass
  public void tearDown() throws IOException {
    if (this.fs != null && this.outputPath != null) {
      this.fs.delete(this.outputPath, true);
    }
  }
}
| 4,031 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/HadoopUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.IOException;
import java.net.URI;
import java.nio.file.AccessDeniedException;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.TrashPolicy;
import org.apache.hadoop.fs.permission.FsPermission;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.State;
/** Unit tests for {@link HadoopUtils}. */
@Test(groups = { "gobblin.util" })
public class HadoopUtilsTest {

  /** Round-trips writer file permissions expressed as a short through State (de)serialization. */
  @Test
  public void fsShortSerializationTest() {
    State state = new State();
    short mode = 420;
    FsPermission perms = new FsPermission(mode);
    HadoopUtils.serializeWriterFilePermissions(state, 0, 0, perms);
    FsPermission deserializedPerms = HadoopUtils.deserializeWriterFilePermissions(state, 0, 0);
    Assert.assertEquals(mode, deserializedPerms.toShort());
  }

  /** Round-trips writer file permissions expressed as an octal string ("0755"). */
  @Test
  public void fsOctalSerializationTest() {
    State state = new State();
    String mode = "0755";
    HadoopUtils.setWriterFileOctalPermissions(state, 0, 0, mode);
    FsPermission deserializedPerms = HadoopUtils.deserializeWriterFilePermissions(state, 0, 0);
    // Parse the octal string base-8 so it compares against the short form.
    Assert.assertEquals(Integer.parseInt(mode, 8), deserializedPerms.toShort());
  }

  /** Recursive rename moves a staged directory tree (including nested files) into the target. */
  @Test
  public void testRenameRecursively() throws Exception {
    final Path hadoopUtilsTestDir = new Path(Files.createTempDir().getAbsolutePath(), "HadoopUtilsTestDir");
    FileSystem fs = FileSystem.getLocal(new Configuration());
    try {
      fs.mkdirs(hadoopUtilsTestDir);
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRename/a/b/c"));
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c"));
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/e"));
      fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/t1.txt"));
      fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/e/t2.txt"));
      HadoopUtils.renameRecursively(fs, new Path(hadoopUtilsTestDir, "testRenameStaging"), new Path(hadoopUtilsTestDir, "testRename"));
      Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testRename/a/b/c/t1.txt")));
      Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testRename/a/b/c/e/t2.txt")));
    } finally {
      fs.delete(hadoopUtilsTestDir, true);
    }
  }

  /**
   * Same as {@link #testRenameRecursively}, but the existence check on the target directory
   * throws {@link AccessDeniedException}; the rename should still succeed.
   */
  @Test
  public void testRenameRecursivelyWithAccessDeniedOnExistenceCheck() throws Exception {
    final Path hadoopUtilsTestDir = new Path(Files.createTempDir().getAbsolutePath(), "HadoopUtilsTestDir");
    FileSystem fs = Mockito.spy(FileSystem.getLocal(new Configuration()));
    Path targetDir = new Path(hadoopUtilsTestDir, "testRename");
    // Simulate a permission failure when checking whether the target directory exists.
    Mockito.doThrow(new AccessDeniedException("Test")).when(fs).exists(targetDir);
    try {
      fs.mkdirs(hadoopUtilsTestDir);
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRename/a/b/c"));
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c"));
      fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/e"));
      fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/t1.txt"));
      fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging/a/b/c/e/t2.txt"));
      HadoopUtils.renameRecursively(fs, new Path(hadoopUtilsTestDir, "testRenameStaging"), targetDir);
      Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testRename/a/b/c/t1.txt")));
      Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testRename/a/b/c/e/t2.txt")));
    } finally {
      fs.delete(hadoopUtilsTestDir, true);
    }
  }

  /**
   * Exercises renameRecursively over a fully mocked FileSystem where every call sleeps 50ms,
   * to observe rename throughput on a large (10k-entry) directory. Performance group only.
   */
  @Test(groups = { "performance" })
  public void testRenamePerformance() throws Exception {
    FileSystem fs = Mockito.mock(FileSystem.class);
    Path sourcePath = new Path("/source");
    Path s1 = new Path(sourcePath, "d1");
    FileStatus[] sourceStatuses = new FileStatus[10000];
    FileStatus[] targetStatuses = new FileStatus[1000];
    for (int i = 0; i < sourceStatuses.length; i++) {
      sourceStatuses[i] = getFileStatus(new Path(s1, "path" + i), false);
    }
    for (int i = 0; i < targetStatuses.length; i++) {
      targetStatuses[i] = getFileStatus(new Path(s1, "path" + i), false);
    }
    Mockito.when(fs.getUri()).thenReturn(new URI("file:///"));
    Mockito.when(fs.getFileStatus(sourcePath)).thenAnswer(getDelayedAnswer(getFileStatus(sourcePath, true)));
    Mockito.when(fs.exists(sourcePath)).thenAnswer(getDelayedAnswer(true));
    Mockito.when(fs.listStatus(sourcePath)).thenAnswer(getDelayedAnswer(new FileStatus[]{getFileStatus(s1, true)}));
    Mockito.when(fs.exists(s1)).thenAnswer(getDelayedAnswer(true));
    Mockito.when(fs.listStatus(s1)).thenAnswer(getDelayedAnswer(sourceStatuses));
    Path target = new Path("/target");
    Path s1Target = new Path(target, "d1");
    Mockito.when(fs.exists(target)).thenAnswer(getDelayedAnswer(true));
    Mockito.when(fs.exists(s1Target)).thenAnswer(getDelayedAnswer(true));
    Mockito.when(fs.mkdirs(Mockito.any(Path.class))).thenAnswer(getDelayedAnswer(true));
    Mockito.when(fs.rename(Mockito.any(Path.class), Mockito.any(Path.class))).thenAnswer(getDelayedAnswer(true));
    HadoopUtils.renameRecursively(fs, sourcePath, target);
  }

  /** Returns an {@link Answer} that sleeps 50ms before yielding {@code result}. */
  private <T> Answer<T> getDelayedAnswer(final T result) throws Exception {
    return new Answer<T>() {
      @Override
      public T answer(InvocationOnMock invocation)
          throws Throwable {
        Thread.sleep(50);
        return result;
      }
    };
  }

  /** Builds a minimal {@link FileStatus} for {@code path}; {@code dir} marks it as a directory. */
  private FileStatus getFileStatus(Path path, boolean dir) {
    return new FileStatus(1, dir, 1, 1, 1, path);
  }

  /**
   * Races renameRecursively against safeRenameRecursively into the same target, repeatedly,
   * and asserts neither thread fails and all files end up in the merged target tree.
   */
  @Test
  public void testSafeRenameRecursively() throws Exception {
    final Logger log = LoggerFactory.getLogger("HadoopUtilsTest.testSafeRenameRecursively");
    final Path hadoopUtilsTestDir = new Path(Files.createTempDir().getAbsolutePath(), "HadoopUtilsTestDir");
    final FileSystem fs = FileSystem.getLocal(new Configuration());
    try {
      // do many iterations to catch rename race conditions
      for (int i = 0; i < 100; i++) {
        fs.mkdirs(hadoopUtilsTestDir);
        fs.mkdirs(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c"));
        fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging1/a/b/c"));
        fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging1/a/b/c/e"));
        fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging1/a/b/c/t1.txt"));
        fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging1/a/b/c/e/t2.txt"));
        fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging2/a/b/c"));
        fs.mkdirs(new Path(hadoopUtilsTestDir, "testRenameStaging2/a/b/c/e"));
        fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging2/a/b/c/t3.txt"));
        fs.create(new Path(hadoopUtilsTestDir, "testRenameStaging2/a/b/c/e/t4.txt"));
        ExecutorService executorService = Executors.newFixedThreadPool(2);
        // Errors thrown inside the runnables are captured here and asserted on afterwards.
        final Throwable[] runnableErrors = {null, null};
        Future<?> renameFuture = executorService.submit(new Runnable() {
          @Override
          public void run() {
            try {
              HadoopUtils.renameRecursively(fs, new Path(hadoopUtilsTestDir, "testRenameStaging1"), new Path(
                  hadoopUtilsTestDir, "testSafeRename"));
            } catch (Throwable e) {
              log.error("Rename error: " + e, e);
              runnableErrors[0] = e;
            }
          }
        });
        Future<?> safeRenameFuture = executorService.submit(new Runnable() {
          @Override
          public void run() {
            try {
              HadoopUtils.safeRenameRecursively(fs, new Path(hadoopUtilsTestDir, "testRenameStaging2"), new Path(
                  hadoopUtilsTestDir, "testSafeRename"));
            } catch (Throwable e) {
              log.error("Safe rename error: " + e, e);
              runnableErrors[1] = e;
            }
          }
        });
        // Wait for the executions to complete
        renameFuture.get(10, TimeUnit.SECONDS);
        safeRenameFuture.get(10, TimeUnit.SECONDS);
        executorService.shutdownNow();
        Assert.assertNull(runnableErrors[0], "Runnable 0 error: " + runnableErrors[0]);
        Assert.assertNull(runnableErrors[1], "Runnable 1 error: " + runnableErrors[1]);
        Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c/t1.txt")));
        Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c/t3.txt")));
        Assert.assertTrue(!fs.exists(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c/e/e/t2.txt")));
        Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c/e/t2.txt")));
        Assert.assertTrue(fs.exists(new Path(hadoopUtilsTestDir, "testSafeRename/a/b/c/e/t4.txt")));
        fs.delete(hadoopUtilsTestDir, true);
      }
    } finally {
      fs.delete(hadoopUtilsTestDir, true);
    }
  }

  /** sanitizePath replaces illegal path characters and rejects illegal substitution strings. */
  @Test
  public void testSanitizePath() throws Exception {
    Assert.assertEquals(HadoopUtils.sanitizePath("/A:B/::C:::D\\", "abc"), "/AabcB/abcabcCabcabcabcDabc");
    Assert.assertEquals(HadoopUtils.sanitizePath(":\\:\\/", ""), "/");
    try {
      HadoopUtils.sanitizePath("/A:B/::C:::D\\", "a:b");
      // Guard: reached only if sanitizePath failed to throw; the message check below then fails.
      throw new RuntimeException();
    } catch (RuntimeException e) {
      Assert.assertTrue(e.getMessage().contains("substitute contains illegal characters"));
    }
  }

  /** Every State property should appear in the Configuration produced by getConfFromState. */
  @Test
  public void testStateToConfiguration() throws IOException {
    Map<String, String> vals = Maps.newHashMap();
    vals.put("test_key1", "test_val1");
    vals.put("test_key2", "test_val2");
    Configuration expected = HadoopUtils.newConfiguration();
    State state = new State();
    for (Map.Entry<String, String> entry : vals.entrySet()) {
      state.setProp(entry.getKey(), entry.getValue());
      expected.set(entry.getKey(), entry.getValue());
    }
    Assert.assertEquals(HadoopUtils.getConfFromState(state), expected);
    Assert.assertEquals(HadoopUtils.getConfFromState(state, Optional.<String>absent()), expected);
    Assert.assertEquals(HadoopUtils.getConfFromState(state, Optional.of("dummy")), expected);
  }

  /**
   * Properties under the encrypted namespace path should be decrypted and surfaced under
   * their child key names in the resulting Configuration.
   */
  @Test
  public void testEncryptedStateToConfiguration() throws IOException {
    Map<String, String> vals = Maps.newHashMap();
    vals.put("test_key1", "test_val1");
    vals.put("test_key2", "test_val2");
    State state = new State();
    for (Map.Entry<String, String> entry : vals.entrySet()) {
      state.setProp(entry.getKey(), entry.getValue());
    }
    Map<String, String> encryptedVals = Maps.newHashMap();
    encryptedVals.put("key1", "val1");
    encryptedVals.put("key2", "val2");
    final String encryptedPath = "encrypted.name.space";
    for (Map.Entry<String, String> entry : encryptedVals.entrySet()) {
      state.setProp(encryptedPath + "." + entry.getKey(), entry.getValue());
    }
    Configuration configuration = HadoopUtils.getConfFromState(state, Optional.of(encryptedPath));
    for (Map.Entry<String, String> entry : vals.entrySet()) {
      String val = configuration.get(entry.getKey());
      Assert.assertEquals(val, entry.getValue());
    }
    for (Map.Entry<String, String> entry : encryptedVals.entrySet()) {
      Assert.assertNotNull(configuration.get(entry.getKey())); //Verify key with child path exist as decryption is unit tested in ConfigUtil.
    }
  }

  /** moveToTrash relocates a directory into the configured trash location instead of deleting it. */
  @Test
  public void testMoveToTrash() throws IOException {
    Path hadoopUtilsTestDir = new Path(Files.createTempDir().getAbsolutePath(), "HadoopUtilsTestDir");
    Configuration conf = new Configuration();
    // Set the time to keep it in trash to 10 minutes.
    // 0 means object will be deleted instantly.
    conf.set("fs.trash.interval", "10");
    FileSystem fs = FileSystem.getLocal(conf);
    // NOTE(review): this Trash instance is unused; trashPolicy below supplies the trash path.
    Trash trash = new Trash(fs, conf);
    TrashPolicy trashPolicy = TrashPolicy.getInstance(conf, fs, fs.getHomeDirectory());
    Path trashPath = Path.mergePaths(trashPolicy.getCurrentTrashDir(), hadoopUtilsTestDir);
    fs.mkdirs(hadoopUtilsTestDir);
    Assert.assertTrue(fs.exists(hadoopUtilsTestDir));
    // Move the parent dir to trash because we created it at the beginning of this function.
    HadoopUtils.moveToTrash(fs, hadoopUtilsTestDir.getParent(), conf);
    Assert.assertFalse(fs.exists(hadoopUtilsTestDir));
    Assert.assertTrue(fs.exists(trashPath));
  }
}
| 4,032 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/HiveJdbcConnectorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.Assert;
import org.testng.annotations.Test;
public class HiveJdbcConnectorTest {
@Test
public void testChoppedStatementNoLineChange() {
String example1 = "This is\na test";
String example2 = "This is\r\na test\nstring";
String expected1 = "This is a test";
String expected2 = "This is a test string";
Assert.assertEquals(HiveJdbcConnector.choppedStatementNoLineChange(example1), expected1);
Assert.assertEquals(HiveJdbcConnector.choppedStatementNoLineChange(example2), expected2);
// Generate a random string longer than 1000 charaters
int iter = 501;
StringBuilder exampleExpected = new StringBuilder();
StringBuilder exampleResult = new StringBuilder();
while (iter > 0) {
exampleExpected.append("a ");
exampleResult.append("a\n");
iter -- ;
}
String expected = exampleExpected.toString().substring(0, 1000) + "...... (2 characters omitted)";
Assert.assertEquals(HiveJdbcConnector.choppedStatementNoLineChange(exampleResult.toString()), expected);
}
} | 4,033 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/StringParsingUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import org.testng.Assert;
import org.testng.annotations.Test;
public class StringParsingUtilsTest {

  /** humanReadableToByteCount parses plain, k/m/g/t, case-insensitive, "b"-suffixed and fractional sizes. */
  @Test
  public void testHumanReadableToByteCount()
      throws Exception {
    String[] inputs = {"10", "1k", "1m", "1g", "1t", "1K", "1kb", "1KB", "2k", "2.5k"};
    long[] expected = {10L, 1024L, 1048576L, 1073741824L, 1099511627776L, 1024L, 1024L, 1024L, 2048L, 2560L};
    for (int i = 0; i < inputs.length; i++) {
      Assert.assertEquals(StringParsingUtils.humanReadableToByteCount(inputs[i]), expected[i]);
    }
  }
}
| 4,034 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/ParallelRunnerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Queues;
import com.google.common.io.Closer;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.Watermark;
import org.apache.gobblin.source.extractor.WatermarkSerializerHelper;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.io.SeekableFSInputStream;
/**
* Unit tests for {@link ParallelRunner}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.util" })
public class ParallelRunnerTest {
private FileSystem fs;
private Path outputPath;
/** Creates the local {@link FileSystem} and output directory shared by the serialization tests. */
@BeforeClass
public void setUp() throws IOException {
  this.fs = FileSystem.getLocal(new Configuration());
  this.outputPath = new Path(ParallelRunnerTest.class.getSimpleName());
}
/** Serializes two work units via a two-thread {@link ParallelRunner}; verified by the dependent test. */
@Test
public void testSerializeToFile() throws IOException {
  try (ParallelRunner runner = new ParallelRunner(2, this.fs)) {
    WorkUnit first = WorkUnit.createEmpty();
    first.setProp("foo", "bar");
    first.setProp("a", 10);
    runner.serializeToFile(first, new Path(this.outputPath, "wu1"));

    WorkUnit second = WorkUnit.createEmpty();
    second.setProp("foo", "baz");
    second.setProp("b", 20);
    runner.serializeToFile(second, new Path(this.outputPath, "wu2"));
  }
}
/** Reads the two serialized work units back in parallel and checks all properties round-trip. */
@Test(dependsOnMethods = "testSerializeToFile")
public void testDeserializeFromFile() throws IOException {
  WorkUnit first = WorkUnit.createEmpty();
  WorkUnit second = WorkUnit.createEmpty();
  try (ParallelRunner runner = new ParallelRunner(2, this.fs)) {
    runner.deserializeFromFile(first, new Path(this.outputPath, "wu1"));
    runner.deserializeFromFile(second, new Path(this.outputPath, "wu2"));
  }
  Assert.assertEquals(first.getPropertyNames().size(), 2);
  Assert.assertEquals(first.getProp("foo"), "bar");
  Assert.assertEquals(first.getPropAsInt("a"), 10);
  Assert.assertEquals(second.getPropertyNames().size(), 2);
  Assert.assertEquals(second.getProp("foo"), "baz");
  Assert.assertEquals(second.getPropAsInt("b"), 20);
}
/**
 * Writes two {@link WorkUnitState}s with different watermarks (10 and 100) into separate
 * sequence files; the dependent test reads them back through {@link ParallelRunner}.
 */
@Test
@SuppressWarnings("deprecation")
public void testSerializeToSequenceFile() throws IOException {
  Closer closer = Closer.create();
  Configuration conf = new Configuration();
  WritableShimSerialization.addToHadoopConfiguration(conf);
  try {
    SequenceFile.Writer writer1 = closer.register(SequenceFile.createWriter(this.fs, conf,
        new Path(this.outputPath, "seq1"), Text.class, WorkUnitState.class));
    Text key = new Text();
    WorkUnitState workUnitState = new WorkUnitState();
    TestWatermark watermark = new TestWatermark();
    watermark.setLongWatermark(10L);
    workUnitState.setActualHighWatermark(watermark);
    writer1.append(key, workUnitState);
    SequenceFile.Writer writer2 = closer.register(SequenceFile.createWriter(this.fs, conf,
        new Path(this.outputPath, "seq2"), Text.class, WorkUnitState.class));
    // The same WorkUnitState object is reused with an updated watermark for the second file.
    watermark.setLongWatermark(100L);
    workUnitState.setActualHighWatermark(watermark);
    writer2.append(key, workUnitState);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
@Test(dependsOnMethods = "testSerializeToSequenceFile")
public void testDeserializeFromSequenceFile() throws IOException {
Queue<WorkUnitState> workUnitStates = Queues.newConcurrentLinkedQueue();
Path seqPath1 = new Path(this.outputPath, "seq1");
Path seqPath2 = new Path(this.outputPath, "seq2");
try (ParallelRunner parallelRunner = new ParallelRunner(2, this.fs)) {
parallelRunner.deserializeFromSequenceFile(Text.class, WorkUnitState.class, seqPath1, workUnitStates, true);
parallelRunner.deserializeFromSequenceFile(Text.class, WorkUnitState.class, seqPath2, workUnitStates, true);
}
Assert.assertFalse(this.fs.exists(seqPath1));
Assert.assertFalse(this.fs.exists(seqPath2));
Assert.assertEquals(workUnitStates.size(), 2);
for (WorkUnitState workUnitState : workUnitStates) {
TestWatermark watermark = new Gson().fromJson(workUnitState.getActualHighWatermark(), TestWatermark.class);
Assert.assertTrue(watermark.getLongWatermark() == 10L || watermark.getLongWatermark() == 100L);
}
}
@Test
public void testMovePath() throws IOException, URISyntaxException {
String expected = "test";
ByteArrayOutputStream actual = new ByteArrayOutputStream();
Path src = new Path("/src/file.txt");
Path dst = new Path("/dst/file.txt");
FileSystem fs1 = Mockito.mock(FileSystem.class);
Mockito.when(fs1.exists(src)).thenReturn(true);
Mockito.when(fs1.isFile(src)).thenReturn(true);
Mockito.when(fs1.getUri()).thenReturn(new URI("fs1:////"));
Mockito.when(fs1.getFileStatus(src)).thenReturn(new FileStatus(1, false, 1, 1, 1, src));
Mockito.when(fs1.open(src))
.thenReturn(new FSDataInputStream(new SeekableFSInputStream(new ByteArrayInputStream(expected.getBytes()))));
Mockito.when(fs1.delete(src, true)).thenReturn(true);
FileSystem fs2 = Mockito.mock(FileSystem.class);
Mockito.when(fs2.exists(dst)).thenReturn(false);
Mockito.when(fs2.getUri()).thenReturn(new URI("fs2:////"));
Mockito.when(fs2.getConf()).thenReturn(new Configuration());
Mockito.when(fs2.create(dst, false)).thenReturn(new FSDataOutputStream(actual, null));
try (ParallelRunner parallelRunner = new ParallelRunner(1, fs1)) {
parallelRunner.movePath(src, fs2, dst, Optional.<String>absent());
}
Assert.assertEquals(actual.toString(), expected);
}
/**
* A test verifying when {@link ParallelRunner#close()} is called, everything inside runner has been collected.
* Positive case: Both files get mock-deleted and parallelRunner returns.
* Negative case: Using countdown-latch to specify causality between two deletion of files and construct a deadlock.
* The src2 deletion won't happen as it depends on src1's deletion and vice versa.
* {@link ParallelRunner#close()} will finally timeout and expect to catch the {@link java.util.concurrent.TimeoutException}.
*/
public void testWaitsForFuturesWhenClosing() throws IOException, InterruptedException {
// Indicate if deadlock is being constructed.
final AtomicBoolean flag = new AtomicBoolean();
flag.set(true);
final CountDownLatch latch1 = new CountDownLatch(1);
final CountDownLatch latch2 = new CountDownLatch(1);
Path src1 = new Path("/src/file1.txt");
Path src2 = new Path("/src/file2.txt");
FileSystem fs = Mockito.mock(FileSystem.class);
Mockito.when(fs.exists(src1)).thenReturn(true);
Mockito.when(fs.delete(src1, true)).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
if (flag.get()) {
latch2.await();
}
latch1.countDown();
return true;
}
});
Mockito.when(fs.exists(src2)).thenReturn(true);
/** Will make deletion of files from parallelRunner to be timeout after 5 seconds.*/
final int timeout = 5000;
Mockito.when(fs.delete(src2, true)).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
if (flag.get()) {
latch1.await();
}
latch2.countDown();
return true;
}
});
boolean caughtException = false;
ParallelRunner parallelRunner = new ParallelRunner(2, fs);
try {
parallelRunner.deletePath(src2, true);
parallelRunner.deletePath(src1, true);
parallelRunner.waitForTasks(timeout);
} catch (IOException e) {
caughtException = true;
}
Assert.assertTrue(caughtException);
Assert.assertEquals(latch2.getCount(), 1);
Assert.assertEquals(latch1.getCount(), 1);
// Remove deadlock
flag.set(false);
caughtException = false;
parallelRunner = new ParallelRunner(2, fs);
try {
parallelRunner.deletePath(src2, true);
parallelRunner.deletePath(src1, true);
parallelRunner.waitForTasks(timeout);
} catch (IOException e) {
caughtException = true;
}
Assert.assertFalse(caughtException);
Assert.assertEquals(latch2.getCount(), 0);
Assert.assertEquals(latch1.getCount(), 0);
}
@AfterClass
public void tearDown() throws IOException {
if (this.fs != null && this.outputPath != null) {
this.fs.delete(this.outputPath, true);
}
}
public static class TestWatermark implements Watermark {
private long watermark = -1;
@Override
public JsonElement toJson() {
return WatermarkSerializerHelper.convertWatermarkToJson(this);
}
@Override
public short calculatePercentCompletion(Watermark lowWatermark, Watermark highWatermark) {
return 0;
}
public void setLongWatermark(long watermark) {
this.watermark = watermark;
}
public long getLongWatermark() {
return this.watermark;
}
}
}
| 4,035 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/TimeRangeCheckerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.Assert;
import org.testng.annotations.Test;
@Test(groups = {"gobblin.util"})
public class TimeRangeCheckerTest {

  /** Exercises {@link TimeRangeChecker#isTimeInRange} against a fixed instant: midnight PST, Thursday Jan 1st 2015. */
  @Test
  public void testTimeRangeChecker() {
    DateTime midnightThursday =
        new DateTime(2015, 1, 1, 0, 0, 0, DateTimeZone.forID(ConfigurationKeys.PST_TIMEZONE_NAME));

    // In range: correct day with the instant at the start of the window ...
    Assert.assertTrue(TimeRangeChecker.isTimeInRange(Lists.newArrayList("THURSDAY"), "00-00", "06-00", midnightThursday));
    // ... a one-minute window containing the instant ...
    Assert.assertTrue(TimeRangeChecker.isTimeInRange(Lists.newArrayList("THURSDAY"), "00-00", "00-01", midnightThursday));
    // ... and a day list where only one entry matches.
    Assert.assertTrue(TimeRangeChecker.isTimeInRange(Lists.newArrayList("MONDAY", "THURSDAY"), "00-00", "06-00", midnightThursday));

    // Out of range: wrong day entirely.
    Assert.assertFalse(TimeRangeChecker.isTimeInRange(Lists.newArrayList("MONDAY"), "00-00", "06-00", midnightThursday));
    // Out of range: window starts one minute after the instant.
    Assert.assertFalse(TimeRangeChecker.isTimeInRange(Lists.newArrayList("THURSDAY"), "00-01", "06-00", midnightThursday));
    // Out of range: window starts one hour after the instant.
    Assert.assertFalse(TimeRangeChecker.isTimeInRange(Lists.newArrayList("THURSDAY"), "01-00", "06-00", midnightThursday));
  }
}
| 4,036 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/ConfigUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.jasypt.util.text.BasicTextEncryptor;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import static org.assertj.core.api.Assertions.assertThat;
/** Unit tests for {@link ConfigUtils} covering Properties/Config/State conversions and helpers. */
public class ConfigUtilsTest {

  /** Round-trips a Config through {@code saveConfigToFile} and a fresh parse of the written file. */
  @Test
  public void testSaveConfigToFile()
      throws IOException {
    FileUtils fileUtils = new FileUtils();
    ConfigUtils configUtils = new ConfigUtils(fileUtils);
    ImmutableMap<String, String> configMap = ImmutableMap.of("k1", "v1", "k2", "v2");
    Config config = ConfigFactory.parseMap(configMap);
    // NOTE(review): relative path — the file lands in the JVM's working directory; cleaned up below.
    Path destPath = Paths.get("test-config-file.txt");
    configUtils.saveConfigToFile(config, destPath);
    Config restoredConfig = ConfigFactory.parseFile(destPath.toFile());
    assertThat(restoredConfig.getString("k1")).isEqualTo("v1");
    assertThat(restoredConfig.getString("k2")).isEqualTo("v2");
    java.nio.file.Files.deleteIfExists(destPath);
  }

  /**
   * Verifies Properties -> Config conversion, including keys that are full prefixes of other
   * keys ("k3", "k3.kk1"), which must be sanitized to remain addressable.
   */
  @Test
  public void testPropertiesToConfig() {
    Properties properties = new Properties();
    properties.setProperty("k1.kk1", "v1");
    properties.setProperty("k1.kk2", "v2");
    properties.setProperty("k2.kk", "v3");
    properties.setProperty("k3", "v4");
    properties.setProperty("k3.kk1", "v5");
    properties.setProperty("k3.kk1.kkk1", "v6");
    Config conf = ConfigUtils.propertiesToConfig(properties);
    Assert.assertEquals(conf.getString("k1.kk1"), "v1");
    Assert.assertEquals(conf.getString("k1.kk2"), "v2");
    Assert.assertEquals(conf.getString("k2.kk"), "v3");
    // "k3" and "k3.kk1" are prefixes of deeper keys, so they are only reachable via sanitized names.
    Assert.assertEquals(conf.getString(ConfigUtils.sanitizeFullPrefixKey("k3")), "v4");
    Assert.assertEquals(conf.getString(ConfigUtils.sanitizeFullPrefixKey("k3.kk1")), "v5");
    Assert.assertEquals(conf.getString("k3.kk1.kkk1"), "v6");
  }

  /** Only properties under the supplied prefix should survive the conversion. */
  @Test
  public void testPropertiesToConfigWithPrefix() {
    Properties properties = new Properties();
    properties.setProperty("k1.kk1", "v1");
    properties.setProperty("k1.kk2", "v2");
    properties.setProperty("k2.kk", "v3");
    Config conf = ConfigUtils.propertiesToConfig(properties, Optional.of("k1"));
    Assert.assertEquals(conf.getString("k1.kk1"), "v1");
    Assert.assertEquals(conf.getString("k1.kk2"), "v2");
    Assert.assertFalse(conf.hasPath("k2.kk"), "Should not contain key k2.kk");
  }

  /** A path counts as "non-empty" only if it exists AND its value is a non-empty string. */
  @Test
  public void testHasNonEmptyPath() throws Exception {
    Assert.assertTrue(ConfigUtils.hasNonEmptyPath(ConfigFactory.parseMap(ImmutableMap.of("key1", "value1")), "key1"));
    Assert.assertFalse(ConfigUtils.hasNonEmptyPath(ConfigFactory.parseMap(ImmutableMap.of("key2", "value1")), "key1"));
    Assert.assertFalse(ConfigUtils.hasNonEmptyPath(ConfigFactory.parseMap(ImmutableMap.of("key1", "")), "key1"));
  }

  /**
   * getStringList must handle comma-separated strings (quoted or not) and native typesafe lists,
   * and fall back to an empty list for missing/null/empty values.
   */
  @Test
  public void testGetStringList() throws Exception {
    // values as comma separated strings
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(ImmutableMap.of("a.b", "1,2,3")), "a.b"),
        ImmutableList.of("1", "2", "3"));
    // values as quoted comma separated strings
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(ImmutableMap.of("a.b", "\"1\",\"2\",\"3\"")), "a.b"),
        ImmutableList.of("1", "2", "3"));
    // values as quoted comma separated strings (Multiple values)
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(ImmutableMap.of("a.b", "\"1\",\"2,3\"")), "a.b"),
        ImmutableList.of("1", "2,3"));
    // values as Type safe list
    Assert.assertEquals(ConfigUtils.getStringList(
        ConfigFactory.empty().withValue("a.b",
            ConfigValueFactory.fromIterable(ImmutableList.of("1", "2","3"))), "a.b"),
        ImmutableList.of("1", "2", "3"));
    // values as quoted Type safe list
    Assert.assertEquals(ConfigUtils.getStringList(
        ConfigFactory.empty().withValue("a.b",
            ConfigValueFactory.fromIterable(ImmutableList.of("\"1\"", "\"2\"","\"3\""))), "a.b"),
        ImmutableList.of("1", "2", "3"));
    // values as quoted Type safe list (Multiple values)
    Assert.assertEquals(ConfigUtils.getStringList(
        ConfigFactory.empty().withValue("a.b",
            ConfigValueFactory.fromIterable(ImmutableList.of("\"1\"", "\"2,3\""))), "a.b"),
        ImmutableList.of("1", "2,3"));
    // Empty list if path does not exist
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(ImmutableMap.of("key1", "value1,value2")), "key2"), ImmutableList.of());
    // Empty list of path is null
    Map<String,String> configMap = Maps.newHashMap();
    configMap.put("key1", null);
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(configMap), "key1"), ImmutableList.of());
    // Empty list if value is empty string
    configMap = Maps.newHashMap();
    configMap.put("key2", "");
    Assert.assertEquals(ConfigUtils.getStringList(ConfigFactory.parseMap(configMap), "key2"), ImmutableList.of());
  }

  /** Config -> Properties renders every value (int, string, boolean) as its string form. */
  @Test
  public void testConfigToProperties() {
    Config cfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put("key1", 1)
        .put("key2", "sTring")
        .put("key3", true)
        .build());
    Properties props = ConfigUtils.configToProperties(cfg);
    Assert.assertEquals(props.getProperty("key1"), "1");
    Assert.assertEquals(props.getProperty("key2"), "sTring");
    Assert.assertEquals(props.getProperty("key3"), "true");
  }

  /**
   * Properties -> Config -> State round trip: unlike raw Config access (see
   * testPropertiesToConfig), the State exposes prefix keys under their ORIGINAL names.
   */
  @Test
  public void testPropertiesToConfigToState() {
    Properties properties = new Properties();
    properties.setProperty("k1.kk1", "v1");
    properties.setProperty("k1.kk2", "v2");
    properties.setProperty("k2.kk", "v3");
    properties.setProperty("k3", "v4");
    properties.setProperty("k3.kk1", "v5");
    properties.setProperty("k3.kk1.kkk1", "v6");
    Config conf = ConfigUtils.propertiesToConfig(properties);
    State state = ConfigUtils.configToState(conf);
    Assert.assertEquals(state.getProp("k1.kk1"), "v1");
    Assert.assertEquals(state.getProp("k1.kk2"), "v2");
    Assert.assertEquals(state.getProp("k2.kk"), "v3");
    Assert.assertEquals(state.getProp("k3"), "v4");
    Assert.assertEquals(state.getProp("k3.kk1"), "v5");
    Assert.assertEquals(state.getProp("k3.kk1.kkk1"), "v6");
  }

  /** Only keys starting with the given prefix make it into the resulting Properties. */
  @Test
  public void testConfigToPropertiesWithPrefix() {
    Config cfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put("a.key1", 1)
        .put("b.key2", "sTring")
        .put("a.key3", true)
        .build());
    Properties props = ConfigUtils.configToProperties(cfg, "a.");
    Assert.assertEquals(props.getProperty("a.key1"), "1");
    Assert.assertNull(props.getProperty("b.key2"));
    Assert.assertEquals(props.getProperty("a.key3"), "true");
  }

  /**
   * Test that you can go from properties to Config and back without changing.
   * Specifically tests prefixed paths and numeric key-parts.
   */
  @Test
  public void testPropertiesToConfigAndBack() {
    Properties props = new Properties();
    props.setProperty("writer.staging.dir", "foobar");
    props.setProperty("writer.staging.dir.0", "foobar-0");
    Config config = ConfigUtils.propertiesToConfig(props);
    Properties configProps = ConfigUtils.configToProperties(config);
    Assert.assertEquals(configProps, props);
  }

  /**
   * A "full prefix key" is a key that is also a strict path-prefix of another key
   * (e.g. "a.b" when "a.b.c" exists). Verifies detection with and without a scoping prefix.
   */
  @Test
  public void testFindFullPrefixKeys() {
    Properties props = new Properties();
    props.setProperty("a.b", "123");
    props.setProperty("a.b1", "123");
    props.setProperty("b", "123");
    props.setProperty("b_a", "123");
    props.setProperty("a.b.c", "123");
    props.setProperty("a.b.c.d.e", "123");
    props.setProperty("b.a", "123");
    Set<String> fullPrefixKeys =
        ConfigUtils.findFullPrefixKeys(props, Optional.<String>absent());
    Assert.assertEquals(fullPrefixKeys, new HashSet<>(Arrays.asList("a.b", "a.b.c", "b")));
    fullPrefixKeys =
        ConfigUtils.findFullPrefixKeys(props, Optional.of("a."));
    Assert.assertEquals(fullPrefixKeys, new HashSet<>(Arrays.asList("a.b", "a.b.c")));
    fullPrefixKeys =
        ConfigUtils.findFullPrefixKeys(props, Optional.of("c."));
    Assert.assertTrue(fullPrefixKeys.isEmpty());
    // With no nested keys present, nothing should be reported as a full prefix.
    props = new Properties();
    props.setProperty("a.b", "123");
    props.setProperty("a.b1", "123");
    props.setProperty("b", "123");
    props.setProperty("b_a", "123");
    fullPrefixKeys =
        ConfigUtils.findFullPrefixKeys(props, Optional.<String>absent());
    Assert.assertTrue(fullPrefixKeys.isEmpty());
  }

  /**
   * Encrypts values under a prefix with Jasypt ("ENC(...)" envelope), then checks that
   * resolveEncrypted decrypts them and strips the prefix, leaving plain values untouched.
   */
  @Test
  public void testConfigResolveEncrypted() throws IOException {
    Map<String, String> vals = Maps.newHashMap();
    vals.put("test.key1", "test_val1");
    vals.put("test.key2", "test_val2");
    State state = new State();
    for (Map.Entry<String, String> entry : vals.entrySet()) {
      state.setProp(entry.getKey(), entry.getValue());
    }
    // Random master key, written to a temp file referenced by ENCRYPT_KEY_LOC.
    String key = UUID.randomUUID().toString();
    File keyFile = newKeyFile(key);
    state.setProp(ConfigurationKeys.ENCRYPT_KEY_LOC, keyFile.getAbsolutePath());
    Map<String, String> encryptedVals = Maps.newHashMap();
    encryptedVals.put("my.nested.key1", "val1");
    encryptedVals.put("my.nested.key2", "val2");
    String encPrefix = "testenc";
    for (Map.Entry<String, String> entry : encryptedVals.entrySet()) {
      BasicTextEncryptor encryptor = new BasicTextEncryptor();
      encryptor.setPassword(key);
      // "ENC(...)" is the Jasypt envelope that marks a value for decryption.
      String encrypted = "ENC(" + encryptor.encrypt(entry.getValue()) + ")";
      state.setProp(encPrefix + "." + entry.getKey(), encrypted);
    }
    Config config = ConfigUtils.resolveEncrypted(ConfigUtils.propertiesToConfig(state.getProperties()), Optional.of(encPrefix));
    // Both the plain values and the decrypted (de-prefixed) values must be present.
    Map<String, String> expected = ImmutableMap.<String, String>builder()
        .putAll(vals)
        .putAll(encryptedVals)
        .build();
    for (Map.Entry<String, String> entry : expected.entrySet()) {
      String val = config.getString(entry.getKey());
      Assert.assertEquals(val, entry.getValue());
    }
    keyFile.delete();
  }

  /** A valid TimeUnit name in the config wins over the supplied default. */
  @Test
  public void testGetTimeUnitValid() {
    String key = "a.b.c";
    TimeUnit expectedTimeUnit = TimeUnit.DAYS;
    TimeUnit defaultTimeUnit = TimeUnit.MILLISECONDS;
    Config cfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(key, TimeUnit.DAYS.name())
        .build());
    TimeUnit timeUnit = ConfigUtils.getTimeUnit(cfg, key, defaultTimeUnit);
    Assert.assertEquals(timeUnit, expectedTimeUnit);
  }

  /** An unparseable TimeUnit value must fail loudly rather than fall back to the default. */
  @Test
  public void testGetTimeUnitInvalid() {
    String key = "a.b.c";
    final Config cfg = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(key, "INVALID_TIME_UNIT")
        .build());
    Assert.assertThrows(IllegalArgumentException.class, () -> {
      ConfigUtils.getTimeUnit(cfg, key, TimeUnit.SECONDS);
    });
  }

  /** When the key is absent, the supplied default TimeUnit is returned. */
  @Test
  public void testGetTimeUnitDefault() {
    String key = "a.b.c";
    TimeUnit defaultTimeUnit = TimeUnit.MINUTES;
    final Config cfg = ConfigFactory.empty();
    Assert.assertEquals(ConfigUtils.getTimeUnit(cfg, key, defaultTimeUnit), defaultTimeUnit);
  }

  /** Writes the given master password into a self-deleting temp file and returns it. */
  private File newKeyFile(String masterPwd) throws IOException {
    File masterPwdFile = File.createTempFile("masterPassword", null);
    masterPwdFile.deleteOnExit();
    Files.write(masterPwd, masterPwdFile, Charset.defaultCharset());
    return masterPwdFile;
  }
}
| 4,037 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/filters/RegexPathFilterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.filters;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
* Unit test for {@link RegexPathFilter}.
*/
@Test(groups = { "gobblin.util.filters" })
public class RegexPathFilterTest {
@Test
public void testAccept() {
String regex = "a.*\\.b";
Path matchedPath = new Path("fsuri://testdir/subdir/a11.b");
Path unmatchedPath = new Path("fsuri://testdir/subdir/a.11b");
RegexPathFilter includeFilter = new RegexPathFilter(regex);
RegexPathFilter excludeFilter = new RegexPathFilter(regex, false);
Assert.assertTrue(includeFilter.accept(matchedPath));
Assert.assertFalse(includeFilter.accept(unmatchedPath));
Assert.assertFalse(excludeFilter.accept(matchedPath));
Assert.assertTrue(excludeFilter.accept(unmatchedPath));
}
} | 4,038 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/eventbus/EventBusFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.eventbus;
import java.io.IOException;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.eventbus.EventBus;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerImpl;
import org.apache.gobblin.broker.SimpleScope;
import org.apache.gobblin.broker.SimpleScopeType;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
public class EventBusFactoryTest {
@Test
public void testGet()
throws NotConfiguredException, IOException, NoSuchScopeException {
SharedResourcesBrokerImpl<SimpleScopeType> broker = SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(
ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());
EventBus eventBus1 = EventBusFactory.get(getClass().getSimpleName(), broker);
EventBus eventBus2 = EventBusFactory.get(getClass().getSimpleName(), broker);
//Should return the same eventbus instance
Assert.assertEquals(eventBus1, eventBus2);
SharedResourcesBroker<SimpleScopeType> subBroker =
broker.newSubscopedBuilder(new SimpleScope<>(SimpleScopeType.LOCAL, "local")).build();
EventBus eventBus3 = EventBusFactory.get(getClass().getSimpleName(), subBroker);
//Should return the same eventbus instance
Assert.assertEquals(eventBus1, eventBus3);
//Create a new eventbus with local scope
EventBus eventBus4 = subBroker.getSharedResourceAtScope(new EventBusFactory<>(), new EventBusKey(getClass().getSimpleName()), SimpleScopeType.LOCAL);
Assert.assertNotEquals(eventBus3, eventBus4);
//Create an eventbus with different source class name
EventBus eventBus5 = EventBusFactory.get("", broker);
Assert.assertNotEquals(eventBus1, eventBus5);
}
} | 4,039 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/BruteForceAllocatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import static org.mockito.Mockito.*;
public class BruteForceAllocatorTest {
@Test
public void testAllocateRequests()
throws Exception {
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(new StringRequest.StringRequestEstimator())
.withPrioritizer(new StringRequest.StringRequestComparator()).build();
BruteForceAllocator<StringRequest> allocator =
new BruteForceAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
AllocatedRequestsIterator<StringRequest> result = allocator.allocateRequests(requests.iterator(), pool);
List<StringRequest> resultList = Lists.newArrayList(result);
Assert.assertEquals(resultList.size(), 4);
Assert.assertEquals(resultList.get(0).getString(), "a-50");
Assert.assertEquals(resultList.get(1).getString(), "b-20");
// No c because it is too large to fit
Assert.assertEquals(resultList.get(2).getString(), "d-30");
Assert.assertEquals(resultList.get(3).getString(), "e-20");
}
@Test
public void testThrowExceptionOnFailure() throws Exception {
ResourceEstimator<StringRequest> failingEstimator = mock(ResourceEstimator.class);
when(failingEstimator.estimateRequirement(any(), any())).thenThrow(new RuntimeException("Error"));
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(failingEstimator)
.allowParallelization()
.withPrioritizer(new StringRequest.StringRequestComparator()).build();
BruteForceAllocator<StringRequest> allocator =
new BruteForceAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
Assert.expectThrows(RuntimeException.class, () -> allocator.allocateRequests(requests.iterator(), pool));
}
} | 4,040 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/StringRequestor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import com.google.common.base.Function;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Test {@link PushDownRequestor} whose requests are built from plain strings.
 * Each string is wrapped into a {@link StringRequest} pointing back to this requestor.
 */
@AllArgsConstructor
public class StringRequestor implements PushDownRequestor<StringRequest> {

  /** Convenience varargs constructor delegating to the Lombok-generated one. */
  public StringRequestor(String name, String... strings) {
    this(name, Lists.newArrayList(strings));
  }

  @Getter
  private final String name;
  private final List<String> strings;

  /** Lazily wraps each backing string into a {@link StringRequest}. */
  @Override
  public Iterator<StringRequest> iterator() {
    return Iterators.transform(this.strings.iterator(), new Function<String, StringRequest>() {
      @Override
      public StringRequest apply(String input) {
        return new StringRequest(StringRequestor.this, input);
      }
    });
  }

  /** Returns the same requests as {@link #iterator()}, sorted by the given prioritizer. */
  @Override
  public Iterator<StringRequest> getRequests(Comparator<StringRequest> prioritizer)
      throws IOException {
    // Reuse iterator() instead of duplicating the string -> request transform.
    List<StringRequest> requests = Lists.newArrayList(iterator());
    Collections.sort(requests, prioritizer);
    return requests.iterator();
  }
}
| 4,041 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/VectorAlgebraTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import org.testng.Assert;
import org.testng.annotations.Test;
public class VectorAlgebraTest {
@Test
public void testAddVector()
throws Exception {
Assert.assertEquals(VectorAlgebra.addVector(new double[]{1, 2}, new double[]{1, 3}, 1., null), new double[]{2, 5});
Assert.assertEquals(VectorAlgebra.addVector(new double[]{1, 2}, new double[]{1, 3}, 2., null), new double[]{3, 8});
Assert.assertEquals(VectorAlgebra.addVector(new double[]{1, 2}, new double[]{1, 3}, -1., null), new double[]{0, -1});
// Check it uses reuse vector
double[] reuse = new double[]{1, 2};
VectorAlgebra.addVector(reuse, new double[]{1, 3}, 1., reuse);
Assert.assertEquals(reuse, new double[]{2, 5});
}
@Test
public void testExceedsVector()
throws Exception {
Assert.assertTrue(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{0, 3}, false));
Assert.assertTrue(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{2, 0}, false));
Assert.assertTrue(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{0, 2}, true));
Assert.assertTrue(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{1, 0}, true));
Assert.assertFalse(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{0, 1}, false));
Assert.assertFalse(VectorAlgebra.exceedsVector(new double[]{1, 2}, new double[]{1, 2}, false));
}
} | 4,042 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/PreOrderAllocatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import static org.mockito.Mockito.*;
public class PreOrderAllocatorTest {
@Test
public void testAllocateRequests()
throws Exception {
StringRequest.StringRequestEstimator estimator = new StringRequest.StringRequestEstimator();
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(estimator).withPrioritizer(new StringRequest.StringRequestComparator()).build();
PreOrderAllocator<StringRequest> allocator =
new PreOrderAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
AllocatedRequestsIterator<StringRequest> result = allocator.allocateRequests(requests.iterator(), pool);
List<StringRequest> resultList = Lists.newArrayList(result);
Assert.assertEquals(resultList.size(), 3);
Assert.assertEquals(resultList.get(0).getString(), "a-50");
Assert.assertEquals(resultList.get(1).getString(), "b-20");
// No c because it is too large to fit
Assert.assertEquals(resultList.get(2).getString(), "d-30");
Assert.assertTrue(estimator.getQueriedRequests().contains("a-50"));
Assert.assertTrue(estimator.getQueriedRequests().contains("b-20"));
Assert.assertTrue(estimator.getQueriedRequests().contains("c-200"));
Assert.assertTrue(estimator.getQueriedRequests().contains("d-30"));
Assert.assertFalse(estimator.getQueriedRequests().contains("e-20"));
Assert.assertFalse(estimator.getQueriedRequests().contains("f-50"));
}
@Test
public void testThrowExceptionOnFailure() throws Exception {
ResourceEstimator<StringRequest> failingEstimator = mock(ResourceEstimator.class);
when(failingEstimator.estimateRequirement(any(), any())).thenThrow(new RuntimeException("Error"));
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(failingEstimator)
.allowParallelization()
.withPrioritizer(new StringRequest.StringRequestComparator()).build();
PreOrderAllocator<StringRequest> allocator =
new PreOrderAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
Assert.expectThrows(RuntimeException.class, () -> allocator.allocateRequests(requests.iterator(), pool));
}
} | 4,043 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/GreedyAllocatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import static org.mockito.Mockito.*;
public class GreedyAllocatorTest {
@Test
public void testAllocateRequests()
throws Exception {
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(new StringRequest.StringRequestEstimator()).build();
GreedyAllocator<StringRequest> allocator =
new GreedyAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
AllocatedRequestsIterator<StringRequest> result = allocator.allocateRequests(requests.iterator(), pool);
List<StringRequest> resultList = Lists.newArrayList(result);
Assert.assertEquals(resultList.size(), 2);
// all equal, so no order guaranteed
Assert.assertEquals(Sets.newHashSet(Lists.transform(resultList, new Function<StringRequest, String>() {
@Override
public String apply(StringRequest input) {
return input.getString();
}
})), Sets.newHashSet("a-50", "f-50"));
}
@Test
public void testThrowExceptionOnFailure() throws Exception {
ResourceEstimator<StringRequest> failingEstimator = mock(ResourceEstimator.class);
when(failingEstimator.estimateRequirement(any(), any())).thenThrow(new RuntimeException("Error"));
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(failingEstimator)
.allowParallelization()
.withPrioritizer(new StringRequest.StringRequestComparator()).build();
GreedyAllocator<StringRequest> allocator = new GreedyAllocator<>(configuration);
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 100.).build();
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r1", "a-50", "f-50", "k-20"),
new StringRequestor("r2", "j-10", "b-20", "e-20"),
new StringRequestor("r3", "g-20", "c-200", "d-30"));
Assert.expectThrows(RuntimeException.class, () -> allocator.allocateRequests(requests.iterator(), pool));
}
} | 4,044 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/ConcurrentBoundedPriorityIterableTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.Comparator;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
public class ConcurrentBoundedPriorityIterableTest {
public static final String MEMORY = "memory";
@Test
public void test()
throws Exception {
ConcurrentBoundedPriorityIterable<String> iterable =
new ConcurrentBoundedPriorityIterable<>(new MyComparator(), new MyEstimator(), "min",
ResourcePool.builder().maxResource(MEMORY, 100.).build());
// doesn't fit
Assert.assertFalse(iterable.add("a-500"));
// add some elements until full
Assert.assertTrue(iterable.add("d-50"));
Assert.assertFalse(iterable.isFull());
Assert.assertTrue(iterable.add("d-50"));
Assert.assertTrue(iterable.isFull());
// container full, cannot add low priority
Assert.assertFalse(iterable.add("d-50"));
Assert.assertFalse(iterable.add("e-50"));
// can add item up to hard bound
Assert.assertTrue(iterable.add("e-10"));
// can add high priority item
Assert.assertTrue(iterable.add("b-50"));
// Check items
List<String> items = Lists
.newArrayList(Iterators.transform(iterable.iterator(), new AllocatedRequestsIteratorBase.TExtractor<String>()));
Assert.assertEquals(items.size(), 2);
Assert.assertEquals(items.get(0), "b-50");
Assert.assertEquals(items.get(1), "d-50");
iterable.reopen();
// a high priority that won't fit even with evictions should not evict anything
Assert.assertFalse(iterable.add("c-500"));
items = Lists
.newArrayList(Iterators.transform(iterable.iterator(), new AllocatedRequestsIteratorBase.TExtractor<String>()));
Assert.assertEquals(items.size(), 2);
iterable.reopen();
// even if it is higher priority than everything else
Assert.assertFalse(iterable.add("a-500"));
items = Lists
.newArrayList(Iterators.transform(iterable.iterator(), new AllocatedRequestsIteratorBase.TExtractor<String>()));
Assert.assertEquals(items.size(), 2);
}
private class MyComparator implements Comparator<String> {
@Override
public int compare(String o1, String o2) {
String o1CompareToken = o1.split("-")[0];
String o2CompareToken = o2.split("-")[0];
return o1CompareToken.compareTo(o2CompareToken);
}
}
private class MyEstimator implements ResourceEstimator<String> {
@Override
public ResourceRequirement estimateRequirement(String s, ResourcePool resourcePool) {
double memory = Double.parseDouble(s.split("-")[1]);
return resourcePool.getResourceRequirementBuilder().setRequirement(MEMORY, memory).build();
}
}
} | 4,045 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/StringRequest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.Comparator;
import java.util.Set;
import com.google.common.collect.Sets;
import lombok.Data;
import lombok.Getter;
/**
 * A simple {@link Request} implementation for tests. The payload string is expected to
 * look like "{priority}-{memory}" (e.g. "a-50"): the part before the dash is the
 * priority token and the part after it is the memory cost.
 */
@Data
public class StringRequest implements Request<StringRequest> {
  public static final String MEMORY = "memory";

  private final Requestor<StringRequest> requestor;
  private final String string;

  /** Orders requests lexicographically by the token before the dash. */
  public static class StringRequestComparator implements Comparator<StringRequest> {
    @Override
    public int compare(StringRequest o1, StringRequest o2) {
      return priorityToken(o1).compareTo(priorityToken(o2));
    }

    private static String priorityToken(StringRequest request) {
      return request.getString().split("-")[0];
    }
  }

  /**
   * Derives the memory requirement from the token after the dash, recording every
   * request string it was asked about in {@link #getQueriedRequests()}.
   */
  public static class StringRequestEstimator implements ResourceEstimator<StringRequest> {
    @Getter
    private Set<String> queriedRequests = Sets.newConcurrentHashSet();

    @Override
    public ResourceRequirement estimateRequirement(StringRequest s, ResourcePool resourcePool) {
      // Parse first so a malformed string fails before being recorded as queried.
      double memory = Double.parseDouble(s.getString().split("-")[1]);
      this.queriedRequests.add(s.getString());
      return resourcePool.getResourceRequirementBuilder().setRequirement(MEMORY, memory).build();
    }
  }
}
| 4,046 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/ResourcePoolTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import org.testng.Assert;
import org.testng.annotations.Test;
public class ResourcePoolTest {
public static final String MEMORY = "Memory";
public static final String TIME = "Time";
@Test
public void test() {
ResourcePool pool = ResourcePool.builder().maxResource(MEMORY, 1000.).maxResource(TIME, 200.).tolerance(MEMORY, 2.)
.defaultRequirement(TIME, 1.).build();
Assert.assertEquals(pool.getNumDimensions(), 2);
Assert.assertEquals(pool.getSoftBound(), new double[]{1000, 200});
// Default tolerance is 1.2
Assert.assertEquals(pool.getHardBound(), new double[]{2000, 240});
// Test default resource use
Assert.assertEquals(pool.getResourceRequirementBuilder().build().getResourceVector(), new double[]{0, 1});
ResourceRequirement requirement = pool.getResourceRequirementBuilder().setRequirement(MEMORY, 10.).build();
Assert.assertEquals(requirement.getResourceVector(), new double[]{10, 1});
}
} | 4,047 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/request_allocation/HierarchicalAllocatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.request_allocation;
import java.util.Comparator;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
public class HierarchicalAllocatorTest {
@Test
public void testAllocateRequests()
throws Exception {
Comparator<Requestor<StringRequest>> requestorComparator = new Comparator<Requestor<StringRequest>>() {
@Override
public int compare(Requestor<StringRequest> o1, Requestor<StringRequest> o2) {
StringRequestor stringRequestor1 = (StringRequestor) o1;
StringRequestor stringRequestor2 = (StringRequestor) o2;
return stringRequestor1.getName().compareTo(stringRequestor2.getName());
}
};
HierarchicalPrioritizer<StringRequest> prioritizer =
new SimpleHierarchicalPrioritizer<>(requestorComparator, new StringRequest.StringRequestComparator());
RequestAllocatorConfig<StringRequest> configuration =
RequestAllocatorConfig.builder(new StringRequest.StringRequestEstimator()).withPrioritizer(prioritizer).build();
BruteForceAllocator<StringRequest> underlying = new BruteForceAllocator<>(configuration);
HierarchicalAllocator<StringRequest> hierarchicalAllocator = new HierarchicalAllocator<>(prioritizer, underlying);
List<Requestor<StringRequest>> requests = Lists.<Requestor<StringRequest>>newArrayList(
new StringRequestor("r2", "b-10", "c-10"),
new StringRequestor("r1", "f-10", "h-10"),
new StringRequestor("r1", "g-10", "i-10"),
new StringRequestor("r3", "a-10", "d-10"));
ResourcePool pool = ResourcePool.builder().maxResource(StringRequest.MEMORY, 45.).build();
AllocatedRequestsIterator<StringRequest> result = hierarchicalAllocator.allocateRequests(requests.iterator(), pool);
List<StringRequest> resultList = Lists.newArrayList(result);
Assert.assertEquals(resultList.size(), 5);
Assert.assertEquals(resultList.get(0).getString(), "f-10");
Assert.assertEquals(resultList.get(1).getString(), "g-10");
Assert.assertEquals(resultList.get(2).getString(), "h-10");
Assert.assertEquals(resultList.get(3).getString(), "i-10");
Assert.assertEquals(resultList.get(4).getString(), "b-10");
}
} | 4,048 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/test/TestIOUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.test;
import java.util.Collection;
import java.util.List;
import org.apache.avro.generic.GenericRecord;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Test cases for {@link TestIOUtils}
*/
/**
 * Test cases for {@link TestIOUtils}
 */
public class TestIOUtilsTest {

  @Test
  public void testReadAllRecords()
      throws Exception {
    List<GenericRecord> testData = TestIOUtils.readAllRecords(
        getClass().getResource("/test_data.json").getPath(),
        getClass().getResource("/test_data.avsc").getPath());

    Assert.assertEquals(testData.size(), 2);
    Assert.assertEquals(find(testData, "string1", "string1").toString(),
        "{\"string1\": \"string1\", \"long1\": 1234, \"double1\": 1234.12}");
    Assert.assertEquals(find(testData, "string1", "string2").toString(),
        "{\"string1\": \"string2\", \"long1\": 4567, \"double1\": 4567.89}");
  }

  /**
   * Returns the first record whose schema declares {@code field} and whose value for
   * that field string-equals {@code value}, or {@code null} if no record matches.
   */
  private static GenericRecord find(Collection<GenericRecord> records, String field, String value) {
    return records.stream()
        .filter(record -> record.getSchema().getField(field) != null)
        .filter(record -> record.get(field) != null && record.get(field).toString().equals(value))
        .findFirst()
        .orElse(null);
  }
}
| 4,049 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/test/TestHelloWorldSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.test;
import java.io.IOException;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Unit tests for {@link HelloWorldSource}
*/
/**
 * Unit tests for {@link HelloWorldSource}
 */
public class TestHelloWorldSource {

  @Test
  public void testSourceExtractor() throws DataRecordException, IOException {
    final int numHellos = 10;
    SourceState state = new SourceState();
    state.setProp(HelloWorldSource.NUM_HELLOS_FULL_KEY, numHellos);
    HelloWorldSource source = new HelloWorldSource();

    List<WorkUnit> workUnits = source.getWorkunits(state);
    Assert.assertEquals(workUnits.size(), numHellos);

    // Hello ids are 1-based; each work unit yields exactly one greeting record.
    for (int i = 0; i < workUnits.size(); ++i) {
      WorkUnit workUnit = workUnits.get(i);
      int helloId = i + 1;
      Assert.assertEquals(workUnit.getPropAsInt(HelloWorldSource.HELLO_ID_FULL_KEY), helloId);

      Extractor<String, String> extractor = source.getExtractor(new WorkUnitState(workUnit, state));
      Assert.assertEquals(extractor.getExpectedRecordCount(), 1);
      Assert.assertEquals(extractor.readRecord(null), "Hello world " + helloId + " !");
    }
  }
}
| 4,050 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/test/TestStressTestingSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.test;
import java.io.IOException;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Unit tests for {@link StressTestingSource}
*/
/**
 * Unit tests for {@link StressTestingSource}.
 *
 * <p>Timing-sensitive tests are disabled by default ({@code enabled=false}) because
 * they depend on the wall-clock behavior of the host machine.
 */
public class TestStressTestingSource {

  /** Every test allocates 100-byte records. */
  private static final int MEM_ALLOC_BYTES = 100;

  /** Builds a {@link SourceState} with the settings common to all tests in this class. */
  private static SourceState baseState(int numWorkUnits, int numRecords) {
    SourceState state = new SourceState();
    state.setProp(StressTestingSource.NUM_WORK_UNITS_KEY, numWorkUnits);
    state.setProp(StressTestingSource.MEM_ALLOC_BYTES_KEY, MEM_ALLOC_BYTES);
    state.setProp(StressTestingSource.NUM_RECORDS_KEY, numRecords);
    return state;
  }

  /**
   * Reads the extractor to exhaustion, asserting each record is 100 bytes, and
   * returns the elapsed wall-clock time in microseconds.
   */
  private static long drainAndTimeMicros(Extractor<String, byte[]> extractor)
      throws DataRecordException, IOException {
    byte[] record;
    long startTimeNano = System.nanoTime();
    while ((record = extractor.readRecord(null)) != null) {
      Assert.assertEquals(record.length, MEM_ALLOC_BYTES);
    }
    long endTimeNano = System.nanoTime();
    return (endTimeNano - startTimeNano) / 1000;
  }

  @Test
  public void testSourceExtractor() throws DataRecordException, IOException {
    final int NUM_WORK_UNITS = 10;
    final int COMPUTE_TIME_MICRO = 10;
    final int NUM_RECORDS = 10000;

    SourceState state = baseState(NUM_WORK_UNITS, NUM_RECORDS);
    state.setProp(StressTestingSource.COMPUTE_TIME_MICRO_KEY, COMPUTE_TIME_MICRO);
    StressTestingSource source = new StressTestingSource();

    List<WorkUnit> wus = source.getWorkunits(state);
    Assert.assertEquals(wus.size(), NUM_WORK_UNITS);

    // Every work unit advertises the configured record count and emits 100-byte records.
    for (WorkUnit wu : wus) {
      WorkUnitState wuState = new WorkUnitState(wu, state);
      Extractor<String, byte[]> extractor = source.getExtractor(wuState);
      Assert.assertEquals(extractor.getExpectedRecordCount(), NUM_RECORDS);
      Assert.assertEquals(extractor.readRecord(null).length, MEM_ALLOC_BYTES);
    }
  }

  @Test (enabled=false)
  public void testComputeTime() throws DataRecordException, IOException {
    final int COMPUTE_TIME_MICRO = 10000;
    final int NUM_RECORDS = 500;

    SourceState state = baseState(1, NUM_RECORDS);
    state.setProp(StressTestingSource.COMPUTE_TIME_MICRO_KEY, COMPUTE_TIME_MICRO);
    StressTestingSource source = new StressTestingSource();

    List<WorkUnit> wus = source.getWorkunits(state);
    Assert.assertEquals(wus.size(), 1);

    Extractor<String, byte[]> extractor = source.getExtractor(new WorkUnitState(wus.get(0), state));
    long timeSpentMicro = drainAndTimeMicros(extractor);
    // check that there is less than 5 second difference between expected and actual time spent
    Assert.assertTrue(Math.abs(timeSpentMicro - (COMPUTE_TIME_MICRO * NUM_RECORDS)) < (5000000),
        "Time spent " + timeSpentMicro);
  }

  @Test (enabled=false)
  public void testSleepTime() throws DataRecordException, IOException {
    final int SLEEP_TIME_MICRO = 10000;
    final int NUM_RECORDS = 500;

    SourceState state = baseState(1, NUM_RECORDS);
    state.setProp(StressTestingSource.SLEEP_TIME_MICRO_KEY, SLEEP_TIME_MICRO);
    StressTestingSource source = new StressTestingSource();

    List<WorkUnit> wus = source.getWorkunits(state);
    Assert.assertEquals(wus.size(), 1);

    Extractor<String, byte[]> extractor = source.getExtractor(new WorkUnitState(wus.get(0), state));
    long timeSpentMicro = drainAndTimeMicros(extractor);
    // check that there is less than 2 second difference between expected and actual time spent
    Assert.assertTrue(Math.abs(timeSpentMicro - (SLEEP_TIME_MICRO * NUM_RECORDS)) < (2000000),
        "Time spent " + timeSpentMicro);
  }

  @Test (enabled=false)
  public void testRunDuration() throws DataRecordException, IOException {
    final int SLEEP_TIME_MICRO = 1000;
    final int NUM_RECORDS = 30; // this config is ignored since the duration is set
    final int RUN_DURATION_SECS = 5;

    SourceState state = baseState(1, NUM_RECORDS);
    state.setProp(StressTestingSource.SLEEP_TIME_MICRO_KEY, SLEEP_TIME_MICRO);
    state.setProp(StressTestingSource.RUN_DURATION_KEY, RUN_DURATION_SECS);
    StressTestingSource source = new StressTestingSource();

    List<WorkUnit> wus = source.getWorkunits(state);
    Assert.assertEquals(wus.size(), 1);

    Extractor<String, byte[]> extractor = source.getExtractor(new WorkUnitState(wus.get(0), state));
    long timeSpentMicro = drainAndTimeMicros(extractor);
    // check that there is less than 1 second difference between expected and actual time spent
    Assert.assertTrue(Math.abs(timeSpentMicro - (RUN_DURATION_SECS * 1000000)) < (1000000),
        "Time spent " + timeSpentMicro);
  }

  @Test
  public void testThrowException() throws DataRecordException, IOException {
    final int SLEEP_TIME_MICRO = 1000;
    final int NUM_RECORDS = 30; // this config is ignored since the duration is set
    final int RUN_DURATION_SECS = 5;

    SourceState state = baseState(1, NUM_RECORDS);
    state.setProp(StressTestingSource.SLEEP_TIME_MICRO_KEY, SLEEP_TIME_MICRO);
    state.setProp(StressTestingSource.RUN_DURATION_KEY, RUN_DURATION_SECS);
    state.setProp(StressTestingSource.THROW_EXCEPTION, true);
    StressTestingSource source = new StressTestingSource();

    List<WorkUnit> wus = source.getWorkunits(state);
    Assert.assertEquals(wus.size(), 1);

    Extractor<String, byte[]> extractor = source.getExtractor(new WorkUnitState(wus.get(0), state));
    // The extractor is configured to fail; draining it must surface an IOException.
    Assert.expectThrows(IOException.class, () -> {
      byte[] record;
      while ((record = extractor.readRecord(null)) != null) {
        Assert.assertEquals(record.length, MEM_ALLOC_BYTES);
      }
    });
  }
}
| 4,051 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/MultiLimiterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
public class MultiLimiterTest {
@Test
public void test() throws Exception {
CountBasedLimiter countLimiter1 = new CountBasedLimiter(3);
CountBasedLimiter countLimiter2 = new CountBasedLimiter(1);
MultiLimiter multiLimiter = new MultiLimiter(countLimiter1, countLimiter2);
// Can only take 1 permit (limiter2 has only 1 permit available)
Assert.assertNotNull(multiLimiter.acquirePermits(1));
Assert.assertNull(multiLimiter.acquirePermits(1));
// limiter1 has 1 leftover permit (one consumed in the failed second permit above)
Assert.assertNotNull(countLimiter1.acquirePermits(1));
Assert.assertNull(countLimiter1.acquirePermits(1));
// limiter2 has not leftover permits
Assert.assertNull(countLimiter2.acquirePermits(1));
}
public void testConstructor() throws Exception {
CountBasedLimiter countLimiter1 = new CountBasedLimiter(3);
CountBasedLimiter countLimiter2 = new CountBasedLimiter(3);
CountBasedLimiter countLimiter3 = new CountBasedLimiter(3);
NoopLimiter noopLimiter = new NoopLimiter();
MultiLimiter multiLimiter1 = new MultiLimiter(countLimiter1, countLimiter2);
Assert.assertEquals(multiLimiter1.getUnderlyingLimiters(), Lists.newArrayList(countLimiter1, countLimiter2));
// Noop limiters get filtered
MultiLimiter multiLimiter2 = new MultiLimiter(countLimiter1, noopLimiter);
Assert.assertEquals(multiLimiter2.getUnderlyingLimiters(), Lists.newArrayList(countLimiter1));
// multilimiters get expanded
MultiLimiter multiLimiter3 = new MultiLimiter(multiLimiter1, countLimiter3);
Assert.assertEquals(multiLimiter3.getUnderlyingLimiters(), Lists.newArrayList(countLimiter1, countLimiter2, countLimiter3));
// deduplication
MultiLimiter multiLimiter4 = new MultiLimiter(countLimiter1, countLimiter1);
Assert.assertEquals(multiLimiter4.getUnderlyingLimiters(), Lists.newArrayList(countLimiter1));
// deduplication on expanded multilimiters
MultiLimiter multiLimiter5 = new MultiLimiter(multiLimiter1, countLimiter1);
Assert.assertEquals(multiLimiter5.getUnderlyingLimiters(), Lists.newArrayList(countLimiter1, countLimiter2));
}
} | 4,052 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/RateBasedLimiterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.Meter;
import com.google.common.math.DoubleMath;
/**
* Unit tests for {@link RateBasedLimiter}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.util.limiter" })
public class RateBasedLimiterTest {

  private static final Random RANDOM = new Random();

  private Limiter limiter;

  /** Creates and starts a limiter that allows 20 permits per second. */
  @BeforeClass
  public void setUp() {
    this.limiter = new RateBasedLimiter(20, TimeUnit.SECONDS);
    this.limiter.start();
  }

  /**
   * Acquires 100 permits with small random pauses in between and checks that the
   * observed mean acquisition rate stays within 5% of the configured 20 permits/second.
   */
  @Test
  public void testThrottling() throws InterruptedException {
    Meter observedRate = new Meter();
    int acquired = 0;
    while (acquired < 100) {
      Assert.assertNotNull(this.limiter.acquirePermits(1));
      observedRate.mark();
      // Sleep 0-9 ms; the mask keeps the random value non-negative before the modulo.
      Thread.sleep((RANDOM.nextInt() & Integer.MAX_VALUE) % 10);
      acquired++;
    }
    // Fuzzy comparison with a 5% tolerance around the target rate.
    Assert.assertTrue(DoubleMath.fuzzyEquals(observedRate.getMeanRate(), 20d, 20d * 0.05));
  }

  /** Stops the shared limiter once all tests in the class have run. */
  @AfterClass
  public void tearDown() {
    this.limiter.stop();
  }
}
| 4,053 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/CountBasedLimiterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Unit tests for {@link CountBasedLimiter}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.util.limiter" })
public class CountBasedLimiterTest {

  /**
   * Verifies that a {@link CountBasedLimiter} with 10 permits grants exactly 10
   * single-permit acquisitions and denies the 11th.
   */
  @Test
  public void testThrottling() throws InterruptedException {
    Limiter limiter = new CountBasedLimiter(10);
    limiter.start();
    // All 10 configured permits can be acquired one at a time...
    for (int i = 0; i < 10; i++) {
      // assertNotNull (instead of assertTrue(x != null)) gives a clearer failure message
      Assert.assertNotNull(limiter.acquirePermits(1));
    }
    // ...and the 11th acquisition is rejected (null permit).
    Assert.assertNull(limiter.acquirePermits(1));
    limiter.stop();
  }
}
| 4,054 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/TimeBasedLimiterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Unit tests for {@link TimeBasedLimiter}.
*
* @author Yinan Li
*/
@Test(groups = { "gobblin.util.limiter" })
public class TimeBasedLimiterTest {

  private Limiter limiter;

  /** Creates and starts a limiter that grants permits only for 3 seconds. */
  @BeforeClass
  public void setUp() {
    // 3L instead of 3l: a lowercase 'l' suffix is easily misread as the digit '1'
    this.limiter = new TimeBasedLimiter(3L, TimeUnit.SECONDS);
    this.limiter.start();
  }

  /**
   * Verifies that permits are granted while within the 3-second window and denied
   * once the window has elapsed.
   */
  @Test
  public void testThrottling() throws InterruptedException {
    // Acquisitions at ~0ms, ~1000ms and ~2000ms fall inside the window...
    Assert.assertNotNull(this.limiter.acquirePermits(1));
    Thread.sleep(1000);
    Assert.assertNotNull(this.limiter.acquirePermits(1));
    Thread.sleep(1000);
    Assert.assertNotNull(this.limiter.acquirePermits(1));
    Thread.sleep(1100);
    // ...but at ~3100ms the 3-second limit has passed and the permit is denied.
    Assert.assertNull(this.limiter.acquirePermits(1));
  }

  @AfterClass
  public void tearDown() {
    this.limiter.stop();
  }
}
| 4,055 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/DefaultLimiterFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
/**
* Unit tests for {@link DefaultLimiterFactory}.
*/
@Test(groups = {"gobblin.util.limiter"})
public class DefaultLimiterFactoryTest {

  /**
   * Verifies that {@link DefaultLimiterFactory#newLimiter} builds the expected limiter
   * type and migrates deprecated configuration keys to their replacements: after the
   * call, the state contains the new keys and no longer contains the deprecated ones.
   */
  @Test
  public void testNewLimiter() {
    // Rate limiter configured through the deprecated keys: the factory should build
    // the limiter and rewrite the deprecated keys to the new ones in the state.
    State stateWithRateLimitDeprecatedKeys = new State();
    stateWithRateLimitDeprecatedKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_TYPE_KEY, BaseLimiterType.RATE_BASED);
    stateWithRateLimitDeprecatedKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY_DEP, "10");
    stateWithRateLimitDeprecatedKeys
        .setProp(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY_DEP, TimeUnit.MINUTES);
    Limiter rateLimiterFromDeprecatedKeys = DefaultLimiterFactory.newLimiter(stateWithRateLimitDeprecatedKeys);
    Assert.assertTrue(rateLimiterFromDeprecatedKeys instanceof RateBasedLimiter);
    Assert.assertTrue(stateWithRateLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY));
    Assert
        .assertFalse(stateWithRateLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY_DEP));
    Assert.assertTrue(
        stateWithRateLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY));
    Assert.assertFalse(
        stateWithRateLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY_DEP));

    // Time limiter configured through the deprecated keys: same migration contract.
    State stateWithTimeLimitDeprecatedKeys = new State();
    stateWithTimeLimitDeprecatedKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_TYPE_KEY, BaseLimiterType.TIME_BASED);
    stateWithTimeLimitDeprecatedKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_KEY_DEP, "10");
    stateWithTimeLimitDeprecatedKeys
        .setProp(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_TIMEUNIT_KEY_DEP, TimeUnit.MINUTES);
    Limiter timeLimiterFromDeprecatedKeys = DefaultLimiterFactory.newLimiter(stateWithTimeLimitDeprecatedKeys);
    Assert.assertTrue(timeLimiterFromDeprecatedKeys instanceof TimeBasedLimiter);
    Assert.assertTrue(stateWithTimeLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_KEY));
    Assert
        .assertFalse(stateWithTimeLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_KEY_DEP));
    Assert.assertTrue(
        stateWithTimeLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_TIMEUNIT_KEY));
    Assert.assertFalse(
        stateWithTimeLimitDeprecatedKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_TIME_LIMIT_TIMEUNIT_KEY_DEP));

    // Rate limiter configured directly through the new (non-deprecated) keys.
    // FIX: this section previously set the deprecated *_KEY_DEP keys again, so the
    // new-key configuration path was never actually exercised.
    State stateWithNewKeys = new State();
    stateWithNewKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_TYPE_KEY, BaseLimiterType.RATE_BASED);
    stateWithNewKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY, "10");
    stateWithNewKeys.setProp(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY, TimeUnit.MINUTES);
    Limiter rateLimiterFromNewKeys = DefaultLimiterFactory.newLimiter(stateWithNewKeys);
    Assert.assertTrue(rateLimiterFromNewKeys instanceof RateBasedLimiter);
    Assert.assertTrue(stateWithNewKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY));
    Assert.assertFalse(stateWithNewKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_KEY_DEP));
    Assert.assertTrue(stateWithNewKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY));
    Assert.assertFalse(stateWithNewKeys.contains(DefaultLimiterFactory.EXTRACT_LIMIT_RATE_LIMIT_TIMEUNIT_KEY_DEP));
  }
}
| 4,056 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/limiter/broker/SharedLimiterFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.limiter.broker;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.BrokerConstants;
import org.apache.gobblin.broker.ResourceInstance;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerImpl;
import org.apache.gobblin.broker.SimpleScope;
import org.apache.gobblin.broker.SimpleScopeType;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.util.limiter.CountBasedLimiter;
import org.apache.gobblin.util.limiter.Limiter;
import org.apache.gobblin.util.limiter.MultiLimiter;
import org.apache.gobblin.util.limiter.NoopLimiter;
public class SharedLimiterFactoryTest {

  // Joins config key segments with the broker's "." separator convention
  public static final Joiner JOINER = Joiner.on(".");

  /**
   * With no limiter configuration, auto-scoping resolves to GLOBAL and the factory
   * falls back to a {@link NoopLimiter}.
   */
  @Test
  public void testEmptyConfig() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker =
        getBrokerForConfigMap(ImmutableMap.<String, String>of());

    SharedLimiterFactory<SimpleScopeType> factory = new SharedLimiterFactory<>();
    // Asking for the auto scope from a LOCAL config view should still resolve to GLOBAL
    Assert.assertEquals(
        factory.getAutoScope(broker, broker.getConfigView(SimpleScopeType.LOCAL, new SharedLimiterKey("resource"), factory.getName())),
        SimpleScopeType.GLOBAL);

    Limiter limiter = ((ResourceInstance<Limiter>) factory.createResource(broker,
        broker.getConfigView(SimpleScopeType.GLOBAL, new SharedLimiterKey("resource"), factory.getName()))).getResource();
    Assert.assertTrue(limiter instanceof NoopLimiter);
  }

  /**
   * A limiter class and count configured under the factory's config prefix produce a
   * {@link CountBasedLimiter} with the configured permit count.
   */
  @Test
  public void testCountLimiter() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker =
        getBrokerForConfigMap(ImmutableMap.of(
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, SharedLimiterFactory.LIMITER_CLASS_KEY), "CountBasedLimiter",
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, CountBasedLimiter.Factory.COUNT_KEY), "10"
        ));

    SharedLimiterFactory<SimpleScopeType> factory = new SharedLimiterFactory<>();
    Assert.assertEquals(
        factory.getAutoScope(broker, broker.getConfigView(SimpleScopeType.LOCAL, new SharedLimiterKey("resource"), factory.getName())),
        SimpleScopeType.GLOBAL);

    Limiter limiter = ((ResourceInstance<Limiter>) factory.createResource(broker,
        broker.getConfigView(SimpleScopeType.GLOBAL, new SharedLimiterKey("resource"), factory.getName()))).getResource();
    Assert.assertTrue(limiter instanceof CountBasedLimiter);
    Assert.assertEquals(((CountBasedLimiter) limiter).getCountLimit(), 10);
  }

  /**
   * Scope-specific configuration (GLOBAL count 10, LOCAL count 5) yields a
   * {@link MultiLimiter} per local broker: each combines its own LOCAL limiter with a
   * GLOBAL limiter shared across brokers. The LOCAL components must differ between the
   * two sub-brokers while the GLOBAL component is the same shared instance.
   */
  @Test
  public void testMultiLevelLimiter() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker =
        getBrokerForConfigMap(ImmutableMap.of(
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, SimpleScopeType.GLOBAL, SharedLimiterFactory.LIMITER_CLASS_KEY), "CountBasedLimiter",
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, SimpleScopeType.GLOBAL, CountBasedLimiter.Factory.COUNT_KEY), "10",
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, SimpleScopeType.LOCAL, SharedLimiterFactory.LIMITER_CLASS_KEY), "CountBasedLimiter",
            JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, SharedLimiterFactory.NAME, SimpleScopeType.LOCAL, CountBasedLimiter.Factory.COUNT_KEY), "5"
        ));

    // Two independent LOCAL sub-brokers derived from the same top-level broker
    SharedResourcesBroker<SimpleScopeType> localBroker1 =
        broker.newSubscopedBuilder(new SimpleScope<>(SimpleScopeType.LOCAL, "local1")).build();
    SharedResourcesBroker<SimpleScopeType> localBroker2 =
        broker.newSubscopedBuilder(new SimpleScope<>(SimpleScopeType.LOCAL, "local2")).build();

    SharedLimiterFactory<SimpleScopeType> factory = new SharedLimiterFactory<>();
    Limiter limiter1 = ((ResourceInstance<Limiter>) factory.createResource(localBroker1,
        broker.getConfigView(SimpleScopeType.LOCAL, new SharedLimiterKey("resource"), factory.getName()))).getResource();
    Limiter limiter2 = ((ResourceInstance<Limiter>) factory.createResource(localBroker2,
        broker.getConfigView(SimpleScopeType.LOCAL, new SharedLimiterKey("resource"), factory.getName()))).getResource();

    Assert.assertTrue(limiter1 instanceof MultiLimiter);
    Assert.assertTrue(limiter2 instanceof MultiLimiter);
    // Underlying limiter order: index 0 is the LOCAL limiter (count 5), index 1 is GLOBAL (count 10)
    Assert.assertEquals(((CountBasedLimiter)((MultiLimiter) limiter1).getUnderlyingLimiters().get(0)).getCountLimit(), 5);
    Assert.assertEquals(((CountBasedLimiter)((MultiLimiter) limiter1).getUnderlyingLimiters().get(1)).getCountLimit(), 10);
    Assert.assertEquals(((CountBasedLimiter)((MultiLimiter) limiter2).getUnderlyingLimiters().get(0)).getCountLimit(), 5);
    Assert.assertEquals(((CountBasedLimiter)((MultiLimiter) limiter2).getUnderlyingLimiters().get(1)).getCountLimit(), 10);
    // LOCAL components are per-broker instances; the GLOBAL component is shared
    Assert.assertNotEquals(((MultiLimiter) limiter1).getUnderlyingLimiters().get(0), ((MultiLimiter) limiter2).getUnderlyingLimiters().get(0));
    Assert.assertEquals(((MultiLimiter) limiter1).getUnderlyingLimiters().get(1), ((MultiLimiter) limiter2).getUnderlyingLimiters().get(1));
  }

  // Builds a top-level GLOBAL broker whose configuration comes from the given map.
  private SharedResourcesBrokerImpl<SimpleScopeType> getBrokerForConfigMap(Map<String, String> configMap) {
    Config config = ConfigFactory.parseMap(configMap);
    return
        SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(config, SimpleScopeType.GLOBAL.defaultScopeInstance());
  }
}
| 4,057 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/callbacks/TestCallbacksDispatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.callbacks;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Function;
import lombok.AllArgsConstructor;
/**
* Unit tests for {@link CallbacksDispatcher}
*/
public class TestCallbacksDispatcher {

  /**
   * Exercises the dispatcher's normal flow twice: once with the logger-only constructor
   * and once with an explicit two-thread executor.
   */
  @Test
  public void testHappyPath() throws InterruptedException, ExecutionException {
    final Logger log =
        LoggerFactory.getLogger(TestCallbacksDispatcher.class.getSimpleName() +
            ".testHappyPath");
    CallbacksDispatcher<MyListener> disp1 = new CallbacksDispatcher<>(log);
    runHappyPath(disp1);
    CallbacksDispatcher<MyListener> disp2 =
        new CallbacksDispatcher<>(Executors.newFixedThreadPool(2), log);
    runHappyPath(disp2);
  }

  /**
   * Verifies that a listener throwing from its callback is reported as a failure while
   * the remaining listeners still complete successfully.
   */
  @Test
  public void testErrorCallback() throws InterruptedException, ExecutionException {
    final Logger log =
        LoggerFactory.getLogger(TestCallbacksDispatcher.class.getSimpleName() +
            ".testErrorCallback");
    CallbacksDispatcher<MyListener> disp1 = new CallbacksDispatcher<>(log);
    MyListener l1 = Mockito.mock(MyListener.class);
    MyListener l2 = Mockito.mock(MyListener.class);
    MyListener l3 = Mockito.mock(MyListener.class);
    // l2 is rigged to throw from voidCallback(); l1 and l3 behave normally
    Mockito.doThrow(new RuntimeException("injected error")).when(l2).voidCallback();

    disp1.addListener(l1);
    disp1.addListener(l2);
    disp1.addListener(l3);

    final VoidCallback voidCallback = new VoidCallback();
    CallbacksDispatcher.CallbackResults<MyListener, Void> voidRes =
        disp1.execCallbacks(voidCallback);
    // Two successes (l1, l3), one failure (l2), no cancellations
    Assert.assertEquals(voidRes.getSuccesses().size(), 2);
    Assert.assertEquals(voidRes.getFailures().size(), 1);
    Assert.assertEquals(voidRes.getCancellations().size(), 0);
    Assert.assertTrue(voidRes.getSuccesses().get(l1).isSuccessful());
    Assert.assertTrue(voidRes.getSuccesses().get(l3).isSuccessful());
    Assert.assertTrue(voidRes.getFailures().get(l2).hasFailed());
  }

  /**
   * Drives a dispatcher through add/remove listener cycles, checking the result counts
   * and per-listener callback results at each step.
   */
  private void runHappyPath(CallbacksDispatcher<MyListener> disp) throws InterruptedException, ExecutionException {
    MyListener l1 = Mockito.mock(MyListener.class);
    MyListener l2 = Mockito.mock(MyListener.class);
    // The two listeners are stubbed to return opposite booleans for each param value
    Mockito.when(l1.booleanCallback(Mockito.eq(1))).thenReturn(true);
    Mockito.when(l1.booleanCallback(Mockito.eq(2))).thenReturn(false);
    Mockito.when(l2.booleanCallback(Mockito.eq(1))).thenReturn(false);
    Mockito.when(l2.booleanCallback(Mockito.eq(2))).thenReturn(true);

    final VoidCallback voidCallback = new VoidCallback();
    final BoolCallback boolCallback1 = new BoolCallback(1);
    final BoolCallback boolCallback2 = new BoolCallback(2);

    // No listeners registered yet: dispatch produces no results at all
    Assert.assertEquals(disp.getListeners().size(), 0);
    CallbacksDispatcher.CallbackResults<MyListener, Void> voidRes = disp.execCallbacks(voidCallback);
    Assert.assertEquals(voidRes.getSuccesses().size(), 0);
    Assert.assertEquals(voidRes.getFailures().size(), 0);
    Assert.assertEquals(voidRes.getCancellations().size(), 0);

    // One listener (l1): exactly one success
    disp.addListener(l1);
    Assert.assertEquals(disp.getListeners().size(), 1);
    voidRes = disp.execCallbacks(voidCallback);
    Assert.assertEquals(voidRes.getSuccesses().size(), 1);
    Assert.assertEquals(voidRes.getFailures().size(), 0);
    Assert.assertEquals(voidRes.getCancellations().size(), 0);

    // Two listeners: each returns its stubbed boolean for param=1
    disp.addListener(l2);
    Assert.assertEquals(disp.getListeners().size(), 2);
    CallbacksDispatcher.CallbackResults<MyListener, Boolean> boolRes = disp.execCallbacks(boolCallback1);
    Assert.assertEquals(boolRes.getSuccesses().size(), 2);
    Assert.assertEquals(boolRes.getFailures().size(), 0);
    Assert.assertEquals(boolRes.getCancellations().size(), 0);
    Assert.assertTrue(boolRes.getSuccesses().get(l1).getResult());
    Assert.assertFalse(boolRes.getSuccesses().get(l2).getResult());

    // After removing l1, only l2 responds (true for param=2)
    disp.removeListener(l1);
    Assert.assertEquals(disp.getListeners().size(), 1);
    boolRes = disp.execCallbacks(boolCallback2);
    Assert.assertEquals(boolRes.getSuccesses().size(), 1);
    Assert.assertEquals(boolRes.getFailures().size(), 0);
    Assert.assertEquals(boolRes.getCancellations().size(), 0);
    Assert.assertTrue(boolRes.getSuccesses().get(l2).getResult());

    // All listeners removed: dispatch is a no-op again
    disp.removeListener(l2);
    Assert.assertEquals(disp.getListeners().size(), 0);
    boolRes = disp.execCallbacks(boolCallback2);
    Assert.assertEquals(boolRes.getSuccesses().size(), 0);
    Assert.assertEquals(boolRes.getFailures().size(), 0);
    Assert.assertEquals(boolRes.getCancellations().size(), 0);

    // Confirm each mock saw exactly the calls dispatched while it was registered
    Mockito.verify(l1).voidCallback();
    Mockito.verify(l1).booleanCallback(Mockito.eq(1));
    Mockito.verify(l2).booleanCallback(Mockito.eq(1));
    Mockito.verify(l2).booleanCallback(Mockito.eq(2));
  }

  // Minimal listener contract exercised by the tests above
  private static interface MyListener {
    void voidCallback();
    boolean booleanCallback(int param);
  }

  // Callback adapter that invokes voidCallback() and yields no result
  private static class VoidCallback implements Function<MyListener, Void> {
    @Override public Void apply(MyListener input) {
      input.voidCallback();
      return null;
    }
  }

  // Callback adapter that invokes booleanCallback(param) and returns its result
  @AllArgsConstructor
  private static class BoolCallback implements Function<MyListener, Boolean> {
    private final int param;
    @Override public Boolean apply(MyListener input) {
      return input.booleanCallback(param);
    }
  }
}
| 4,058 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/dataset/DatasetUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.dataset;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.State;
/**
* Tests for {@link DatasetUtils}.
*/
@Test(groups = { "gobblin.util.dataset" })
public class DatasetUtilsTest {

  /**
   * Requested-but-unconfigured and configured-but-unrequested datasets both yield no
   * entry; only a dataset that is both requested and configured gets its properties.
   */
  @Test
  public void testGetDatasetSpecificState() {
    String ds1 = "testDataset1";
    String ds2 = "testDataset2";
    String ds3 = "testDataset3";
    String key = "testKey1";
    String value = "testValue1";

    SourceState sourceState = new SourceState();
    sourceState.setProp(DatasetUtils.DATASET_SPECIFIC_PROPS, "[{\"dataset\" : \"" + ds1 + "\", \"" + key + "\" : \""
        + value + "\"}, {\"dataset\" : \"" + ds2 + "\", \"" + key + "\" : \"" + value + "\"}]");

    // Only ds1 and ds3 are requested; ds2 is configured but never asked for
    Map<String, State> propsByDataset =
        DatasetUtils.getDatasetSpecificProps(Lists.newArrayList(ds1, ds3), sourceState);

    State expectedForDs1 = new State();
    expectedForDs1.setProp(key, value);
    Assert.assertEquals(propsByDataset.get(ds1), expectedForDs1);
    Assert.assertNull(propsByDataset.get(ds2));
    Assert.assertNull(propsByDataset.get(ds3));
  }

  /**
   * A regex dataset pattern applies the configured properties to every requested
   * dataset whose name matches, and to nothing else.
   */
  @Test
  public void testGetDatasetSpecificStateWithRegex() {
    String matching1 = "testDataset1";
    String matching2 = "testDataset2";
    String nonMatching = "otherTestDataset1";
    String key = "testKey1";
    String value = "testValue1";

    SourceState sourceState = new SourceState();
    sourceState.setProp(DatasetUtils.DATASET_SPECIFIC_PROPS,
        "[{\"dataset\" : \"testDataset.*\", \"" + key + "\" : \"" + value + "\"}]");

    Map<String, State> propsByDataset =
        DatasetUtils.getDatasetSpecificProps(Lists.newArrayList(matching1, matching2, nonMatching), sourceState);

    State expected = new State();
    expected.setProp(key, value);
    // Both names matching "testDataset.*" pick up the property; the other gets nothing
    Assert.assertEquals(propsByDataset.get(matching1), expected);
    Assert.assertEquals(propsByDataset.get(matching2), expected);
    Assert.assertNull(propsByDataset.get(nonMatching));
  }
}
| 4,059 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/io/ThrottledInputStreamTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.io;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Charsets;
import org.apache.gobblin.util.limiter.CountBasedLimiter;
import org.apache.gobblin.util.limiter.Limiter;
public class ThrottledInputStreamTest {

  /**
   * Verifies that a {@link ThrottledInputStream} fails the read when the limiter has
   * fewer permits than bytes, and succeeds when permits cover the whole stream.
   */
  @Test
  public void test() throws Exception {
    ByteArrayInputStream inputStream = new ByteArrayInputStream("abcde".getBytes(Charsets.UTF_8));
    MeteredInputStream meteredInputStream = MeteredInputStream.builder().in(inputStream).updateFrequency(1).build();

    // With only 4 permits for 5 bytes, consuming the full stream must fail.
    // FIX: replaced the try/fail/catch pattern (which also left an unused local) with
    // expectThrows, matching the style used elsewhere in this codebase.
    Limiter insufficientLimiter = new CountBasedLimiter(4);
    InputStream throttled = new ThrottledInputStream(meteredInputStream, insufficientLimiter, meteredInputStream);
    Assert.expectThrows(RuntimeException.class, () -> IOUtils.toString(throttled, Charsets.UTF_8));

    // After resetting the underlying stream, 5 permits are enough to read all 5 bytes.
    meteredInputStream.reset();
    Limiter sufficientLimiter = new CountBasedLimiter(5);
    InputStream readable = new ThrottledInputStream(meteredInputStream, sufficientLimiter, meteredInputStream);
    Assert.assertEquals(IOUtils.toString(readable, Charsets.UTF_8), "abcde");
  }
}
| 4,060 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/io/MeteredOutputStreamTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.io;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.codahale.metrics.Meter;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
public class MeteredOutputStreamTest {

  /**
   * Writes 5 bytes through a duplicating wrapper around a {@link MeteredOutputStream}
   * and checks that the sink sees each byte twice, the metered stream can be located
   * inside the wrapping chain, and its meter counted the 10 duplicated bytes.
   */
  @Test
  public void test() throws Exception {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    Meter bytesMeter = new Meter();

    MeteredOutputStream metered = MeteredOutputStream.builder().out(sink).meter(bytesMeter).updateFrequency(1).build();
    MyOutputStream duplicating = new MyOutputStream(metered);
    DataOutputStream dos = new DataOutputStream(duplicating);

    dos.write("abcde".getBytes(Charsets.UTF_8));

    // Each input byte is written twice by the duplicating wrapper
    Assert.assertEquals(sink.toString(Charsets.UTF_8.name()), "aabbccddee");

    // The metered stream is discoverable from the outermost wrapper and counted
    // the duplicated bytes (10), not the 5 bytes originally written
    Optional<MeteredOutputStream> found = MeteredOutputStream.findWrappedMeteredOutputStream(dos);
    Assert.assertEquals(found.get(), metered);
    Assert.assertEquals(found.get().getBytesProcessedMeter().getCount(), 10);
  }

  /** An {@link OutputStream} that writes every byte it receives twice. */
  private static class MyOutputStream extends FilterOutputStream {

    public MyOutputStream(OutputStream out) {
      super(out);
    }

    @Override
    public void write(int b) throws IOException {
      this.out.write(b);
      this.out.write(b);
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
      // Route the bulk write through the single-byte path so duplication applies
      for (int i = 0; i < len; i++) {
        write(b[off + i]);
      }
    }
  }
}
| 4,061 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/io/StreamUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.io;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isIn;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import com.google.common.collect.Sets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link StreamUtils}.
 */
@Test
public class StreamUtilsTest {

  @Test
  public void testTarDir()
      throws IOException {
    FileSystem localFs = FileSystem.getLocal(new Configuration());

    // Set of expected Paths to be in the resulting tar file.
    Set<Path> expectedPaths = Sets.newHashSet();

    // Create input directory.
    Path testInDir = new Path("testDir");
    expectedPaths.add(testInDir);

    // Create output file path (randomized so concurrent runs don't collide).
    Path testOutFile = new Path("testTarOut" + UUID.randomUUID() + ".tar.gz");

    try {
      localFs.mkdirs(testInDir);

      // Create a test file path.
      Path testFile1 = new Path(testInDir, "testFile1");
      expectedPaths.add(testFile1);
      FSDataOutputStream testFileOut1 = localFs.create(testFile1);
      testFileOut1.close();

      // Create a second test file path.
      Path testFile2 = new Path(testInDir, "testFile2");
      expectedPaths.add(testFile2);
      FSDataOutputStream testFileOut2 = localFs.create(testFile2);
      testFileOut2.close();

      // Tar the input directory to the specific output file.
      StreamUtils.tar(localFs, testInDir, testOutFile);

      // Confirm every entry in the tar file is one of the expected paths.
      // (Replaces the deprecated Hamcrest isIn matcher with a plain TestNG assertion,
      // consistent with the rest of this module's tests.)
      try (TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(
          new GzipCompressorInputStream(localFs.open(testOutFile)))) {
        TarArchiveEntry tarArchiveEntry;
        while (null != (tarArchiveEntry = tarArchiveInputStream.getNextTarEntry())) {
          Assert.assertTrue(expectedPaths.contains(new Path(tarArchiveEntry.getName())),
              "Unexpected tar entry: " + tarArchiveEntry.getName());
        }
      }
    } finally {
      if (localFs.exists(testInDir)) {
        localFs.delete(testInDir, true);
      }
      if (localFs.exists(testOutFile)) {
        localFs.delete(testOutFile, true);
      }
    }
  }

  @Test
  public void testTarFile()
      throws IOException {
    FileSystem localFs = FileSystem.getLocal(new Configuration());

    // Set of expected Paths to be in the resulting tar file.
    Set<Path> expectedPaths = Sets.newHashSet();

    // Create input file path.
    Path testFile = new Path("testFile");
    expectedPaths.add(testFile);

    // Create output file path.
    Path testOutFile = new Path("testTarOut" + UUID.randomUUID() + ".tar.gz");

    try {
      // Create the input file.
      FSDataOutputStream testFileOut1 = localFs.create(testFile);
      testFileOut1.close();

      // Tar the input file to the specific output file.
      StreamUtils.tar(localFs, testFile, testOutFile);

      // Confirm the contents of the tar file are valid (same assertion style as testTarDir;
      // the original inconsistently used fully-qualified MatcherAssert/Matchers here).
      try (TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(
          new GzipCompressorInputStream(localFs.open(testOutFile)))) {
        TarArchiveEntry tarArchiveEntry;
        while (null != (tarArchiveEntry = tarArchiveInputStream.getNextTarEntry())) {
          Assert.assertTrue(expectedPaths.contains(new Path(tarArchiveEntry.getName())),
              "Unexpected tar entry: " + tarArchiveEntry.getName());
        }
      }
    } finally {
      if (localFs.exists(testFile)) {
        localFs.delete(testFile, true);
      }
      if (localFs.exists(testOutFile)) {
        localFs.delete(testOutFile, true);
      }
    }
  }

  @Test
  public void testRegularByteBufferToStream() throws IOException {
    final int BUF_LEN = 128000;
    Random random = new Random();
    byte[] srcBytes = new byte[BUF_LEN];
    random.nextBytes(srcBytes);

    // Allocate a larger size than we actually use to make sure we don't copy off the end of the
    // used portion of the buffer.
    ByteBuffer buffer = ByteBuffer.allocate(BUF_LEN * 2);
    verifyBuffer(srcBytes, buffer);

    // Now try with direct buffers; they aren't array backed so the codepath is different.
    buffer = ByteBuffer.allocateDirect(BUF_LEN * 2);
    verifyBuffer(srcBytes, buffer);
  }

  /**
   * Verifies that {@link StreamUtils#byteBufferToOutputStream} copies exactly the remaining
   * bytes of {@code buffer} (i.e. from its current position to its limit).
   */
  private void verifyBuffer(byte[] srcBytes, ByteBuffer buffer) throws IOException {
    buffer.put(srcBytes);
    buffer.flip();

    ByteArrayOutputStream bOs = new ByteArrayOutputStream();
    StreamUtils.byteBufferToOutputStream(buffer, bOs);
    Assert.assertEquals(bOs.toByteArray(), srcBytes);

    bOs = new ByteArrayOutputStream();
    buffer.rewind();
    // Consume one char (= 2 bytes) from the buffer; those bytes must not be included in the
    // output of byteBufferToOutputStream, which should start at the buffer's new position.
    buffer.getChar();
    StreamUtils.byteBufferToOutputStream(buffer, bOs);
    byte[] offByTwo = bOs.toByteArray();
    Assert.assertEquals(offByTwo.length, srcBytes.length - 2);
    for (int i = 0; i < offByTwo.length; i++) {
      Assert.assertEquals(offByTwo[i], srcBytes[i + 2]);
    }
  }
}
| 4,062 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/io/MeteredInputStreamTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.codahale.metrics.Meter;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
/**
 * Tests for {@link MeteredInputStream}.
 */
public class MeteredInputStreamTest {

  @Test
  public void test() throws Exception {
    InputStream is = new ByteArrayInputStream("aabbccddee".getBytes(Charsets.UTF_8));
    Meter meter = new Meter();
    MeteredInputStream mis = MeteredInputStream.builder().in(is).meter(meter).updateFrequency(1).build();
    InputStream skipped = new MyInputStream(mis);
    DataInputStream dis = new DataInputStream(skipped);

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    IOUtils.copy(dis, os);
    String output = os.toString(Charsets.UTF_8.name());
    // MyInputStream drops every other byte, so "aabbccddee" collapses to "abcde".
    Assert.assertEquals(output, "abcde");

    Optional<MeteredInputStream> meteredOpt = MeteredInputStream.findWrappedMeteredInputStream(dis);
    Assert.assertEquals(meteredOpt.get(), mis);
    // The meter counts bytes pulled from the underlying stream (all 10), not the 5 that survive.
    Assert.assertEquals(meteredOpt.get().getBytesProcessedMeter().getCount(), 10);
  }

  /**
   * An input stream that skips every second byte.
   */
  public static class MyInputStream extends FilterInputStream {
    public MyInputStream(InputStream in) {
      super(in);
    }

    @Override
    public int read() throws IOException {
      // Discard one byte, then return the next one (or EOF).
      int bte = super.read();
      if (bte == -1) {
        return bte;
      }
      return super.read();
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
      for (int i = 0; i < len; i++) {
        // Bug fix: check for EOF on the int result BEFORE narrowing to byte. The original
        // cast to byte first ("byte bte = (byte) read()"), making a legitimate 0xFF data
        // byte indistinguishable from the -1 EOF sentinel and truncating the stream early.
        int bte = read();
        if (bte == -1) {
          return i == 0 ? -1 : i;
        }
        b[off + i] = (byte) bte;
      }
      return len;
    }
  }
}
| 4,063 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/io/StreamCopierTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.io;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Charsets;
/**
 * Tests for {@link StreamCopier}.
 */
public class StreamCopierTest {

  @Test
  public void testSimpleCopy() throws Exception {
    String content = "This is a string";
    ByteArrayInputStream source = new ByteArrayInputStream(content.getBytes(Charsets.UTF_8));
    ByteArrayOutputStream sink = new ByteArrayOutputStream();

    new StreamCopier(source, sink).copy();

    Assert.assertEquals(content, new String(sink.toByteArray(), Charsets.UTF_8));
  }

  @Test
  public void testLongCopy() throws Exception {
    // Build an input considerably larger than the copier's buffer size below.
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < 1000; i++) {
      builder.append("testString");
    }
    String content = builder.toString();
    ByteArrayInputStream source = new ByteArrayInputStream(content.getBytes(Charsets.UTF_8));
    ByteArrayOutputStream sink = new ByteArrayOutputStream();

    new StreamCopier(source, sink).withBufferSize(100).copy();

    Assert.assertEquals(content, new String(sink.toByteArray(), Charsets.UTF_8));
  }

  @Test
  public void testBlockCopy() throws Exception {
    String content = "This is a string";
    ByteArrayInputStream source = new ByteArrayInputStream(content.getBytes(Charsets.UTF_8));
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    long maxBytes = 4L;

    new StreamCopier(source, sink, maxBytes).copy();

    // Only the first maxBytes bytes should have been copied.
    Assert.assertEquals(new String(sink.toByteArray(), Charsets.UTF_8),
        content.substring(0, (int) maxBytes));
  }

  @Test
  public void testCopyMeter() throws Exception {
    String content = "This is a string";
    Meter meter = new MetricRegistry().meter("my.meter");
    ByteArrayInputStream source = new ByteArrayInputStream(content.getBytes(Charsets.UTF_8));
    ByteArrayOutputStream sink = new ByteArrayOutputStream();

    new StreamCopier(source, sink).withCopySpeedMeter(meter).copy();

    Assert.assertEquals(content, new String(sink.toByteArray(), Charsets.UTF_8));
    // The meter count should equal the number of bytes copied.
    Assert.assertEquals(meter.getCount(), content.length());
  }
}
| 4,064 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/binpacking/WorstFitDecreasingBinPackingTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.binpacking;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import org.apache.gobblin.source.workunit.Extract;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.source.workunit.WorkUnitWeighter;
/**
 * Tests for {@link WorstFitDecreasingBinPacking}. The expected bin assignments below are tied
 * to the packing implementation's ordering (largest-first), so the index of each resulting
 * {@link MultiWorkUnit} matters.
 */
public class WorstFitDecreasingBinPackingTest {
  // Property name under which each work unit stores its weight.
  public static final String WEIGHT = "weight";

  // Weighter that reads a work unit's weight from the WEIGHT property.
  WorkUnitWeighter weighter = new FieldWeighter(WEIGHT);

  @Test
  public void testBasicPacking() throws Exception {
    // Two units of weight 10 fit together in one bin of capacity 20.
    List<WorkUnit> workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(10));
    List<WorkUnit> multiWorkUnits = new WorstFitDecreasingBinPacking(20).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 2);

    // 10 + 20 exceeds capacity 20, so two bins are required, one unit each.
    workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(20));
    multiWorkUnits = new WorstFitDecreasingBinPacking(20).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 2);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(1)).getWorkUnits().size(), 1);

    // The 20-weight unit fills one bin; the two 10-weight units share the second.
    workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(20));
    multiWorkUnits = new WorstFitDecreasingBinPacking(20).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 2);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(1)).getWorkUnits().size(), 2);
  }

  @Test
  public void testLargeWorkUnits() throws Exception {
    // Accept even large work units that don't fit in a single bucket: a unit heavier than the
    // bin capacity still gets its own bin rather than being rejected.
    List<WorkUnit> workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(30));
    List<WorkUnit> multiWorkUnits = new WorstFitDecreasingBinPacking(20).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 2);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(1)).getWorkUnits().size(), 1);
  }

  @Test
  public void testOneLargeUnitManySmallUnits() throws Exception {
    // Check that a large work unit doesn't prevent small work units from being packed together
    // (this was an issue in a previous implementation of the algorithm).
    List<WorkUnit> workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(10),
        getWorkUnitWithWeight(10000));
    List<WorkUnit> multiWorkUnits = new WorstFitDecreasingBinPacking(50).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 2);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 3);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(1)).getWorkUnits().size(), 1);
  }

  @Test
  public void testMaxSizeZero() throws Exception {
    // If maxSize is 0, packing is disabled: the input list is returned as-is, one unit per bin.
    List<WorkUnit> workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(1),
        getWorkUnitWithWeight(1));
    List<WorkUnit> multiWorkUnits = new WorstFitDecreasingBinPacking(0).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 2);
    Assert.assertEquals(workUnitList, multiWorkUnits);
  }

  @Test
  public void testOverflows() throws Exception {
    // Test overflows: Long.MAX_VALUE weights must not wrap around when summed into a bin,
    // so each oversized unit lands in its own bin.
    List<WorkUnit> workUnitList = Lists.newArrayList(
        getWorkUnitWithWeight(Long.MAX_VALUE),
        getWorkUnitWithWeight(Long.MAX_VALUE),
        getWorkUnitWithWeight(10));
    List<WorkUnit> multiWorkUnits = new WorstFitDecreasingBinPacking(100).pack(workUnitList, weighter);
    Assert.assertEquals(multiWorkUnits.size(), 3);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(0)).getWorkUnits().size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(1)).getWorkUnits().size(), 1);
    Assert.assertEquals(((MultiWorkUnit) multiWorkUnits.get(2)).getWorkUnits().size(), 1);
  }

  /**
   * Creates a {@link WorkUnit} whose WEIGHT property is set to the given value.
   */
  public WorkUnit getWorkUnitWithWeight(long weight) {
    WorkUnit workUnit = new WorkUnit(new Extract(Extract.TableType.APPEND_ONLY, "", ""));
    workUnit.setProp(WEIGHT, Long.toString(weight));
    return workUnit;
  }
}
| 4,065 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/filesystem/ThrottledFileSystemTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.filesystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.util.limiter.CountBasedLimiter;
import org.apache.gobblin.util.limiter.Limiter;
import org.apache.gobblin.util.limiter.NotEnoughPermitsException;
/**
 * Tests for {@link ThrottledFileSystem}.
 */
public class ThrottledFileSystemTest {

  @Test
  public void testSimpleCalls() throws Exception {
    FileSystem underlyingFs = Mockito.mock(FileSystem.class);
    Mockito.when(underlyingFs.getFileStatus(Mockito.any(Path.class)))
        .thenReturn(new FileStatus(0, false, 0, 0, 0, new Path("/")));

    // Only two permits are available, so the third call must be rejected.
    Limiter permits = new CountBasedLimiter(2);
    ThrottledFileSystem throttledFs = new ThrottledFileSystem(underlyingFs, permits, "testService");

    Assert.assertNotNull(throttledFs.getFileStatus(new Path("/myFile")));
    Assert.assertNotNull(throttledFs.getFileStatus(new Path("/myFile")));
    try {
      throttledFs.getFileStatus(new Path("/myFile"));
      Assert.fail();
    } catch (NotEnoughPermitsException expected) {
      // Expected
    }
  }

  @Test
  public void testListing() throws Exception {
    FileSystem underlyingFs = Mockito.mock(FileSystem.class);
    Mockito.when(underlyingFs.listStatus(Mockito.any(Path.class))).thenAnswer(new Answer<FileStatus[]>() {
      @Override
      public FileStatus[] answer(InvocationOnMock invocation) throws Throwable {
        // The last path component encodes how many FileStatus entries to return.
        Path requested = (Path) invocation.getArguments()[0];
        int entries = Integer.parseInt(requested.getName());
        FileStatus status = new FileStatus(0, false, 0, 0, 0, new Path("/"));
        FileStatus[] result = new FileStatus[entries];
        for (int i = 0; i < entries; i++) {
          result[i] = status;
        }
        return result;
      }
    });

    Limiter permits = new CountBasedLimiter(5);
    ThrottledFileSystem throttledFs = new ThrottledFileSystem(underlyingFs, permits, "testService");
    Assert.assertEquals(throttledFs.getServiceName(), "testService");

    Assert.assertNotNull(throttledFs.listStatus(new Path("/files/99"))); // uses 1 permit
    Assert.assertNotNull(throttledFs.listStatus(new Path("/files/250"))); // uses 3 permits
    try {
      throttledFs.listStatus(new Path("/files/150")); // requires 2 permits, only 1 left
      Assert.fail();
    } catch (NotEnoughPermitsException expected) {
      // Expected
    }
    Assert.assertNotNull(throttledFs.listStatus(new Path("/files/99"))); // requires 1 permit
  }
}
| 4,066 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/filesystem/FileSystemFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.filesystem;
import java.net.URI;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerImpl;
import org.apache.gobblin.broker.SimpleScopeType;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.util.DecoratorUtils;
/**
 * Tests for {@link FileSystemFactory}: file systems obtained through a shared-resources broker
 * should be instrumented exactly once and shared across scopes.
 */
public class FileSystemFactoryTest {

  @Test
  public void test() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker = SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(
        ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());

    FileSystemKey key = new FileSystemKey(new URI("file:///"), new Configuration());
    FileSystemFactory<SimpleScopeType> factory = new FileSystemFactory<>();

    FileSystem fs = broker.getSharedResource(factory, key);
    verifyInstrumentedOnce(fs);

    // The same URI requested through a sub-scoped broker must resolve to the same shared instance.
    SharedResourcesBroker<SimpleScopeType> subBroker =
        broker.newSubscopedBuilder(SimpleScopeType.LOCAL.defaultScopeInstance()).build();
    FileSystem subBrokerFs = FileSystemFactory.get(new URI("file:///"), new Configuration(), subBroker);
    Assert.assertEquals(fs, subBrokerFs);
  }

  @Test
  public void testCreationWithInstrumentedScheme() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker = SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(
        ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());

    // Requesting the "instrumented-file" scheme directly should still yield a LocalFileSystem
    // underneath the instrumentation.
    FileSystemKey key = new FileSystemKey(new URI("instrumented-file:///"), new Configuration());
    FileSystemFactory<SimpleScopeType> factory = new FileSystemFactory<>();

    FileSystem fs = broker.getSharedResource(factory, key);

    verifyInstrumentedOnce(fs);
    Assert.assertTrue(DecoratorUtils.resolveUnderlyingObject(fs) instanceof LocalFileSystem);
  }

  @Test
  public void testCreationWithConfigurationFSImpl() throws Exception {
    SharedResourcesBrokerImpl<SimpleScopeType> broker = SharedResourcesBrokerFactory.<SimpleScopeType>createDefaultTopLevelBroker(
        ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());

    Configuration conf = new Configuration();
    // The local file system's scheme is "file", so Hadoop looks up "fs.file.impl"
    // (the original set "fs.local.impl", which is never consulted).
    conf.set("fs.file.impl", InstrumentedLocalFileSystem.class.getName());

    // Bug fix: pass the configured Configuration into the key. The original constructed the
    // key with a fresh "new Configuration()", so the fs-impl override above was silently unused.
    FileSystemKey key = new FileSystemKey(new URI("file:///"), conf);
    FileSystemFactory<SimpleScopeType> factory = new FileSystemFactory<>();

    FileSystem fs = broker.getSharedResource(factory, key);

    verifyInstrumentedOnce(fs);
    Assert.assertTrue(DecoratorUtils.resolveUnderlyingObject(fs) instanceof LocalFileSystem);
  }

  /**
   * Asserts that the decorator lineage of {@code fs} contains exactly one
   * {@link ThrottledFileSystem} layer — neither zero (not instrumented) nor two (double-wrapped).
   */
  private void verifyInstrumentedOnce(FileSystem fs) {
    List<Object> lineage = DecoratorUtils.getDecoratorLineage(fs);
    boolean foundThrottledFs = false;
    for (Object obj : lineage) {
      if (obj instanceof ThrottledFileSystem) {
        if (foundThrottledFs) {
          Assert.fail("Object instrumented twice.");
        }
        foundThrottledFs = true;
      }
    }
    Assert.assertTrue(foundThrottledFs, "Object not instrumented.");
  }
}
| 4,067 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/filesystem/InstrumentedLocalFileSystemTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.filesystem;
import java.io.File;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.io.Files;
import org.apache.gobblin.util.DecoratorUtils;
/**
 * Tests for {@link InstrumentedLocalFileSystem}.
 */
public class InstrumentedLocalFileSystemTest {

  @Test
  public void testFromInstrumentedScheme() throws Exception {
    File tmpDir = Files.createTempDir();
    tmpDir.deleteOnExit();

    FileSystem instrumentedFs =
        FileSystem.get(new URI(InstrumentedLocalFileSystem.SCHEME + ":///"), new Configuration());

    // The instrumented scheme should resolve to the instrumented wrapper around a LocalFileSystem.
    Assert.assertTrue(instrumentedFs instanceof InstrumentedLocalFileSystem);
    Assert.assertTrue(DecoratorUtils.resolveUnderlyingObject(instrumentedFs) instanceof LocalFileSystem);
    Assert.assertEquals(instrumentedFs.getFileStatus(new Path("/tmp")).getPath(),
        new Path("instrumented-file:///tmp"));
    Assert.assertEquals(instrumentedFs.getUri().getScheme(), "instrumented-file");

    // Basic create / stat / list / delete operations should behave like the local file system.
    Path basePath = new Path(tmpDir.getAbsolutePath());
    Assert.assertTrue(instrumentedFs.exists(basePath));

    Path file = new Path(basePath, "file");
    Assert.assertFalse(instrumentedFs.exists(file));
    instrumentedFs.create(new Path(basePath, "file"));
    Assert.assertTrue(instrumentedFs.exists(file));
    Assert.assertEquals(instrumentedFs.getFileStatus(file).getLen(), 0);
    Assert.assertEquals(instrumentedFs.listStatus(basePath).length, 1);

    instrumentedFs.delete(file, false);
    Assert.assertFalse(instrumentedFs.exists(file));
  }

  @Test
  public void testFromConfigurationOverride() throws Exception {
    Configuration configuration = new Configuration();
    configuration.set("fs.file.impl", InstrumentedLocalFileSystem.class.getName());

    FileSystem overriddenFs = FileSystem.newInstance(new URI("file:///"), configuration);

    // Overriding fs.file.impl should produce the instrumented class while keeping the "file" scheme.
    Assert.assertTrue(overriddenFs instanceof InstrumentedLocalFileSystem);
    Assert.assertTrue(DecoratorUtils.resolveUnderlyingObject(overriddenFs) instanceof LocalFileSystem);
    Assert.assertEquals(overriddenFs.getFileStatus(new Path("/tmp")).getPath(), new Path("file:///tmp"));
    Assert.assertEquals(overriddenFs.getUri().getScheme(), "file");
  }
}
| 4,068 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/recordcount/CompactionRecordCountProviderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.recordcount;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link CompactionRecordCountProvider}.
 */
@Test(groups = { "gobblin.util.recordcount" })
public class CompactionRecordCountProviderTest {

  @Test
  public void testFileNameRecordCountProvider() {
    CompactionRecordCountProvider recordCountProvider = new CompactionRecordCountProvider();

    // A constructed file name embeds the record count followed by two numeric components.
    String constructedName = CompactionRecordCountProvider.constructFileName("part-r-", ".avro", 123);
    Pattern expectedFormat = Pattern.compile("part\\-r\\-123\\.[\\d]*\\.[\\d]*\\.avro");
    Assert.assertTrue(expectedFormat.matcher(constructedName).matches());

    // The record count can be parsed back out of a conforming file name.
    Assert.assertEquals(recordCountProvider.getRecordCount(new Path("part-r-123.1.2.avro")), 123);
  }
}
| 4,069 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/recordcount/IngestionRecordCountProviderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.recordcount;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link IngestionRecordCountProvider}.
 */
@Test(groups = { "gobblin.util.recordcount" })
public class IngestionRecordCountProviderTest {

  @Test
  public void testFileNameFormat() {
    IngestionRecordCountProvider filenameRecordCountProvider = new IngestionRecordCountProvider();
    // The record count is inserted into the file name just before the extension.
    Assert.assertEquals(IngestionRecordCountProvider.constructFilePath("/a/b/c.avro", 123), "/a/b/c.123.avro");
    Assert.assertEquals(filenameRecordCountProvider.getRecordCount(new Path("/a/b/c.123.avro")), 123);
    // Use assertTrue/assertFalse rather than comparing a boolean against a literal
    // (clearer intent and better failure messages than assertEquals(x, true)).
    Assert.assertTrue(IngestionRecordCountProvider.containsRecordCount("/a/b/c.123.avro"));
    Assert.assertFalse(IngestionRecordCountProvider.containsRecordCount("/a/b/c.xyz.avro"));
  }
}
| 4,070 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/logs/LogCopierTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.logs;
import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
/**
 * Unit tests for {@link LogCopier}: rolled log files must be copied from a source
 * directory to a destination directory on each iteration, and the current log file
 * must be copied over on shutdown.
 *
 * NOTE(review): writes to fixed paths under /tmp on the local file system; assumes the
 * test environment permits that and no concurrent test uses the same paths.
 */
public class LogCopierTest {
  // Source and destination file systems; both are local-FS instances in this test.
  private FileSystem srcFs;
  private FileSystem destFs;
  // Directory the LogCopier reads log files from.
  private Path srcLogDir;
  // Directory the LogCopier writes copies to.
  private Path destLogDir;
  private LogCopier logCopier;
  // Content written to every test log file (14 chars). Copied files are asserted to have
  // length testString.length() + 1 — presumably the copier appends a newline; TODO confirm.
  private String testString = "Test Log line.";

  @BeforeClass
  public void setUp() throws IOException {
    this.srcFs = FileSystem.getLocal(new Configuration());
    this.destFs = FileSystem.getLocal(new Configuration());
    this.srcLogDir = new Path("/tmp/LogCopierTest/srcLogDir");
    if (!srcFs.exists(srcLogDir)) {
      srcFs.mkdirs(srcLogDir);
    }
    this.destLogDir = new Path("/tmp/LogCopierTest/destLogDir");
    if (!destFs.exists(destLogDir)) {
      destFs.mkdirs(destLogDir);
    }
    // "testLog" names the current (un-rolled) log file; an empty extension set means
    // all file extensions are accepted.
    this.logCopier = LogCopier.newBuilder()
        .readFrom(this.srcLogDir)
        .useCurrentLogFileName("testLog")
        .useSrcFileSystem(srcFs)
        .useDestFileSystem(destFs)
        .writeTo(destLogDir)
        .acceptsLogFileExtensions(ImmutableSet.of()).build();
  }

  // Creates a file at the given path containing testString.
  private void createFileHelper(FileSystem fs, Path path) throws IOException {
    FSDataOutputStream fsDataOutputStream = fs.create(path);
    fsDataOutputStream.writeBytes(testString);
    fsDataOutputStream.close();
  }

  @Test
  public void testCheckSrcLogFiles() throws Exception {
    //Create test log files on the srcFs
    createFileHelper(srcFs, new Path(srcLogDir, "testLog.log"));
    createFileHelper(srcFs, new Path(srcLogDir, "testLog.log.1"));
    //Run one iteration of the LogCopier. 1st rolled log file should be copied over.
    this.logCopier.runOneIteration();
    FileStatus[] destLogFiles = this.destFs.listStatus(destLogDir);
    Assert.assertEquals(destLogFiles.length, 1);
    Assert.assertEquals(destLogFiles[0].getLen(), testString.length() + 1);
    createFileHelper(srcFs, new Path(srcLogDir, "testLog.log.2"));
    //Run the 2nd iteration of LogCopier. 2nd rolled log file should be copied over.
    this.logCopier.runOneIteration();
    destLogFiles = this.destFs.listStatus(destLogDir);
    Assert.assertEquals(destLogFiles.length, 2);
    Assert.assertEquals(destLogFiles[0].getLen(), testString.length() + 1);
    Assert.assertEquals(destLogFiles[1].getLen(), testString.length() + 1);
    //Shutdown the LogCopier. The current log file (i.e. testLog.log) should be copied over.
    this.logCopier.shutDown();
    destLogFiles = this.destFs.listStatus(destLogDir);
    Assert.assertEquals(destLogFiles.length, 3);
    Assert.assertEquals(destLogFiles[0].getLen(), testString.length() + 1);
    Assert.assertEquals(destLogFiles[1].getLen(), testString.length() + 1);
    Assert.assertEquals(destLogFiles[2].getLen(), testString.length() + 1);
  }

  // Depends on testCheckSrcLogFiles having already copied three files.
  @Test (dependsOnMethods = "testCheckSrcLogFiles")
  public void testPruneCopiedFileNames() throws Exception {
    // Only testLog.log still exists on the source side; other copied names get pruned.
    Set<String> srcLogFileNames = Sets.newHashSet("testLog.log");
    Assert.assertEquals(logCopier.getCopiedFileNames().size(), 3);
    logCopier.pruneCopiedFileNames(srcLogFileNames);
    Assert.assertEquals(logCopier.getCopiedFileNames().size(), 1);
  }

  @AfterClass
  public void cleanUp() throws IOException {
    this.srcFs.delete(srcLogDir, true);
    this.destFs.delete(destLogDir, true);
  }
}
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/executors/IteratorExecutorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.executors;
import java.util.Iterator;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Predicate;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.gobblin.testing.AssertWithBackoff;
/**
 * Tests {@link IteratorExecutor}: an executor that pulls {@link Callable}s from an
 * iterator using a bounded thread pool. The test iterator hands out callables that block
 * until {@link TestIterator#endOneCallable()} is called, so the test can observe exactly
 * how many tasks have been retrieved versus completed at each step.
 */
public class IteratorExecutorTest {
  // Number of callables handed out by the iterator so far.
  private final AtomicInteger nextCallCount = new AtomicInteger(0);
  // Number of callables that have finished running.
  private final AtomicInteger completedCount = new AtomicInteger(0);
  private final Logger log = LoggerFactory.getLogger(IteratorExecutorTest.class);

  @Test
  public void test() throws Exception {
    TestIterator iterator = new TestIterator(5);
    // Two worker threads draining a five-task iterator.
    final IteratorExecutor<Void> executor = new IteratorExecutor<>(iterator, 2, new ThreadFactoryBuilder().build());
    log.info("Starting executor");
    // execute() blocks until all tasks complete, so run it on its own thread.
    ExecutorService singleThread = Executors.newSingleThreadExecutor();
    final Future<?> future = singleThread.submit(new Runnable() {
      @Override
      public void run() {
        try {
          executor.execute();
        } catch (InterruptedException ie) {
          throw new RuntimeException(ie);
        }
      }
    });
    log.info("Only two threads, so exactly two tasks retrieved");
    verify(2, 0);
    Assert.assertFalse(future.isDone());
    Assert.assertTrue(iterator.hasNext());
    log.info("end one of the tasks");
    iterator.endOneCallable();
    log.info("three tasks retrieved, one completed");
    verify(3, 1);
    Assert.assertFalse(future.isDone());
    Assert.assertTrue(iterator.hasNext());
    log.info("end two tasks");
    iterator.endOneCallable();
    iterator.endOneCallable();
    log.info("5 (all) tasks retrieved, 3 completed");
    verify(5, 3);
    Assert.assertFalse(future.isDone());
    Assert.assertFalse(iterator.hasNext());
    log.info("end two tasks");
    iterator.endOneCallable();
    iterator.endOneCallable();
    log.info("all tasks completed, check future is done");
    verify(5, 5);
    // The executor thread may need a moment to notice completion; poll with backoff.
    AssertWithBackoff.assertTrue(new Predicate<Void>() {
      @Override public boolean apply(Void input) {
        return future.isDone();
      }
    }, 10000, "future done", log, 2, 1000);
    Assert.assertTrue(future.isDone());
    log.info("done.");
  }

  /**
   * Verify exactly retrieved tasks have been retrieved, and exactly completed tasks have completed.
   */
  private void verify(final int retrieved, final int completed) throws Exception {
    AssertWithBackoff.assertTrue(new Predicate<Void>() {
      @Override public boolean apply(Void input) {
        log.info("Waiting for {} retrieved -> {} and {} completed -> {}", retrieved,
            nextCallCount.get(), completed, completedCount.get());
        return (nextCallCount.get() == retrieved && completedCount.get() == completed);
      }
    }, 30000, "Waiting for callcount retrieved=" + retrieved + "/completed=" + completed, log,
        1.5, 1000);
  }

  /**
   * Runnable iterator. Runnables block until a call to {@link #endOneCallable()}.
   *
   * The handshake uses a single boolean flag guarded by a lock/condition pair:
   * endOneCallable() sets endCallEnqueued (waiting first for any previous request to be
   * consumed), and exactly one blocked callable consumes it, clears the flag, and bumps
   * completedCount.
   */
  private class TestIterator implements Iterator<Callable<Void>> {
    private final int maxCallables;
    private Lock lock = new ReentrantLock();
    private Condition endCallTransitionCondition = lock.newCondition();
    // true while an endOneCallable() request is pending and not yet consumed by a callable.
    private boolean endCallEnqueued = false;

    public TestIterator(int maxCallables) {
      this.maxCallables = maxCallables;
    }

    @Override
    public boolean hasNext() {
      return nextCallCount.get() < maxCallables;
    }

    @Override
    public Callable<Void> next() {
      nextCallCount.incrementAndGet();
      return new Callable<Void>() {
        @Override
        public Void call()
            throws Exception {
          log.debug("Blocking at {}", nextCallCount.get());
          lock.lock();
          try {
            // wait for endCall request
            while (!endCallEnqueued) {
              if (!endCallTransitionCondition.await(30, TimeUnit.SECONDS)) {
                throw new TimeoutException("Waiting for endCall to be enqueued: retrieved=" +
                    IteratorExecutorTest.this.nextCallCount.get()
                    + " completed=" + IteratorExecutorTest.this.completedCount.get()
                    + " endCallEnqueued=" + endCallEnqueued);
              }
            }
            completedCount.incrementAndGet();
            // Consume the request and wake anyone waiting in endOneCallable().
            endCallEnqueued = false;
            endCallTransitionCondition.signalAll();
          } finally {
            lock.unlock();
          }
          log.debug("Completed {}", completedCount.get());
          return null;
        }
      };
    }

    public void endOneCallable() throws InterruptedException, TimeoutException {
      log.debug("End one {}", nextCallCount.get());
      lock.lock();
      try {
        // wait till the last endCall was processed
        while (endCallEnqueued) {
          if (!endCallTransitionCondition.await(30, TimeUnit.SECONDS)) {
            throw new TimeoutException("Waiting for endCall to be processed: retrieved=" +
                IteratorExecutorTest.this.nextCallCount.get()
                + " completed=" + IteratorExecutorTest.this.completedCount.get()
                + " endCallEnqueued=" + endCallEnqueued);
          }
        }
        // Enqueue a new request; exactly one blocked callable will consume it.
        endCallEnqueued = true;
        endCallTransitionCondition.signalAll();
      }
      finally {
        lock.unlock();
      }
    }

    @Override
    public void remove() {
      throw new UnsupportedOperationException();
    }
  }
}
| 4,072 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/concurrent/HashedWheelTimerTaskSchedulerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.concurrent;
import com.google.common.base.Optional;
/**
 * Runs the shared {@link TaskSchedulerTest} suite against the "HashedWheelTimer"
 * {@link TaskScheduler} implementation.
 */
public class HashedWheelTimerTaskSchedulerTest extends TaskSchedulerTest {
  public HashedWheelTimerTaskSchedulerTest() {
    // false: this implementation does not fire the first execution immediately on schedule.
    super(false);
  }
  @Override
  protected TaskScheduler<String, Task> getTaskScheduler(String name) {
    return TaskSchedulerFactory.get("HashedWheelTimer", Optional.of(name));
  }
}
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/concurrent/ScheduledExecutorServiceTaskSchedulerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.concurrent;
import com.google.common.base.Optional;
/**
 * Runs the shared {@link TaskSchedulerTest} suite against the "ScheduledExecutorService"
 * {@link TaskScheduler} implementation.
 */
public class ScheduledExecutorServiceTaskSchedulerTest extends TaskSchedulerTest {
  public ScheduledExecutorServiceTaskSchedulerTest() {
    // true: this implementation fires the first execution immediately on schedule.
    super(true);
  }
  @Override
  protected TaskScheduler<String, Task> getTaskScheduler(String name) {
    return TaskSchedulerFactory.get("ScheduledExecutorService", Optional.of(name));
  }
}
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/concurrent/TaskSchedulerFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.concurrent;
import org.apache.commons.lang.StringUtils;
import org.hamcrest.Matchers;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
public class TaskSchedulerFactoryTest {

  /**
   * Verifies that {@link TaskSchedulerFactory#get(String, Optional)} returns the
   * implementation matching the requested type name, and falls back to
   * {@link ScheduledExecutorServiceTaskScheduler} for an empty type name.
   */
  @Test
  public void testGet() {
    assertSchedulerType("", ScheduledExecutorServiceTaskScheduler.class);
    assertSchedulerType(StringUtils.EMPTY, ScheduledExecutorServiceTaskScheduler.class);
    assertSchedulerType("ScheduledExecutorService", ScheduledExecutorServiceTaskScheduler.class);
    assertSchedulerType("HashedWheelTimer", HashedWheelTimerTaskScheduler.class);
  }

  // Requests a scheduler by type name and asserts the returned instance's concrete type.
  private void assertSchedulerType(String typeName, Class<?> expectedType) {
    TaskScheduler<Object, ScheduledTask<Object>> scheduler =
        TaskSchedulerFactory.get(typeName, Optional.<String>absent());
    Assert.assertTrue(expectedType.isInstance(scheduler));
  }
}
| 4,075 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/concurrent/TaskSchedulerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.concurrent;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Optional;
import com.google.common.collect.Iterables;
/**
 * Base test suite for {@link TaskScheduler} implementations. Concrete subclasses supply
 * the implementation under test via {@link #getTaskScheduler(String)}.
 */
public abstract class TaskSchedulerTest {
  // Whether the implementation fires the first execution immediately upon scheduling
  // (true) or only after the first period elapses (false). Affects the minimum
  // elapsed-time assertion in testTaskRunsOnSchedule.
  private final boolean scheduleFiresImmediately;

  public TaskSchedulerTest(boolean scheduleFiresImmediately) {
    this.scheduleFiresImmediately = scheduleFiresImmediately;
  }

  /**
   * Schedules a task with a one-second period, waits for three executions, and checks
   * both the execution count and the minimum wall-clock time elapsed.
   */
  @Test
  public void testTaskRunsOnSchedule() throws IOException, InterruptedException {
    long startTime;
    long endTime;
    Task task = new Task("foo");
    try (TaskScheduler<String, Task> taskScheduler = getTaskScheduler("TaskSchedulerTest")) {
      startTime = System.currentTimeMillis();
      taskScheduler.schedule(task, 1, TimeUnit.SECONDS);
      task.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
      task.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
      task.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
      endTime = System.currentTimeMillis();
    }
    Assert.assertEquals(task.getCount(), 3);
    // Three one-second periods, minus one when the first run fires immediately.
    Assert.assertTrue(endTime - startTime >= (scheduleFiresImmediately ? 2000 : 3000));
  }

  /** Closing the scheduler must cancel outstanding tasks: no further runs after close. */
  @Test
  public void testCloseCancelsTasks() throws IOException, InterruptedException {
    Task task = new Task("foo");
    try (TaskScheduler<String, Task> taskScheduler = getTaskScheduler("TaskSchedulerTest")) {
      taskScheduler.schedule(task, 1, TimeUnit.SECONDS);
      task.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
    }
    task.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
    Assert.assertEquals(task.getCount(), 1);
  }

  /** Scheduled tasks can be looked up by key, enumerated, and individually cancelled. */
  @Test
  public void testScheduledTasksAreRetrievableAndCancellable() throws IOException, InterruptedException {
    Task task1 = new Task("foo");
    Task task2 = new Task("bar");
    try (TaskScheduler<String, Task> taskScheduler = getTaskScheduler("TaskSchedulerTest")) {
      taskScheduler.schedule(task1, 1, TimeUnit.SECONDS);
      taskScheduler.schedule(task2, 1, TimeUnit.SECONDS);
      task1.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
      task2.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
      Optional<Task> foo = taskScheduler.getScheduledTask("foo");
      Assert.assertTrue(foo.isPresent());
      Iterable<Task> all = taskScheduler.getScheduledTasks();
      Assert.assertEquals(Iterables.size(all), 2);
      taskScheduler.cancel(foo.get());
      // After cancellation the task is neither retrievable by key nor enumerated.
      foo = taskScheduler.getScheduledTask("foo");
      Assert.assertFalse(foo.isPresent());
      all = taskScheduler.getScheduledTasks();
      Assert.assertEquals(Iterables.size(all), 1);
      task2.getAutoResetEvent().waitOne(2, TimeUnit.SECONDS);
    }
    Assert.assertEquals(task1.getCount(), 1);
    Assert.assertEquals(task2.getCount(), 2);
  }

  /** Supplies the {@link TaskScheduler} implementation under test. */
  protected abstract TaskScheduler<String, Task> getTaskScheduler(String name);

  /** A {@link ScheduledTask} that counts its executions and signals an event on each run. */
  protected class Task implements ScheduledTask<String> {
    private final String key;
    private final AutoResetEvent autoResetEvent;
    private int count = 0;

    public Task(String key) {
      this.key = key;
      // Fixed: removed a stray duplicate semicolon present in the original.
      this.autoResetEvent = new AutoResetEvent(false);
    }

    @Override
    public String getKey() {
      return key;
    }

    public int getCount() {
      return count;
    }

    public AutoResetEvent getAutoResetEvent() {
      return autoResetEvent;
    }

    @Override
    public void runOneIteration() {
      count++;
      this.autoResetEvent.set();
    }
  }
}
| 4,076 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/guid/GuidTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.guid;
import java.io.IOException;
import java.util.Random;
import org.testng.Assert;
import org.testng.annotations.Test;
public class GuidTest {

  /** A Guid built from any input must have the fixed expected length. */
  @Test
  public void testLength() {
    Assert.assertEquals(new Guid(new byte[0]).sha.length, Guid.GUID_LENGTH);
  }

  // Obviously not an extensive test, but better than nothing.
  @Test
  public void testUniqueReplicable() {
    Random rng = new Random();
    byte[] seed = new byte[10];
    rng.nextBytes(seed);
    // Identical input bytes must always yield equal Guids.
    Assert.assertEquals(new Guid(seed), new Guid(seed));
    for (int attempt = 0; attempt < 1000; attempt++) {
      byte[] candidate = new byte[10];
      rng.nextBytes(candidate);
      Assert.assertNotEquals(new Guid(seed), new Guid(candidate));
    }
  }

  /** A Guid serializes to a hex string of twice its byte length and round-trips. */
  @Test
  public void testSerDe() throws Exception {
    byte[] payload = new byte[10];
    new Random().nextBytes(payload);
    Guid original = new Guid(payload);
    String serialized = original.toString();
    Assert.assertEquals(serialized.length(), 2 * Guid.GUID_LENGTH);
    Assert.assertEquals(original, Guid.deserialize(serialized));
  }

  /** SimpleHasGuid must hand back exactly the Guid it wraps. */
  @Test
  public void testFromHasGuid() throws IOException {
    byte[] payload = new byte[10];
    new Random().nextBytes(payload);
    Guid wrapped = new Guid(payload);
    HasGuid holder = new Guid.SimpleHasGuid(wrapped);
    Assert.assertEquals(holder.guid(), wrapped);
  }
}
| 4,077 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/orc/AvroOrcSchemaConverterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.orc;
import com.google.common.base.Preconditions;
import java.util.List;
import org.apache.avro.LogicalType;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.orc.TypeDescription;
import org.testng.Assert;
import org.testng.annotations.Test;
import static org.apache.gobblin.util.orc.AvroOrcSchemaConverter.*;
/**
 * Tests for AvroOrcSchemaConverter's Avro-to-ORC schema translation, including union
 * handling and nullable-union sanitization. Also defines a local ORC-to-Avro helper
 * ({@link #getAvroSchema(TypeDescription)}) used to round-trip schemas in assertions.
 */
public class AvroOrcSchemaConverterTest {
  @Test
  public void testUnionORCSchemaTranslation() throws Exception {
    // Three-way union (string, int, null): the null branch is dropped but the union
    // itself survives since more than one non-null member remains.
    Schema avroUnion = SchemaBuilder.record("test")
        .fields()
        .name("test_union")
        .type(SchemaBuilder.builder().unionOf().stringType().and().intType().and().nullType().endUnion())
        .noDefault()
        .endRecord();
    TypeDescription unionSchema = TypeDescription.createUnion()
        .addUnionChild(TypeDescription.createString())
        .addUnionChild(TypeDescription.createInt());
    TypeDescription recordSchemaWithUnion = TypeDescription.createStruct().addField("test_union", unionSchema);
    // Verify the schema conversion for Union works
    Assert.assertEquals(AvroOrcSchemaConverter.getOrcSchema(avroUnion), recordSchemaWithUnion);
    //Create a nullable union field
    Schema nullableAvroUnion = SchemaBuilder.record("test")
        .fields()
        .name("test_union")
        .type(SchemaBuilder.builder().unionOf().stringType().and().nullType().endUnion())
        .noDefault()
        .endRecord();
    //Assert that Orc schema has flattened the nullable union to the member's type
    Assert.assertEquals(AvroOrcSchemaConverter.getOrcSchema(nullableAvroUnion),
        TypeDescription.createStruct().addField("test_union", TypeDescription.createString()));
    //Create a non nullable union type
    Schema nonNullableAvroUnion = SchemaBuilder.record("test")
        .fields()
        .name("test_union")
        .type(SchemaBuilder.builder().unionOf().stringType().endUnion())
        .noDefault()
        .endRecord();
    //Ensure that the union type is preserved
    Assert.assertEquals(AvroOrcSchemaConverter.getOrcSchema(nonNullableAvroUnion), TypeDescription.createStruct()
        .addField("test_union", TypeDescription.createUnion().addUnionChild(TypeDescription.createString())));
  }

  @Test
  public void testTrivialAvroSchemaTranslation() throws Exception {
    // A bytes schema carrying the Avro "decimal" logical type with explicit scale/precision.
    Schema decimalSchema = SchemaBuilder.builder().bytesType();
    decimalSchema.addProp(LogicalType.LOGICAL_TYPE_PROP, "decimal");
    decimalSchema.addProp("scale", 2);
    decimalSchema.addProp("precision", 10);
    // Trivial cases
    Schema avroSchema = SchemaBuilder.record("test")
        .fields()
        .name("string_type")
        .type(SchemaBuilder.builder().stringType())
        .noDefault()
        .name("int_type")
        .type(SchemaBuilder.builder().intType())
        .noDefault()
        .name("decimal_type")
        .type(decimalSchema)
        .noDefault()
        .endRecord();
    TypeDescription orcSchema = TypeDescription.createStruct()
        .addField("string_type", TypeDescription.createString())
        .addField("int_type", TypeDescription.createInt())
        .addField("decimal_type", TypeDescription.createDecimal().withPrecision(10).withScale(2));
    // Top-level record name will not be replicated in conversion result.
    Assert.assertEquals(avroSchema.getFields(), getAvroSchema(orcSchema).getFields());
    Assert.assertEquals(AvroOrcSchemaConverter.getOrcSchema(avroSchema), orcSchema);
  }

  @Test
  public void testUnionAvroSchemaTranslation() throws Exception {
    // ORC-to-Avro direction: a two-member ORC union maps back to an Avro union field.
    Schema avroSchema = SchemaBuilder.record("test")
        .fields()
        .name("union_nested")
        .type(SchemaBuilder.builder().unionOf().stringType().and().intType().endUnion())
        .noDefault()
        .endRecord();
    TypeDescription orcSchema = TypeDescription.createStruct()
        .addField("union_nested", TypeDescription.createUnion()
            .addUnionChild(TypeDescription.createString())
            .addUnionChild(TypeDescription.createInt()));
    Assert.assertEquals(avroSchema.getFields(), getAvroSchema(orcSchema).getFields());
  }

  @Test
  public void testSchemaSanitization() throws Exception {
    // Two field along with null
    Schema avroSchema = SchemaBuilder.builder().unionOf().nullType().and().stringType().and().intType().endUnion();
    Schema expectedSchema = SchemaBuilder.builder().unionOf().stringType().and().intType().endUnion();
    Assert.assertEquals(sanitizeNullableSchema(avroSchema), expectedSchema);
    // Only one field except null
    Schema avroSchema_1 = SchemaBuilder.builder()
        .unionOf()
        .nullType()
        .and()
        .record("test")
        .fields()
        .name("aaa")
        .type(SchemaBuilder.builder().intType())
        .noDefault()
        .endRecord()
        .endUnion();
    // A nullable union with a single non-null member collapses to that member's schema.
    expectedSchema = SchemaBuilder.builder()
        .record("test")
        .fields()
        .name("aaa")
        .type(SchemaBuilder.builder().intType())
        .noDefault()
        .endRecord();
    Assert.assertEquals(sanitizeNullableSchema(avroSchema_1), expectedSchema);
  }

  /**
   * Converts an ORC {@link TypeDescription} to the equivalent Avro {@link Schema}.
   * Test-local helper used to round-trip schemas; ORC categories with no Avro
   * counterpart (BYTE, SHORT, DATE, TIMESTAMP, VARCHAR, CHAR) are rejected.
   */
  public static Schema getAvroSchema(TypeDescription schema) {
    final TypeDescription.Category type = schema.getCategory();
    switch (type) {
      case BYTE:
      case SHORT:
      case DATE:
      case TIMESTAMP:
      case VARCHAR:
      case CHAR:
        throw new UnsupportedOperationException("Types like BYTE and SHORT (and many more) are not supported in Avro");
      case DECIMAL:
        // Decimal maps to bytes plus the "decimal" logical type with scale/precision props.
        Schema bytesType = SchemaBuilder.builder().bytesType();
        bytesType.addProp(LogicalType.LOGICAL_TYPE_PROP, "decimal");
        bytesType.addProp("scale", schema.getScale());
        bytesType.addProp("precision", schema.getPrecision());
        return bytesType;
      case BOOLEAN:
        return SchemaBuilder.builder().booleanType();
      case INT:
        return SchemaBuilder.builder().intType();
      case LONG:
        return SchemaBuilder.builder().longType();
      case STRUCT:
        // TODO: Cases that current implementation cannot support:
        // union<struct1, struct2, ..., structN>
        // All these structs will be assigned with the same name, while calling "endUnion" an exception will be thrown.
        // We would workaround this by assigning randomly-picked name while that will cause difficulties in name-related
        // resolution after translation, like `resolveUnion` method which is relying on name.
        SchemaBuilder.FieldAssembler assembler = SchemaBuilder.record("nested").fields();
        List<String> childFieldNames = schema.getFieldNames();
        List<TypeDescription> childrenSchemas = schema.getChildren();
        for (int i = 0; i < childrenSchemas.size(); i++) {
          String fieldName = childFieldNames.get(i);
          assembler = assembler.name(fieldName).type(getAvroSchema(childrenSchemas.get(i))).noDefault();
        }
        return (Schema) assembler.endRecord();
      case STRING:
        return SchemaBuilder.builder().stringType();
      case BINARY:
        return SchemaBuilder.builder().bytesType();
      case DOUBLE:
        return SchemaBuilder.builder().doubleType();
      case FLOAT:
        return SchemaBuilder.builder().floatType();
      case LIST:
        return SchemaBuilder.builder().array().items(getAvroSchema(schema.getChildren().get(0)));
      case MAP:
        // ORC maps have exactly two children: key (must be STRING for Avro) and value.
        Preconditions.checkArgument(schema.getChildren().get(0).getCategory().equals(TypeDescription.Category.STRING));
        Preconditions.checkArgument(schema.getChildren().size() == 2);
        return SchemaBuilder.builder().map().values(getAvroSchema(schema.getChildren().get(1)));
      case UNION:
        // Accumulate members with .and() and close the union on the last one.
        // NOTE(review): an empty union's loop never returns and control falls through
        // to the default branch, throwing IllegalStateException.
        SchemaBuilder.BaseTypeBuilder builder = SchemaBuilder.builder().unionOf();
        List<TypeDescription> unionChildrenSchemas = schema.getChildren();
        for (int i = 0; i < unionChildrenSchemas.size(); i++) {
          if (i < unionChildrenSchemas.size() - 1) {
            builder = ((SchemaBuilder.UnionAccumulator<Schema>) builder.type(
                getAvroSchema(unionChildrenSchemas.get(i)))).and();
          } else {
            return ((SchemaBuilder.UnionAccumulator<Schema>) builder.type(
                getAvroSchema(unionChildrenSchemas.get(i)))).endUnion();
          }
        }
      default:
        throw new IllegalStateException("Unrecognized ORC type:" + schema.getCategory());
    }
  }
}
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/reflection/DerivedClass.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.reflection;
/**
 * A subclass of {@link BaseClass} used by {@link RestrictedFieldAccessingUtilsTest} to
 * verify that restricted fields declared on an ancestor class can be reached via
 * reflection through a derived type.
 */
public class DerivedClass extends BaseClass {
  public DerivedClass(int a) {
    super(a);
  }
}
| 4,079 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/reflection/BaseClass.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.reflection;
/**
* A testing Class for {@link RestrictedFieldAccessingUtilsTest}.
*/
/**
 * A testing Class for {@link RestrictedFieldAccessingUtilsTest}.
 *
 * The private field {@code enclose} and package-private field {@code a} are accessed
 * by name via reflection in the tests; do not rename them.
 */
public class BaseClass {
  // Private on purpose: tests reach this field reflectively by the name "enclose".
  private EnclosedClass enclose;
  // Package-private on purpose: tests reach this field reflectively by the name "a".
  int a;
  public BaseClass(int a) {
    enclose = new EnclosedClass(a);
    this.a = a;
  }
  /**
   * Exposed field inside enclose to verify object's exposure and set-type of method.
   */
  public int getEnclosingValue() {
    return enclose.getValue();
  }
}
| 4,080 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/reflection/RestrictedFieldAccessingUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.reflection;
import org.testng.Assert;
import org.testng.annotations.Test;
public class RestrictedFieldAccessingUtilsTest {
@Test
public void testGetRestrictedFieldByReflection()
throws Exception {
BaseClass baseClass = new BaseClass(5);
int a = (int) RestrictedFieldAccessingUtils.getRestrictedFieldByReflection(baseClass, "a", baseClass.getClass());
Assert.assertEquals(a, 5);
}
@Test
public void testGetRestrictedFieldByReflectionRecursively()
throws Exception {
DerivedClass derivedClass = new DerivedClass(5);
Assert.assertEquals(derivedClass.getEnclosingValue(), 5);
((EnclosedClass) RestrictedFieldAccessingUtils
.getRestrictedFieldByReflectionRecursively(derivedClass, "enclose", derivedClass.getClass())).setValue(100);
Assert.assertEquals(derivedClass.getEnclosingValue(), 100);
}
@Test
public void testNoSuchFieldException()
throws Exception {
DerivedClass derivedClass = new DerivedClass(5);
try {
RestrictedFieldAccessingUtils
.getRestrictedFieldByReflectionRecursively(derivedClass, "non", derivedClass.getClass());
} catch (NoSuchFieldException ne) {
Assert.assertTrue(true);
return;
}
// Should never reach here.
Assert.assertTrue(false);
}
} | 4,081 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/reflection/EnclosedClass.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.reflection;
import lombok.Getter;
import lombok.Setter;
/**
 * Simple mutable holder used by the reflection-access tests: stores a single
 * {@code int} value behind standard getter/setter accessors (written out
 * explicitly rather than generated by Lombok; the generated signatures are
 * identical).
 */
public class EnclosedClass {
  int value;

  public EnclosedClass(int value) {
    this.value = value;
  }

  public int getValue() {
    return this.value;
  }

  public void setValue(int value) {
    this.value = value;
  }
}
| 4,082 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/util/reflection/GobblinConstructorUtilsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.util.reflection;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
public class GobblinConstructorUtilsTest {

  /** Test target with two 2-arg constructors; its fields record which one ran. */
  public static class ConstructorTestClass {
    Properties props = null;
    String str = null;
    Integer id = null;

    public ConstructorTestClass(Integer id, Properties props) {
      this.id = id;
      this.props = props;
    }

    public ConstructorTestClass(Integer id, String str) {
      this.id = id;
      this.str = str;
    }
  }

  /** Subclass declaring only a no-arg constructor, used by the longest-constructor test. */
  public static class ConstructorTestClassWithNoArgs extends ConstructorTestClass {
    public ConstructorTestClassWithNoArgs() {
      super(0, "noArgs");
    }
  }

  /**
   * invokeFirstConstructor must pick the first argument list (in the order given)
   * that matches a declared constructor, ignoring later candidates.
   */
  @Test
  public void testInvokeFirst()
      throws Exception {
    // (Integer, Properties) is listed first, so the Properties constructor wins.
    ConstructorTestClass instance = GobblinConstructorUtils.invokeFirstConstructor(ConstructorTestClass.class,
        ImmutableList.<Object>of(Integer.valueOf(3), new Properties()),
        ImmutableList.<Object>of(Integer.valueOf(3), "test1"));
    Assert.assertNotNull(instance.id);
    Assert.assertNotNull(instance.props);
    Assert.assertEquals(instance.id, Integer.valueOf(3));
    Assert.assertNull(instance.str);

    // Reversing the candidate order routes to the (Integer, String) constructor.
    instance = GobblinConstructorUtils.invokeFirstConstructor(ConstructorTestClass.class,
        ImmutableList.<Object>of(Integer.valueOf(3), "test1"),
        ImmutableList.<Object>of(Integer.valueOf(3), new Properties()));
    Assert.assertNotNull(instance.id);
    Assert.assertNotNull(instance.str);
    Assert.assertEquals(instance.id, Integer.valueOf(3));
    Assert.assertEquals(instance.str, "test1");
    Assert.assertNull(instance.props);
  }

  /** No constructor takes zero args or (Integer, Integer), so the lookup must fail. */
  @Test(expectedExceptions = NoSuchMethodException.class)
  public void testInvokeFirstException()
      throws Exception {
    GobblinConstructorUtils.invokeFirstConstructor(ConstructorTestClass.class, ImmutableList.<Object>of(),
        ImmutableList.<Object>of(Integer.valueOf(3), Integer.valueOf(3)));
  }

  /**
   * invokeLongestConstructor should use as many of the supplied arguments as a
   * declared constructor can accept, silently dropping trailing extras.
   */
  @Test
  public void testLongestConstructor()
      throws Exception {
    // Three args supplied, but the longest matching constructor takes two.
    ConstructorTestClass instance = GobblinConstructorUtils
        .invokeLongestConstructor(ConstructorTestClass.class, Integer.valueOf(1), "String1", "String2");
    Assert.assertEquals(instance.id.intValue(), 1);
    Assert.assertEquals(instance.str, "String1");

    instance = GobblinConstructorUtils.invokeLongestConstructor(ConstructorTestClass.class, Integer.valueOf(1), "String1");
    Assert.assertEquals(instance.id.intValue(), 1);
    Assert.assertEquals(instance.str, "String1");

    // A lone Integer matches nothing: there is no 1-arg or 0-arg constructor here.
    try {
      instance = GobblinConstructorUtils.invokeLongestConstructor(ConstructorTestClass.class, Integer.valueOf(1));
      Assert.fail();
    } catch (NoSuchMethodException nsme) {
      //expected to throw exception
    }

    // The subclass only declares a no-arg constructor, so extra args are ignored.
    instance = GobblinConstructorUtils.invokeLongestConstructor(ConstructorTestClassWithNoArgs.class);
    Assert.assertEquals(instance.id.intValue(), 0);
    Assert.assertEquals(instance.str, "noArgs");
    Assert.assertNull(instance.props);

    instance = GobblinConstructorUtils.invokeLongestConstructor(ConstructorTestClassWithNoArgs.class, Integer.valueOf(1));
    Assert.assertEquals(instance.id.intValue(), 0);
    Assert.assertEquals(instance.str, "noArgs");
    Assert.assertNull(instance.props);
  }
}
| 4,083 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/runtime/cli/PublicMethodsCliObjectFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.cli;
import java.io.IOException;
import org.apache.commons.cli.CommandLine;
import org.testng.Assert;
import org.testng.annotations.Test;
import lombok.Data;
public class PublicMethodsCliObjectFactoryTest {

  @Test
  public void test() throws Exception {
    MyFactory factory = new MyFactory();

    // No positional argument: constructObject indexes into an empty array, and the
    // factory surfaces the ArrayIndexOutOfBoundsException wrapped in an IOException.
    try {
      factory.buildObject(new String[]{}, 0, false, "usage");
      Assert.fail();
    } catch (IOException exc) {
      Assert.assertTrue(exc.getCause() instanceof ArrayIndexOutOfBoundsException);
    }

    // Positional argument alone: neither optional setter fires.
    MyObject built = factory.buildObject(new String[]{"required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertNull(built.string1);
    Assert.assertNull(built.string2);

    // Option whose name is derived from the public setter method.
    built = factory.buildObject(new String[]{"-setString1", "str1", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string1, "str1");
    Assert.assertNull(built.string2);

    // Option renamed to "foo" via @CliObjectOption still routes to setString2.
    built = factory.buildObject(new String[]{"-foo", "bar", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string2, "bar");
    Assert.assertNull(built.string1);

    // Both optional setters together.
    built = factory.buildObject(new String[]{"-foo", "bar", "-setString1", "str1", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string2, "bar");
    Assert.assertEquals(built.string1, "str1");
  }

  /** Factory under test: builds a MyObject from the first free CLI argument. */
  public static class MyFactory extends PublicMethodsCliObjectFactory<MyObject> {
    public MyFactory() {
      super(MyObject.class);
    }

    @Override
    protected MyObject constructObject(CommandLine cli) throws IOException {
      return new MyObject(cli.getArgs()[0]);
    }
  }

  /** Target object: one required constructor arg plus two optional string setters. */
  @Data
  public static class MyObject {
    private final String required;
    private String string1;
    private String string2;

    @CliObjectOption(name = "foo")
    public void setString2(String str) {
      this.string2 = str;
    }
  }
}
| 4,084 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/runtime | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/runtime/cli/ConstructorAndPublicMethodsCliObjectFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.cli;
import java.io.IOException;
import org.apache.commons.cli.MissingOptionException;
import org.testng.Assert;
import org.testng.annotations.Test;
import lombok.Data;
public class ConstructorAndPublicMethodsCliObjectFactoryTest {

  @Test
  public void test() throws Exception {
    MyFactory factory = new MyFactory();

    // Constructor arguments become required options; omitting -myArg fails CLI
    // parsing with a MissingOptionException wrapped in an IOException.
    try {
      factory.buildObject(new String[]{}, 0, false, "usage");
      Assert.fail();
    } catch (IOException exc) {
      Assert.assertEquals(exc.getCause().getClass(), MissingOptionException.class);
    }

    // Required option only: neither optional setter fires.
    MyObject built = factory.buildObject(new String[]{"-myArg", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertNull(built.string1);
    Assert.assertNull(built.string2);

    // Option whose name is derived from the public setter method.
    built = factory.buildObject(new String[]{"-setString1", "str1", "-myArg", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string1, "str1");
    Assert.assertNull(built.string2);

    // Option renamed to "foo" via @CliObjectOption still routes to setString2.
    built = factory.buildObject(new String[]{"-foo", "bar", "-myArg", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string2, "bar");
    Assert.assertNull(built.string1);

    // All options at once.
    built = factory.buildObject(new String[]{"-foo", "bar", "-setString1", "str1", "-myArg", "required"}, 0, false, "usage");
    Assert.assertEquals(built.required, "required");
    Assert.assertEquals(built.string2, "bar");
    Assert.assertEquals(built.string1, "str1");
  }

  /** Factory under test: derives CLI options from the annotated constructor and setters. */
  public static class MyFactory extends ConstructorAndPublicMethodsCliObjectFactory<MyObject> {
    public MyFactory() {
      super(MyObject.class);
    }
  }

  /** Target object: the constructor arg is exposed as the required -myArg option. */
  @Data
  public static class MyObject {
    private final String required;
    private String string1;
    private String string2;

    @CliObjectSupport(argumentNames = "myArg")
    public MyObject(String required) {
      this.required = required;
    }

    @CliObjectOption(name = "foo")
    public void setString2(String str) {
      this.string2 = str;
    }
  }
}
| 4,085 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/fsm/FiniteStateMachineTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fsm;
import java.util.Set;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Function;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Sets;
import javax.annotation.Nullable;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class FiniteStateMachineTest {

  /** States shared by all tests in this class. */
  public enum MyStates {
    PENDING, RUNNING, SUCCESS, ERROR
  }

  // Reference machine: PENDING -> RUNNING -> SUCCESS, with ERROR reachable from
  // both PENDING and RUNNING. Tests clone it so each starts from a fresh machine.
  private final FiniteStateMachine<MyStates> refFsm = new FiniteStateMachine.Builder<MyStates>()
      .addTransition(MyStates.PENDING, MyStates.RUNNING)
      .addTransition(MyStates.RUNNING, MyStates.SUCCESS)
      .addTransition(MyStates.PENDING, MyStates.ERROR)
      .addTransition(MyStates.RUNNING, MyStates.ERROR).build(MyStates.PENDING);

  /** Happy-path transitions from a single thread, plus cloning at initial/current state. */
  @Test
  public void singleThreadImmediateTransitionsTest() throws Exception {
    FiniteStateMachine<MyStates> fsm = refFsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    fsm.transitionImmediately(MyStates.RUNNING);
    Assert.assertEquals(fsm.getCurrentState(), MyStates.RUNNING);
    fsm.transitionImmediately(MyStates.SUCCESS);
    Assert.assertEquals(fsm.getCurrentState(), MyStates.SUCCESS);
    // cloneAtInitialState resets to PENDING regardless of the source machine's state.
    fsm = fsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    fsm.transitionImmediately(MyStates.ERROR);
    Assert.assertEquals(fsm.getCurrentState(), MyStates.ERROR);
    // cloneAtCurrentState preserves the source machine's current state.
    fsm = fsm.cloneAtCurrentState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.ERROR);
  }

  /** Transitions never declared in the builder must be rejected and leave the state unchanged. */
  @Test
  public void illegalTransitionsTest() throws Exception {
    FiniteStateMachine<MyStates> fsm = refFsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    try {
      // Self-transition PENDING -> PENDING was never declared.
      fsm.transitionImmediately(MyStates.PENDING);
      Assert.fail();
    } catch (FiniteStateMachine.UnallowedTransitionException exc) {
      // expected
    }
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    try {
      // PENDING -> SUCCESS skips RUNNING and was never declared.
      fsm.transitionImmediately(MyStates.SUCCESS);
      Assert.fail();
    } catch (FiniteStateMachine.UnallowedTransitionException exc) {
      // expected
    }
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    // A declared transition still succeeds after the rejected attempts.
    fsm.transitionImmediately(MyStates.RUNNING);
    Assert.assertEquals(fsm.getCurrentState(), MyStates.RUNNING);
  }

  /**
   * While a transition is open, the transitioning thread itself must not query or
   * re-transition the machine; the end state may be changed before closing.
   */
  @Test
  public void slowTransitionsTest() throws Exception {
    FiniteStateMachine<MyStates> fsm = refFsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    try (FiniteStateMachine.Transition transition = fsm.startTransition(MyStates.RUNNING)) {
      try {
        fsm.getCurrentState();
        Assert.fail();
      } catch (FiniteStateMachine.ReentrantStableStateWait exc) {
        // Expected because the same thread that is transitioning tries to read the current state
      }
      try {
        fsm.transitionImmediately(MyStates.RUNNING);
        Assert.fail();
      } catch (FiniteStateMachine.ReentrantStableStateWait exc) {
        // Expected because the same thread that is transitioning tries to start another transition
      }
    }
    Assert.assertEquals(fsm.getCurrentState(), MyStates.RUNNING);
    // The target of an open transition can be redirected before it closes.
    try (FiniteStateMachine<MyStates>.Transition transition = fsm.startTransition(MyStates.SUCCESS)) {
      transition.changeEndState(MyStates.ERROR);
    }
    Assert.assertEquals(fsm.getCurrentState(), MyStates.ERROR);
  }

  /**
   * Verifies enter/leave callbacks fire on transitions, and that callback failures
   * surface as FailedTransitionCallbackException with enough context to redirect
   * or force-close the transition.
   */
  @Test
  public void callbackTest() throws Exception {
    NamedStateWithCallback stateA = new NamedStateWithCallback("a");
    NamedStateWithCallback stateB = new NamedStateWithCallback("b");
    // stateC throws only when left, not when entered.
    NamedStateWithCallback stateC = new NamedStateWithCallback("c", null, s -> {
      throw new RuntimeException("leave");
    });
    NamedStateWithCallback stateD = new NamedStateWithCallback("d");
    // States are compared by name (see NamedStateWithCallback's equals), so the
    // builder can use fresh instances while tests pass their own.
    FiniteStateMachine<NamedStateWithCallback> fsm = new FiniteStateMachine.Builder<NamedStateWithCallback>()
        .addTransition(new NamedStateWithCallback("a"), new NamedStateWithCallback("b"))
        .addTransition(new NamedStateWithCallback("b"), new NamedStateWithCallback("c"))
        .addTransition(new NamedStateWithCallback("c"), new NamedStateWithCallback("d"))
        .addUniversalEnd(new NamedStateWithCallback("ERROR"))
        .build(stateA);
    fsm.transitionImmediately(stateB);
    Assert.assertEquals(fsm.getCurrentState(), stateB);
    Assert.assertEquals(stateA.lastTransition, "leave:a->b");
    stateA.lastTransition = "";
    Assert.assertEquals(stateB.lastTransition, "enter:a->b");
    stateB.lastTransition = "";
    try {
      // State that will error on enter
      fsm.transitionImmediately(new NamedStateWithCallback("c", s -> {
        throw new RuntimeException("enter");
      }, s -> {
        throw new RuntimeException("leave");
      }));
      Assert.fail("Expected excpetion");
    } catch (FiniteStateMachine.FailedTransitionCallbackException exc) {
      Assert.assertEquals(exc.getFailedCallback(), FiniteStateMachine.FailedCallback.END_STATE);
      Assert.assertEquals(exc.getOriginalException().getMessage(), "enter");
      // switch state to one that will only error on leave
      exc.getTransition().changeEndState(stateC);
      exc.getTransition().close();
    }
    Assert.assertEquals(fsm.getCurrentState(), stateC);
    Assert.assertEquals(stateB.lastTransition, "leave:b->c");
    stateB.lastTransition = "";
    Assert.assertEquals(stateC.lastTransition, "enter:b->c");
    stateC.lastTransition = "";
    try {
      fsm.transitionImmediately(stateD);
      Assert.fail("Expected exception");
    } catch (FiniteStateMachine.FailedTransitionCallbackException exc) {
      Assert.assertEquals(exc.getFailedCallback(), FiniteStateMachine.FailedCallback.START_STATE);
      Assert.assertEquals(exc.getOriginalException().getMessage(), "leave");
      // switch state to one that will only error on leave
      exc.getTransition().changeEndState(new NamedStateWithCallback("ERROR"));
      // closeWithoutCallbacks skips the failing leave callback entirely, so
      // stateD's lastTransition stays empty below.
      exc.getTransition().closeWithoutCallbacks();
    }
    Assert.assertEquals(fsm.getCurrentState(), new NamedStateWithCallback("ERROR"));
    Assert.assertEquals(stateD.lastTransition, "");
  }

  /**
   * Two threads transitioning the same machine: the second must block until the
   * first finishes; interrupting a Transitioner tells it to complete its in-flight
   * transition (see Transitioner.run).
   */
  @Test(timeOut = 5000)
  public void multiThreadTest() throws Exception {
    FiniteStateMachine<MyStates> fsm = refFsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    Transitioner<MyStates> t1 = new Transitioner<>(fsm, MyStates.RUNNING);
    Transitioner<MyStates> t2 = new Transitioner<>(fsm, MyStates.ERROR);
    Thread t1Thread = new Thread(null, t1, "t1");
    t1Thread.start();
    t1.awaitState(Sets.newHashSet(TransitionState.TRANSITIONING));
    Assert.assertEquals(t1.transitionResult, TransitionState.TRANSITIONING);
    // The state is still PENDING while t1's transition is open.
    Assert.assertEquals(fsm.getCurrentStateEvenIfTransitioning(), MyStates.PENDING);
    Thread t2Thread = new Thread(null, t2, "t2");
    t2Thread.start();
    // t2 cannot start its transition while t1 holds one open.
    Assert.assertEquals(t1.transitionResult, TransitionState.TRANSITIONING);
    Assert.assertEquals(t2.transitionResult, TransitionState.STARTING);
    Assert.assertEquals(fsm.getCurrentStateEvenIfTransitioning(), MyStates.PENDING);
    // Interrupt signals t1 to close its transition, unblocking t2.
    t1Thread.interrupt();
    t1.awaitState(Sets.newHashSet(TransitionState.COMPLETED));
    t2.awaitState(Sets.newHashSet(TransitionState.TRANSITIONING));
    Assert.assertEquals(t1.transitionResult, TransitionState.COMPLETED);
    Assert.assertEquals(t2.transitionResult, TransitionState.TRANSITIONING);
    Assert.assertEquals(fsm.getCurrentStateEvenIfTransitioning(), MyStates.RUNNING);
    t2Thread.interrupt();
    t2.awaitState(Sets.newHashSet(TransitionState.COMPLETED));
    Assert.assertEquals(t1.transitionResult, TransitionState.COMPLETED);
    Assert.assertEquals(t2.transitionResult, TransitionState.COMPLETED);
    Assert.assertEquals(fsm.getCurrentStateEvenIfTransitioning(), MyStates.ERROR);
  }

  /**
   * A transition whose owning thread dies without closing it must be detected as
   * abandoned on the next transition attempt.
   */
  @Test(timeOut = 5000)
  public void deadThreadTest() throws Exception {
    FiniteStateMachine<MyStates> fsm = refFsm.cloneAtInitialState();
    Assert.assertEquals(fsm.getCurrentState(), MyStates.PENDING);
    Thread t = new Thread(() -> {
      try {
        FiniteStateMachine.Transition transition = fsm.startTransition(MyStates.RUNNING);
      } catch (FiniteStateMachine.UnallowedTransitionException | InterruptedException exc) {
        // do nothing
      }
      // since we don't close the transition, it should become orphaned
    });
    t.start();
    // Poll until the owning thread has fully terminated.
    while (t.isAlive()) {
      Thread.sleep(50);
    }
    try {
      fsm.transitionImmediately(MyStates.RUNNING);
      Assert.fail();
    } catch (FiniteStateMachine.AbandonedTransitionException exc) {
      // Expected
    }
  }

  /**
   * Runnable that opens a transition to {@code endState} and holds it open until
   * interrupted (or a 2s timeout), publishing its progress as a
   * {@link TransitionState} guarded by a lock/condition pair.
   */
  @Data
  private class Transitioner<T> implements Runnable {
    private final FiniteStateMachine<T> fsm;
    private final T endState;
    private final Lock lock = new ReentrantLock();
    private final Condition condition = lock.newCondition();
    private volatile boolean running = false;
    private volatile TransitionState transitionResult = TransitionState.STARTING;

    @Override
    public void run() {
      try (FiniteStateMachine.Transition transition = this.fsm.startTransition(this.endState)) {
        goToState(TransitionState.TRANSITIONING);
        try {
          // Hold the transition open; the owning test interrupts us to finish it.
          Thread.sleep(2000);
          this.transitionResult = TransitionState.TIMEOUT;
          return;
        } catch (InterruptedException ie) {
          // This is the signal to end the state transition, so do nothing
        }
      } catch (InterruptedException exc) {
        goToState(TransitionState.INTERRUPTED);
        return;
      } catch (FiniteStateMachine.UnallowedTransitionException exc) {
        goToState(TransitionState.UNALLOWED);
        return;
      } catch (FiniteStateMachine.FailedTransitionCallbackException exc) {
        goToState(TransitionState.CALLBACK_ERROR);
        return;
      }
      goToState(TransitionState.COMPLETED);
    }

    /** Blocks until this transitioner reaches one of the given states. */
    public void awaitState(Set<TransitionState> states) throws InterruptedException {
      try {
        this.lock.lock();
        while (!states.contains(this.transitionResult)) {
          this.condition.await();
        }
      } finally {
        this.lock.unlock();
      }
    }

    /** Publishes a new state and wakes any threads blocked in {@link #awaitState}. */
    public void goToState(TransitionState state) {
      try {
        this.lock.lock();
        this.transitionResult = state;
        this.condition.signalAll();
      } finally {
        this.lock.unlock();
      }
    }
  }

  /** Observable lifecycle phases of a {@link Transitioner}. */
  enum TransitionState {
    STARTING, TRANSITIONING, COMPLETED, INTERRUPTED, UNALLOWED, TIMEOUT, CALLBACK_ERROR
  }

  /**
   * State identified by name only (equals/hashCode use {@code name}), with optional
   * enter/leave callbacks; when no callback is supplied it records the last
   * transition as a string like {@code "enter:a->b"} for assertions.
   */
  @RequiredArgsConstructor
  @EqualsAndHashCode(of = "name")
  public static class NamedStateWithCallback implements StateWithCallbacks<NamedStateWithCallback> {
    @Getter
    private final String name;
    private final Function<NamedStateWithCallback, Void> enterCallback;
    private final Function<NamedStateWithCallback, Void> leaveCallback;
    // Records the most recent default-callback invocation, reset by tests between steps.
    String lastTransition = "";

    public NamedStateWithCallback(String name) {
      this(name, null, null);
    }

    private void setLastTransition(String callback, NamedStateWithCallback start, NamedStateWithCallback end) {
      this.lastTransition = String.format("%s:%s->%s", callback, start == null ? "null" : start.name, end.name);
    }

    @Override
    public void onEnterState(@Nullable NamedStateWithCallback previousState) {
      if (this.enterCallback == null) {
        setLastTransition("enter", previousState, this);
      } else {
        this.enterCallback.apply(previousState);
      }
    }

    @Override
    public void onLeaveState(NamedStateWithCallback nextState) {
      if (this.leaveCallback == null) {
        setLastTransition("leave", this, nextState);
      } else {
        this.leaveCallback.apply(nextState);
      }
    }
  }
}
| 4,086 |
0 | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/test/java/org/apache/gobblin/time/TimeIteratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.time;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link TimeIterator}.
 */
public class TimeIteratorTest {
  private ZoneId zone = ZoneId.of("America/Los_Angeles");

  /**
   * Walks day by day over a 12-day window and checks both the visited timestamps
   * and the inclusive endpoint count; the step arithmetic itself is covered by
   * {@link #testInc()}.
   */
  @Test
  public void testIterator() {
    ZonedDateTime begin = ZonedDateTime.of(2019, 12, 20, 11,
        20, 30, 0, zone);
    ZonedDateTime end = begin.plusDays(12);
    TimeIterator iterator = new TimeIterator(begin, end, TimeIterator.Granularity.DAY);
    int visited = 0;
    while (iterator.hasNext()) {
      Assert.assertEquals(iterator.next(), begin.plusDays(visited));
      visited++;
    }
    // Both endpoints are included: 12 day-steps yield 13 timestamps.
    Assert.assertEquals(visited, 13);
  }

  /** Adding at each granularity must roll minutes/hours/days/months correctly. */
  @Test
  public void testInc() {
    ZonedDateTime base = ZonedDateTime.of(2019, 12, 20, 11,
        20, 30, 0, zone);
    Assert.assertEquals(TimeIterator.inc(base, TimeIterator.Granularity.MINUTE, 40).toString(),
        "2019-12-20T12:00:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.inc(base, TimeIterator.Granularity.HOUR, 13).toString(),
        "2019-12-21T00:20:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.inc(base, TimeIterator.Granularity.DAY, 12).toString(),
        "2020-01-01T11:20:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.inc(base, TimeIterator.Granularity.MONTH, 1).toString(),
        "2020-01-20T11:20:30-08:00[America/Los_Angeles]");
  }

  /** Subtracting at each granularity must borrow across hour/day/month/year boundaries. */
  @Test
  public void testDec() {
    ZonedDateTime base = ZonedDateTime.of(2019, 12, 20, 11,
        20, 30, 0, zone);
    Assert.assertEquals(TimeIterator.dec(base, TimeIterator.Granularity.MINUTE, 21).toString(),
        "2019-12-20T10:59:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.dec(base, TimeIterator.Granularity.HOUR, 12).toString(),
        "2019-12-19T23:20:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.dec(base, TimeIterator.Granularity.DAY, 20).toString(),
        "2019-11-30T11:20:30-08:00[America/Los_Angeles]");
    Assert.assertEquals(TimeIterator.dec(base, TimeIterator.Granularity.MONTH, 12).toString(),
        "2018-12-20T11:20:30-08:00[America/Los_Angeles]");
  }
}
| 4,087 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/DefaultBrokerCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Lock;
import java.util.stream.Collectors;
import com.google.common.base.Predicate;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.Service;
import com.google.common.util.concurrent.Striped;
import javax.annotation.Nonnull;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import org.apache.gobblin.broker.iface.SharedResourceKey;
/**
* A backing cache for shared resources used by {@link SharedResourcesBrokerImpl}. Stores created objects in a guava
* cache keyed by factory, scope, and key.
* @param <S> the {@link ScopeType} class for the scopes topology used in this tree of brokers.
*/
@Slf4j
class DefaultBrokerCache<S extends ScopeType<S>> {
private final Cache<RawJobBrokerKey, Object> sharedResourceCache;
private final Cache<RawJobBrokerKey, ScopeWrapper<S>> autoScopeCache;
private final Striped<Lock> invalidationLock;
  public DefaultBrokerCache() {
    // Unbounded caches: entries live until explicitly invalidated (see the
    // invalidation path in getScoped).
    this.sharedResourceCache = CacheBuilder.newBuilder().build();
    this.autoScopeCache = CacheBuilder.newBuilder().build();
    // 20 lazily-created, weakly-referenced lock stripes bound contention during
    // invalidation without pinning memory for unused stripes.
    this.invalidationLock = Striped.lazyWeakLock(20);
  }
  /**
   * Composite key for shared resources in the cache: identifies an entry by the
   * scope it lives at, the name of the factory that created it, and the
   * factory-specific key. Equality and hashCode come from Lombok's {@code @Data},
   * so all three fields participate.
   */
  @Data
  class RawJobBrokerKey {
    // Scope wrapper at which the resource is (or will be) stored.
    private final ScopeWrapper<S> scope;
    private final String factoryName;
    private final SharedResourceKey key;
  }
/**
* Get an object for the specified factory, key, and broker at the scope selected by the factory. {@link DefaultBrokerCache}
* guarantees that calling this method from brokers with the same leaf scope will return the same object.
*/
@SuppressWarnings(value = "unchecked")
<T, K extends SharedResourceKey> T getAutoScoped(final SharedResourceFactory<T, K, S> factory, final K key,
final SharedResourcesBrokerImpl<S> broker)
throws ExecutionException {
// figure out auto scope
RawJobBrokerKey autoscopeCacheKey = new RawJobBrokerKey(broker.getWrappedSelfScope(), factory.getName(), key);
ScopeWrapper<S> selectedScope = this.autoScopeCache.get(autoscopeCacheKey, new Callable<ScopeWrapper<S>>() {
@Override
public ScopeWrapper<S> call() throws Exception {
return broker.getWrappedScope(factory.getAutoScope(broker, broker.getConfigView(null, key, factory.getName())));
}
});
// get actual object
return getScoped(factory, key, selectedScope, broker);
}
/**
* Get a scoped object from the cache.
*/
@SuppressWarnings(value = "unchecked")
<T, K extends SharedResourceKey> SharedResourceFactoryResponse<T> getScopedFromCache(
final SharedResourceFactory<T, K, S> factory, @Nonnull final K key,
@Nonnull final ScopeWrapper<S> scope, final SharedResourcesBrokerImpl<S> broker)
throws ExecutionException {
RawJobBrokerKey fullKey = new RawJobBrokerKey(scope, factory.getName(), key);
Object obj = this.sharedResourceCache.get(fullKey, new Callable<Object>() {
@Override
public Object call() throws Exception {
return factory.createResource(broker.getScopedView(scope.getType()), broker.getConfigView(scope.getType(), key,
factory.getName()));
}
});
return (SharedResourceFactoryResponse<T>)obj;
}
  /**
   * Get an object for the specified factory, key, scope, and broker. {@link DefaultBrokerCache}
   * guarantees that calling this method for the same factory, key, and scope will return the same object.
   */
  @SuppressWarnings(value = "unchecked")
  <T, K extends SharedResourceKey> T getScoped(final SharedResourceFactory<T, K, S> factory, @Nonnull final K key,
      @Nonnull final ScopeWrapper<S> scope, final SharedResourcesBrokerImpl<S> broker)
      throws ExecutionException {
    SharedResourceFactory<T, K, S> currentFactory = factory;
    K currentKey = key;
    ScopeWrapper<S> currentScope = scope;
    Object obj = getScopedFromCache(currentFactory, currentKey, currentScope, broker);
    // this loop is to continue looking up objects through redirection or reloading until a valid resource is found
    while (true) {
      if (obj instanceof ResourceCoordinate) {
        // The factory redirected the request to another (factory, key, scope) coordinate.
        // NOTE(review): currentFactory/currentKey/currentScope are intentionally NOT advanced to
        // the redirect target here, so a later invalidation below re-fetches from the ORIGINAL
        // coordinate — confirm this is the intended reload semantics.
        ResourceCoordinate<T, K, S> resourceCoordinate = (ResourceCoordinate<T, K, S>) obj;
        // A redirect may only point at an ancestor scope of the current scope.
        if (!SharedResourcesBrokerUtils.isScopeTypeAncestor((ScopeType) currentScope.getType(), ((ResourceCoordinate) obj).getScope())) {
          throw new RuntimeException(String
              .format("%s returned an invalid coordinate: scope %s is not an ancestor of %s.", currentFactory.getName(),
                  ((ResourceCoordinate) obj).getScope(), currentScope.getType()));
        }
        try {
          obj = getScopedFromCache(resourceCoordinate.getFactory(), resourceCoordinate.getKey(),
              broker.getWrappedScope(resourceCoordinate.getScope()), broker);
        } catch (NoSuchScopeException nsse) {
          // The redirect scope is not present in this broker's ancestor chain.
          throw new RuntimeException(String
              .format("%s returned an invalid coordinate: scope %s is not available.", factory.getName(),
                  resourceCoordinate.getScope().name()), nsse);
        }
      } else if (obj instanceof ResourceEntry) {
        T resource = ((ResourceEntry<T>) obj).getResourceIfValid();
        // valid resource found
        if (resource != null) {
          return resource;
        }
        // resource is invalid. The lock in this block is to reduce the chance of starvation where a thread keeps
        // getting objects that are invalidated by another thread.
        // NOTE(review): this lock stripe is keyed on the user-supplied key, while safeInvalidate
        // stripes on the full RawJobBrokerKey; the two therefore do not necessarily share a
        // stripe — confirm that is intentional.
        Lock lock = this.invalidationLock.get(key);
        try {
          lock.lock();
          RawJobBrokerKey fullKey = new RawJobBrokerKey(currentScope, currentFactory.getName(), currentKey);
          safeInvalidate(fullKey);
          // Re-fetch; the cache loader will create a fresh entry if invalidation removed the old one.
          obj = getScopedFromCache(currentFactory, currentKey, currentScope, broker);
        } finally {
          lock.unlock();
        }
      } else {
        // The factory returned something that is neither a redirect nor a resource entry.
        throw new RuntimeException(String.format("Invalid response from %s: %s.", factory.getName(), obj.getClass()));
      }
    }
  }
<T, K extends SharedResourceKey> void put(final SharedResourceFactory<T, K, S> factory, @Nonnull final K key,
@Nonnull final ScopeWrapper<S> scope, T instance) {
RawJobBrokerKey fullKey = new RawJobBrokerKey(scope, factory.getName(), key);
this.sharedResourceCache.put(fullKey, new ResourceInstance<>(instance));
}
private void safeInvalidate(RawJobBrokerKey key) {
Lock lock = this.invalidationLock.get(key);
lock.lock();
try {
Object obj = this.sharedResourceCache.getIfPresent(key);
if (obj != null && obj instanceof ResourceEntry && !((ResourceEntry) obj).isValid()) {
this.sharedResourceCache.invalidate(key);
((ResourceEntry) obj).onInvalidate();
}
} finally {
lock.unlock();
}
}
/**
* Invalidate all objects at scopes which are descendant of the input scope. Any such invalidated object that is a
* {@link Closeable} will be closed, and any such object which is a {@link Service} will be shutdown.
* @throws IOException
*/
public void close(ScopeWrapper<S> scope)
throws IOException {
List<Throwable> exceptionsList = Lists.newArrayList();
List<Service> awaitShutdown = Lists.newArrayList();
for (Map.Entry<RawJobBrokerKey, Object> entry : Maps.filterKeys(this.sharedResourceCache.asMap(),
new ScopeIsAncestorFilter(scope)).entrySet()) {
this.sharedResourceCache.invalidate(entry.getKey());
if (entry.getValue() instanceof ResourceInstance) {
Object obj = ((ResourceInstance) entry.getValue()).getResource();
// Catch unchecked exception while closing resources, make sure all resources managed by cache are closed.
try {
SharedResourcesBrokerUtils.shutdownObject(obj, log);
} catch (Throwable t) {
exceptionsList.add(t);
}
if (obj instanceof Service) {
awaitShutdown.add((Service) obj);
}
}
}
for (Service service : awaitShutdown) {
try {
service.awaitTerminated(10, TimeUnit.SECONDS);
} catch (TimeoutException te) {
log.error("Failed to shutdown {}.", service);
}
}
// log exceptions while closing resources up.
if (exceptionsList.size() > 0) {
log.error(exceptionsList.stream()
.map(Throwables::getStackTraceAsString).collect(Collectors.joining("\n")));
}
}
/**
* Filter {@link RawJobBrokerKey} that are not descendants of the input {@link ScopeWrapper}.
*/
@AllArgsConstructor
private class ScopeIsAncestorFilter implements Predicate<RawJobBrokerKey> {
private final ScopeWrapper<S> scope;
@Override
public boolean apply(RawJobBrokerKey input) {
if (this.scope == null) {
return true;
}
if (input.getScope() == null) {
return false;
}
return SharedResourcesBrokerUtils.isScopeAncestor(input.getScope(), this.scope);
}
}
}
| 4,088 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/EmptyKey.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import lombok.EqualsAndHashCode;
/**
 * A dummy {@link SharedResourceKey}. Use when a factory needs no key-specific state; all
 * instances are equal (lombok {@code @EqualsAndHashCode} with no fields), so every lookup with an
 * {@code EmptyKey} maps to the same cache entry.
 */
@EqualsAndHashCode
public final class EmptyKey implements SharedResourceKey {
  /** A singleton instance */
  public static final EmptyKey INSTANCE = new EmptyKey();

  /**
   * @return always {@code null}: an empty key contributes no key-specific configuration path.
   */
  @Override
  public String toConfigurationKey() {
    return null;
  }
}
| 4,089 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/ResourceInstance.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import lombok.Data;
/**
 * A {@link SharedResourceFactoryResponse} that returns a newly created resource instance.
 * An instance entry is permanently valid: {@link #isValid()} always returns true, so
 * {@link #onInvalidate()} should never be reached.
 */
@Data
public class ResourceInstance<T> implements ResourceEntry<T> {
  // Note: the name here is theResource instead of resource since to avoid a collision of the lombok generated getter
  // and the getResource() method defined in {@link ResourceEntry}. The collision results in unintended side effects
  // when getResource() is overridden since it may have additional logic that should not be executed when the value of
  // this field is fetched using the getter, such as in the Lombok generated toString().
  private final T theResource;

  /**
   * This method returns the resource, but may have logic before the return.
   * @return the resource
   */
  @Override
  public T getResource() {
    return getTheResource();
  }

  /** @return always {@code true}: a plain resource instance never expires. */
  @Override
  public boolean isValid() {
    return true;
  }

  /** Never expected to run, since this entry is always valid; fails loudly if it ever does. */
  @Override
  public void onInvalidate() {
    // this should never happen
    throw new RuntimeException();
  }
}
| 4,090 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/SharedResourcesBrokerFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.iface.ScopeInstance;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.util.ConfigUtils;
/**
 * Used to create a default implementation of {@link org.apache.gobblin.broker.iface.SharedResourcesBroker}.
 *
 * All {@link SharedResourcesBroker}s created by this factory automatically load a set of configurations. In order of
 * preference:
 * * Programmatically supplied configurations in {@link #createDefaultTopLevelBroker(Config, ScopeInstance)}.
 * * Configurations in a broker configuration resource. The default path of the resource is {@link #DEFAULT_BROKER_CONF_FILE},
 *   but its path can be overriden with {@link #BROKER_CONF_FILE_KEY} either in programmatically supplied configurations,
 *   java properties, or environment variables.
 * * Java properties of the current JVM.
 * * Environment variables of the current shell.
 * * Hadoop configuration obtained via {@link Configuration#Configuration()}. This can be disabled setting
 *   {@link #LOAD_HADOOP_CONFIGURATION} to false.
 */
public class SharedResourcesBrokerFactory {

  /** Set to false to skip merging Hadoop configuration (including gobblin-site.xml). */
  public static final String LOAD_HADOOP_CONFIGURATION = BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX + ".loadHadoopConfiguration";
  /** Overrides the classpath location of the broker configuration resource. */
  public static final String BROKER_CONF_FILE_KEY = BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX + ".configuration";
  public static final String DEFAULT_BROKER_CONF_FILE = "gobblinBroker.conf";
  private static final Splitter LIST_SPLITTER = Splitter.on(",").trimResults().omitEmptyStrings();
  // Fallback so the namespaces key always resolves (to the empty list) even when unset.
  private static final Config BROKER_NAMESPACES_FALLBACK = ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
      .put(BrokerConstants.GOBBLIN_BROKER_CONFIG_NAMESPACES, "").build());

  /**
   * Create a root {@link SharedResourcesBroker}. Subscoped brokers should be built using
   * {@link SharedResourcesBroker#newSubscopedBuilder(ScopeInstance)}.
   *
   * In general, this method will be called only once per application, and all other brokers will be children of the root
   * application broker.
   *
   * @param config The global configuration of the broker.
   * @param globalScope The scope of the root broker.
   * @param <S> The {@link ScopeType} DAG used for this broker tree.
   */
  public static <S extends ScopeType<S>> SharedResourcesBrokerImpl<S> createDefaultTopLevelBroker(Config config,
      ScopeInstance<S> globalScope) {
    if (!globalScope.getType().equals(globalScope.getType().rootScope())) {
      throw new IllegalArgumentException(String.format("The top level broker must be created at the root scope type. "
          + "%s is not a root scope type.", globalScope.getType()));
    }
    // The root scope has no parents.
    ScopeWrapper<S> scopeWrapper = new ScopeWrapper<>(globalScope.getType(), globalScope, Lists.<ScopeWrapper<S>>newArrayList());
    return new SharedResourcesBrokerImpl<>(new DefaultBrokerCache<S>(),
        scopeWrapper,
        Lists.newArrayList(new SharedResourcesBrokerImpl.ScopedConfig<>(globalScope.getType(),
            getBrokerConfig(addSystemConfigurationToConfig(config)))),
        ImmutableMap.of(globalScope.getType(), scopeWrapper));
  }

  // Implicit broker for the current thread (inherited by child threads); see getImplicitBroker().
  private static InheritableThreadLocal<SharedResourcesBroker<?>> threadLocalBroker = new ThreadLocalBroker();
  // Lazily created process-wide fallback broker; guarded by getSingleton().
  private static SharedResourcesBroker<SimpleScopeType> SINGLETON;

  /**
   * Get all broker configurations from the given {@code srcConfig}. Configurations from
   * {@value BrokerConstants#GOBBLIN_BROKER_CONFIG_PREFIX} is always loaded first, then in-order from namespaces,
   * which is encoded as a comma separated string keyed by {@value BrokerConstants#GOBBLIN_BROKER_CONFIG_NAMESPACES}.
   */
  @VisibleForTesting
  static Config getBrokerConfig(Config srcConfig) {
    Config allSrcConfig = srcConfig.withFallback(BROKER_NAMESPACES_FALLBACK);
    String namespaces = allSrcConfig.getString(BrokerConstants.GOBBLIN_BROKER_CONFIG_NAMESPACES);
    Config brokerConfig = ConfigUtils.getConfigOrEmpty(allSrcConfig, BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX);
    // Later namespaces are fallbacks for earlier ones, so the list is in decreasing precedence.
    for (String namespace : LIST_SPLITTER.splitToList(namespaces)) {
      brokerConfig = brokerConfig.withFallback(ConfigUtils.getConfigOrEmpty(allSrcConfig, namespace));
    }
    return brokerConfig;
  }

  /**
   * Get the implicit {@link SharedResourcesBroker} in the callers thread. This is either a singleton broker configured
   * from environment variables, java options, and classpath configuration options, or a specific broker injected
   * elsewhere in the application.
   *
   * In general, it is preferable to explicitly pass around {@link SharedResourcesBroker}s, as that allows better
   * control over the scoping. However, in cases where it is hard to do so, this method provides an alternative to
   * method of acquiring a configured broker.
   */
  public static SharedResourcesBroker<?> getImplicitBroker() {
    SharedResourcesBroker<?> threadLocal = threadLocalBroker.get();
    return threadLocal == null ? getSingleton() : threadLocal;
  }

  /**
   * Register a {@link SharedResourcesBroker} to be used as the implicit broker for this and all new children threads.
   */
  public static void registerImplicitBroker(SharedResourcesBroker<?> broker) {
    threadLocalBroker.set(broker);
  }

  // Lazy, synchronized initialization of the process-wide fallback broker.
  private static synchronized SharedResourcesBroker<SimpleScopeType> getSingleton() {
    if (SINGLETON == null) {
      SINGLETON = createDefaultTopLevelBroker(ConfigFactory.empty(), SimpleScopeType.GLOBAL.defaultScopeInstance());
    }
    return SINGLETON;
  }

  private static class ThreadLocalBroker extends InheritableThreadLocal<SharedResourcesBroker<?>> {}

  /**
   * Layer system-level configuration sources under {@code config}: the broker configuration
   * resource, JVM properties, environment variables, and (optionally) Hadoop configuration.
   * Only keys starting with the broker prefix are picked up from env/properties/Hadoop.
   */
  private static Config addSystemConfigurationToConfig(Config config) {
    Map<String, String> confMap = Maps.newHashMap();
    addBrokerKeys(confMap, System.getenv().entrySet());
    addBrokerKeys(confMap, System.getProperties().entrySet());
    Config systemConfig = ConfigFactory.parseMap(confMap);
    // tmpConfig is only used to resolve the (possibly overridden) broker conf resource path.
    Config tmpConfig = config.withFallback(systemConfig);
    String brokerConfPath = DEFAULT_BROKER_CONF_FILE;
    if (tmpConfig.hasPath(BROKER_CONF_FILE_KEY)) {
      brokerConfPath = tmpConfig.getString(BROKER_CONF_FILE_KEY);
    }
    Config resourceConfig = ConfigFactory.parseResources(SharedResourcesBrokerFactory.class, brokerConfPath);
    config = config.withFallback(resourceConfig).withFallback(systemConfig);
    if (ConfigUtils.getBoolean(config, LOAD_HADOOP_CONFIGURATION, true)) {
      Map<String, String> hadoopConfMap = Maps.newHashMap();
      Configuration hadoopConf = new Configuration();
      hadoopConf.addResource("gobblin-site.xml");
      addBrokerKeys(hadoopConfMap, hadoopConf);
      config = config.withFallback(ConfigFactory.parseMap(hadoopConfMap));
    }
    return config;
  }

  /**
   * Copy into {@code configMap} all entries whose key is a String starting with the broker
   * configuration prefix. Non-String keys and non-broker keys are ignored.
   */
  public static <S, T> void addBrokerKeys(Map<String, String> configMap, Iterable<Map.Entry<S, T>> entries) {
    for (Map.Entry<S, T> entry : entries) {
      Object key = entry.getKey();
      if (key instanceof String && ((String) key).startsWith(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX)) {
        configMap.put((String) key, entry.getValue().toString());
      }
    }
  }
}
| 4,091 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/ImmediatelyInvalidResourceEntry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import lombok.EqualsAndHashCode;
import lombok.extern.slf4j.Slf4j;
/**
 * A {@link ResourceEntry} whose value expires the instant it is handed out, forcing the factory
 * to build a fresh resource on every request. The resource is not closed on invalidation because
 * the cache cannot know its lifetime; the recipient is responsible for closing it.
 */
@Slf4j
@EqualsAndHashCode(callSuper = true)
public class ImmediatelyInvalidResourceEntry<T> extends ResourceInstance<T> {
  // Flips to false the first time the resource is handed out.
  private boolean live;

  public ImmediatelyInvalidResourceEntry(T resource) {
    super(resource);
    this.live = true;
  }

  @Override
  public synchronized T getResource() {
    // Expire this entry before handing the resource out, so the next lookup builds a new one.
    this.live = false;
    return super.getResource();
  }

  @Override
  public boolean isValid() {
    return this.live;
  }

  @Override
  public void onInvalidate() {
    // Intentionally a no-op: the resource's lifetime is owned by whoever received it.
  }

  /**
   * Atomically check validity and hand out the resource; synchronized so two callers cannot both
   * observe the entry as valid.
   * @return the resource, or null if this entry has already been consumed
   */
  @Override
  public synchronized T getResourceIfValid() {
    return this.live ? getResource() : null;
  }
}
| 4,092 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/TTLResourceEntry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
import lombok.extern.slf4j.Slf4j;
/**
 * A {@link ResourceEntry} that expires once a fixed time-to-live has elapsed since creation.
 * Optionally shuts the resource down when the expired entry is invalidated.
 */
@Slf4j
@SuppressWarnings
public class TTLResourceEntry<T> extends ResourceInstance<T> {
  // Epoch millis after which this entry is considered expired.
  private final long expirationTime;
  // Whether to shut the wrapped resource down when this entry is invalidated.
  private final boolean shutdownOnInvalidation;

  public TTLResourceEntry(T resource, long millisToLive, boolean closeOnInvalidation) {
    super(resource);
    this.expirationTime = System.currentTimeMillis() + millisToLive;
    this.shutdownOnInvalidation = closeOnInvalidation;
  }

  @Override
  public boolean isValid() {
    return this.expirationTime > System.currentTimeMillis();
  }

  @Override
  public void onInvalidate() {
    if (!this.shutdownOnInvalidation) {
      return;
    }
    SharedResourcesBrokerUtils.shutdownObject(getResource(), log);
  }
}
| 4,093 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/SharedResourcesBrokerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ExecutionException;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.iface.NoSuchScopeException;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.ScopeInstance;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.util.ConfigUtils;
import javax.annotation.Nullable;
import javax.annotation.concurrent.NotThreadSafe;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
/**
 * An implementation of {@link SharedResourcesBroker} using a {@link DefaultBrokerCache} for storing shared objects.
 *
 * Instances of this broker must be created as follows:
 * <pre>
 *  SharedResourcesBrokerImpl<MyScopes> rootBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(myConfig);
 *  SharedResourcesBrokerImpl<MyScopes> scopeBroker = topBroker.newSubscopedBuilder(scope, "scopeId").build();
 * </pre>
 */
@Slf4j
public class SharedResourcesBrokerImpl<S extends ScopeType<S>> implements SharedResourcesBroker<S> {

  // Cache shared by every broker in this broker tree; holds the actual resource objects.
  private final DefaultBrokerCache<S> brokerCache;
  // The leaf scope this broker operates at.
  private final ScopeWrapper<S> selfScopeWrapper;
  // Per-scope-type config overrides, root first; consulted in order by getConfigView().
  private final List<ScopedConfig<S>> scopedConfigs;
  // All scopes visible from this broker (self and ancestors) indexed by scope type.
  private final ImmutableMap<S, ScopeWrapper<S>> ancestorScopesByType;

  SharedResourcesBrokerImpl(DefaultBrokerCache<S> brokerCache, ScopeWrapper<S> selfScope,
      List<ScopedConfig<S>> scopedConfigs, Map<S, ScopeWrapper<S>> ancestorScopesByType) {
    this.brokerCache = brokerCache;
    this.selfScopeWrapper = selfScope;
    this.scopedConfigs = scopedConfigs;
    this.ancestorScopesByType = ImmutableMap.copyOf(ancestorScopesByType);
  }

  @Override
  public ScopeInstance<S> selfScope() {
    return this.selfScopeWrapper.getScope();
  }

  @Override
  public ScopeInstance<S> getScope(S scopeType) throws NoSuchScopeException {
    return getWrappedScope(scopeType).getScope();
  }

  @Override
  public <T, K extends SharedResourceKey> T getSharedResource(SharedResourceFactory<T, K, S> factory, K key)
      throws NotConfiguredException {
    try {
      return this.brokerCache.getAutoScoped(factory, key, this);
    } catch (ExecutionException ee) {
      // Unwrap the cache's ExecutionException so callers see the declared exception type.
      Throwable cause = ee.getCause();
      if (cause instanceof NotConfiguredException) {
        throw (NotConfiguredException) cause;
      }
      throw new RuntimeException(cause);
    }
  }

  @Override
  public <T, K extends SharedResourceKey> T getSharedResourceAtScope(SharedResourceFactory<T, K, S> factory, K key,
      S scope) throws NotConfiguredException, NoSuchScopeException {
    try {
      return this.brokerCache.getScoped(factory, key, getWrappedScope(scope), this);
    } catch (ExecutionException ee) {
      // Unwrap the cause so the declared NotConfiguredException actually propagates to callers,
      // mirroring getSharedResource() above (previously the whole ExecutionException was wrapped
      // in a RuntimeException and NotConfiguredException could never be thrown).
      Throwable cause = ee.getCause();
      if (cause instanceof NotConfiguredException) {
        throw (NotConfiguredException) cause;
      }
      throw new RuntimeException(cause);
    }
  }

  @Override
  public <T, K extends SharedResourceKey> void bindSharedResourceAtScope(SharedResourceFactory<T, K, S> factory,
      K key, S scopeType, T instance) throws NoSuchScopeException {
    this.brokerCache.put(factory, key, getWrappedScope(scopeType), instance);
  }

  /**
   * Get a {@link org.apache.gobblin.broker.iface.ConfigView} for the input scope, key, and factory.
   * Root-scope configs apply globally; configs for ancestor scope types of {@code scope} are
   * nested under the scope's name, with later (more specific) entries overriding earlier ones.
   */
  public <K extends SharedResourceKey> KeyedScopedConfigViewImpl<S, K> getConfigView(S scope, K key, String factoryName) {
    Config config = ConfigFactory.empty();
    for (ScopedConfig<S> scopedConfig : this.scopedConfigs) {
      if (scopedConfig.getScopeType().equals(scopedConfig.getScopeType().rootScope())) {
        config = ConfigUtils.getConfigOrEmpty(scopedConfig.getConfig(), factoryName).withFallback(config);
      } else if (scope != null && SharedResourcesBrokerUtils.isScopeTypeAncestor(scope, scopedConfig.getScopeType())) {
        Config tmpConfig = ConfigUtils.getConfigOrEmpty(scopedConfig.getConfig(), factoryName);
        tmpConfig = ConfigUtils.getConfigOrEmpty(tmpConfig, scope.name());
        config = tmpConfig.atKey(scope.name()).withFallback(config);
      }
    }
    return new KeyedScopedConfigViewImpl<>(scope, key, factoryName, config);
  }

  /**
   * A read-only view of this broker restricted to {@code scope} and its ancestors; handed to
   * factories so they cannot create sub-scopes.
   */
  NonExtendableBrokerView<S> getScopedView(final S scope) throws NoSuchScopeException {
    return new NonExtendableBrokerView<>(this.brokerCache, getWrappedScope(scope), this.scopedConfigs,
        Maps.filterKeys(this.ancestorScopesByType, new Predicate<S>() {
          @Override
          public boolean apply(@Nullable S input) {
            return SharedResourcesBrokerUtils.isScopeTypeAncestor(scope, input);
          }
        }));
  }

  ScopeWrapper<S> getWrappedScope(S scopeType) throws NoSuchScopeException {
    if (!this.ancestorScopesByType.containsKey(scopeType)) {
      throw new NoSuchScopeException(scopeType);
    }
    return this.ancestorScopesByType.get(scopeType);
  }

  ScopeWrapper<S> getWrappedSelfScope() {
    return this.selfScopeWrapper;
  }

  /**
   * Stores overrides of {@link Config} applicable to a specific {@link ScopeType} and its descendants.
   */
  @Data
  static class ScopedConfig<T extends ScopeType<T>> {
    private final T scopeType;
    private final Config config;
  }

  /**
   * Get a builder to create a descendant {@link SharedResourcesBrokerImpl} (i.e. its leaf scope is a descendant of this
   * broker's leaf scope) and the same backing {@link DefaultBrokerCache}.
   *
   * @param subscope the {@link ScopeInstance} of the new {@link SharedResourcesBroker}.
   * @return a {@link SubscopedBrokerBuilder}.
   */
  @Override
  public SubscopedBrokerBuilder newSubscopedBuilder(ScopeInstance<S> subscope) {
    return new SubscopedBrokerBuilder(subscope);
  }

  /**
   * A builder used to create a descendant {@link SharedResourcesBrokerImpl} with the same backing {@link DefaultBrokerCache}.
   */
  @NotThreadSafe
  public class SubscopedBrokerBuilder implements org.apache.gobblin.broker.iface.SubscopedBrokerBuilder<S, SharedResourcesBrokerImpl<S>> {
    private final ScopeInstance<S> scope;
    private final Map<S, ScopeWrapper<S>> ancestorScopes = Maps.newHashMap();
    private Config config = ConfigFactory.empty();

    private SubscopedBrokerBuilder(ScopeInstance<S> scope) {
      Preconditions.checkNotNull(scope, "Subscope instance cannot be null.");
      this.scope = scope;
      // The enclosing broker's leaf scope is always an ancestor of the new scope.
      if (SharedResourcesBrokerImpl.this.selfScopeWrapper != null) {
        ancestorScopes.put(SharedResourcesBrokerImpl.this.selfScopeWrapper.getType(), SharedResourcesBrokerImpl.this.selfScopeWrapper);
      }
    }

    /**
     * Specify additional ancestor {@link SharedResourcesBrokerImpl}. Useful when a {@link ScopeType} has multiple parents.
     */
    public SubscopedBrokerBuilder withAdditionalParentBroker(SharedResourcesBroker<S> broker) {
      // Brokers can only be combined if they share the same backing cache.
      if (!(broker instanceof SharedResourcesBrokerImpl) ||
          !((SharedResourcesBrokerImpl) broker).brokerCache.equals(SharedResourcesBrokerImpl.this.brokerCache)) {
        throw new IllegalArgumentException("Additional parent broker is not compatible.");
      }
      this.ancestorScopes.put(broker.selfScope().getType(), ((SharedResourcesBrokerImpl<S>) broker).selfScopeWrapper);
      return this;
    }

    /**
     * Specify {@link Config} overrides. Note these overrides will only be applicable at the new leaf scope and descendant
     * scopes. {@link Config} entries must start with {@link BrokerConstants#GOBBLIN_BROKER_CONFIG_PREFIX} (any entries
     * not satisfying that condition will be ignored).
     */
    public SubscopedBrokerBuilder withOverridingConfig(Config config) {
      this.config = ConfigUtils.getConfigOrEmpty(config, BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX).withFallback(this.config);
      return this;
    }

    /**
     * @return the new {@link SharedResourcesBrokerImpl}.
     */
    public SharedResourcesBrokerImpl<S> build() {
      Map<S, ScopeWrapper<S>> scopeMap = Maps.newHashMap();
      // Seed the scope map with every ancestor reachable from the declared parent scopes.
      for (ScopeWrapper<S> scopeWrapper : this.ancestorScopes.values()) {
        addScopeAndAncestorsToScopeMap(scopeMap, scopeWrapper);
      }
      ScopeWrapper<S> newScope = createWrappedScope(this.scope, scopeMap, this.scope.getType());
      // The new leaf must descend from this broker's leaf scope.
      if (SharedResourcesBrokerImpl.this.selfScopeWrapper != null && !SharedResourcesBrokerUtils.isScopeAncestor(newScope, SharedResourcesBrokerImpl.this.selfScopeWrapper)) {
        throw new IllegalArgumentException(String.format("Child scope %s must be a child of leaf scope %s.", newScope.getType(),
            SharedResourcesBrokerImpl.this.selfScopeWrapper.getType()));
      }
      List<ScopedConfig<S>> scopedConfigs = Lists.newArrayList(SharedResourcesBrokerImpl.this.scopedConfigs);
      if (!this.config.isEmpty()) {
        scopedConfigs.add(new ScopedConfig<>(newScope.getType(), this.config));
      }
      return new SharedResourcesBrokerImpl<>(SharedResourcesBrokerImpl.this.brokerCache, newScope, scopedConfigs, scopeMap);
    }

    /**
     * Recursively wrap {@code scope}, resolving each parent scope type either from {@code scopeMap}
     * or from the type's default instance. Fails if a required ancestor has neither.
     */
    private ScopeWrapper<S> createWrappedScope(ScopeInstance<S> scope, Map<S, ScopeWrapper<S>> scopeMap, S mainScopeType)
        throws IllegalArgumentException {
      List<ScopeWrapper<S>> parentScopes = Lists.newArrayList();
      ScopeType<S> scopeType = scope.getType();
      if (scopeType.parentScopes() != null) {
        for (S tpe : scopeType.parentScopes()) {
          if (scopeMap.containsKey(tpe)) {
            parentScopes.add(scopeMap.get(tpe));
          } else if (tpe.defaultScopeInstance() != null) {
            ScopeInstance<S> defaultInstance = tpe.defaultScopeInstance();
            // Guard against a misbehaving ScopeType whose default instance is of another type.
            if (!defaultInstance.getType().equals(tpe)) {
              throw new RuntimeException(String.format("Default scope instance %s for scope type %s is not of type %s.",
                  defaultInstance, tpe, tpe));
            }
            parentScopes.add(createWrappedScope(tpe.defaultScopeInstance(), scopeMap, mainScopeType));
          } else {
            throw new IllegalArgumentException(String.format(
                "Scope %s is an ancestor of %s, however it does not have a default id and is not provided as an ancestor scope.",
                tpe, mainScopeType));
          }
        }
      }
      ScopeWrapper<S> wrapper = new ScopeWrapper<>(scope.getType(), scope, parentScopes);
      scopeMap.put(wrapper.getType(), wrapper);
      return wrapper;
    }

    /**
     * Breadth-first walk of {@code scope} and its ancestors, adding each to {@code scopeMap}.
     * Fails if two different scope instances claim the same scope type.
     */
    private void addScopeAndAncestorsToScopeMap(Map<S, ScopeWrapper<S>> scopeMap, ScopeWrapper<S> scope) {
      if (scope == null) {
        return;
      }
      Queue<ScopeWrapper<S>> ancestors = new LinkedList<>();
      ancestors.add(scope);
      while (!ancestors.isEmpty()) {
        ScopeWrapper<S> thisScope = ancestors.poll();
        if (!scopeMap.containsKey(thisScope.getType())) {
          scopeMap.put(thisScope.getType(), thisScope);
        } else if (!scopeMap.get(thisScope.getType()).equals(thisScope)) {
          throw new IllegalStateException(String.format("Multiple scopes found with type %s but different identity: %s and %s.",
              thisScope.getType(), thisScope.getScope(), scopeMap.get(thisScope.getType()).getScope()));
        }
        ancestors.addAll(thisScope.getParentScopes());
      }
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    SharedResourcesBrokerImpl<?> that = (SharedResourcesBrokerImpl<?>) o;

    if (!brokerCache.equals(that.brokerCache)) {
      return false;
    }
    if (!ancestorScopesByType.equals(that.ancestorScopesByType)) {
      return false;
    }
    return selfScopeWrapper != null ? selfScopeWrapper.equals(that.selfScopeWrapper) : that.selfScopeWrapper == null;
  }

  @Override
  public int hashCode() {
    int result = brokerCache.hashCode();
    result = 31 * result + ancestorScopesByType.hashCode();
    result = 31 * result + (selfScopeWrapper != null ? selfScopeWrapper.hashCode() : 0);
    return result;
  }

  @Override
  public void close()
      throws IOException {
    ScopeInstance<S> scope = this.selfScopeWrapper.getScope();
    log.info("Closing broker with scope {} of id {}.", scope.getType().toString(), scope.getScopeId());
    // Invalidates (and closes/shuts down) every cached resource at this scope and below.
    this.brokerCache.close(this.selfScopeWrapper);
  }
}
| 4,094 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/ScopeWrapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import org.apache.gobblin.broker.iface.ScopeInstance;
import java.util.Collection;
import org.apache.gobblin.broker.iface.ScopeType;
import lombok.Data;
/**
* A wrapper around a {@link ScopeInstance} used by {@link SharedResourcesBrokerImpl} to store a
* {@link ScopeInstance} and its descendants.
*/
@Data
class ScopeWrapper<S extends ScopeType<S>> {
  // Scope type of the wrapped instance (SharedResourcesBrokerImpl constructs this from scope.getType()).
  private final S type;
  // The wrapped scope instance itself.
  private final ScopeInstance<S> scope;
  // Direct parent scopes; traversed transitively to reach all ancestors.
  private final Collection<ScopeWrapper<S>> parentScopes;
}
| 4,095 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/BrokerConfigurationKeyGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import com.google.common.base.Joiner;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import javax.annotation.Nonnull;
import lombok.Builder;
/**
* Generates key strings that the default {@link org.apache.gobblin.broker.iface.SharedResourcesBroker} can understand. Useful
* for populating a configuration programmatically.
*/
public class BrokerConfigurationKeyGenerator {

  // skipNulls() lets the optional key / scope segments simply drop out of the joined key.
  private static final Joiner JOINER = Joiner.on(".").skipNulls();

  /**
   * Generate a {@link org.apache.gobblin.broker.iface.SharedResourcesBroker} configuration key for a particular
   * {@link SharedResourceFactory}, {@link SharedResourceKey} and {@link ScopeType}.
   *
   * Example:
   * If the broker configuration contains a key-value pair with key:
   * generateKey(myFactory, myKey, myScopeType, "sample.key")
   * when requesting a resource created by myFactory, with the provided key and scope, the factory will be able to see
   * the key-value pair specified.
   *
   * Note:
   * {@link SharedResourceKey} and {@link ScopeType} may be null. In this case, the key-value pair will be visible to
   * the factory regardless of the key and scope requested by the user.
   */
  @Builder
  public static String generateKey(@Nonnull SharedResourceFactory factory, SharedResourceKey key, ScopeType scopeType,
      @Nonnull String configKey) {
    String scopeSegment = (scopeType == null) ? null : scopeType.name();
    String keySegment = (key == null) ? null : key.toConfigurationKey();
    return JOINER.join(BrokerConstants.GOBBLIN_BROKER_CONFIG_PREFIX, factory.getName(), scopeSegment, keySegment,
        configKey);
  }
}
| 4,096 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/SharedResourcesBrokerUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.Queue;
import org.slf4j.Logger;
import com.google.common.util.concurrent.Service;
import org.apache.gobblin.broker.iface.ScopeType;
/**
* General utilities for {@link org.apache.gobblin.broker.iface.SharedResourcesBroker} functionality.
*/
public class SharedResourcesBrokerUtils {

  /**
   * Determine if a {@link ScopeType} is an ancestor of another {@link ScopeType}.
   *
   * @param scopeType the scope type whose ancestry is searched.
   * @param possibleAncestor the candidate ancestor (a scope type is considered its own ancestor).
   * @return true if {@code possibleAncestor} is reachable through {@link ScopeType#parentScopes()}.
   */
  public static <S extends ScopeType<S>> boolean isScopeTypeAncestor(S scopeType, S possibleAncestor) {
    // BFS over the scope-type DAG starting at scopeType.
    Queue<S> ancestors = new LinkedList<>();
    ancestors.add(scopeType);
    while (!ancestors.isEmpty()) {
      if (ancestors.peek().equals(possibleAncestor)) {
        return true;
      }
      Collection<S> parentScopes = ancestors.poll().parentScopes();
      if (parentScopes != null) {
        // Root scope types may return null parents; only enqueue when present.
        ancestors.addAll(parentScopes);
      }
    }
    return false;
  }

  /**
   * Determine if a {@link ScopeWrapper} is an ancestor of another {@link ScopeWrapper}.
   *
   * @param scope the scope whose ancestry is searched (a scope is considered its own ancestor).
   * @param possibleAncestor the candidate ancestor.
   * @return true if {@code possibleAncestor} is reachable through {@link ScopeWrapper#getParentScopes()}.
   */
  static <S extends ScopeType<S>> boolean isScopeAncestor(ScopeWrapper<S> scope, ScopeWrapper<S> possibleAncestor) {
    // BFS over the scope-instance DAG starting at scope.
    Queue<ScopeWrapper<S>> ancestors = new LinkedList<>();
    ancestors.add(scope);
    while (!ancestors.isEmpty()) {
      if (ancestors.peek().equals(possibleAncestor)) {
        return true;
      }
      ancestors.addAll(ancestors.poll().getParentScopes());
    }
    return false;
  }

  /**
   * Close {@link Closeable}s and shutdown {@link Service}s. Objects of any other type are ignored.
   * Close failures are logged (with stack trace) but not rethrown — shutdown is best-effort.
   */
  public static void shutdownObject(Object obj, Logger log) {
    if (obj instanceof Service) {
      ((Service) obj).stopAsync();
    } else if (obj instanceof Closeable) {
      try {
        ((Closeable) obj).close();
      } catch (IOException ioe) {
        // Pass the exception as the last argument so SLF4J logs the stack trace
        // (previously the cause was silently dropped).
        log.error("Failed to close {}.", obj, ioe);
      }
    }
  }
}
| 4,097 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/NonExtendableBrokerView.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import java.util.List;
import java.util.Map;
import org.apache.gobblin.broker.iface.ScopeInstance;
import org.apache.gobblin.broker.iface.ScopeType;
/**
* A view of a {@link SharedResourcesBrokerImpl} at a higher scope than the leaf scope. This is used only to pass into
* factories, and it does not allow creating subscoped brokers.
*/
class NonExtendableBrokerView<S extends ScopeType<S>> extends SharedResourcesBrokerImpl<S> {

  /**
   * @param brokerCache shared cache backing this view.
   * @param selfScope the scope this view is pinned to.
   * @param scopedConfigs per-scope configurations.
   * @param ancestorScopesByType ancestor scopes indexed by scope type.
   */
  public NonExtendableBrokerView(DefaultBrokerCache<S> brokerCache, ScopeWrapper<S> selfScope,
      List<ScopedConfig<S>> scopedConfigs, Map<S, ScopeWrapper<S>> ancestorScopesByType) {
    super(brokerCache, selfScope, scopedConfigs, ancestorScopesByType);
  }

  /**
   * Always throws: this view exists only to be handed to factories and cannot spawn subscoped brokers.
   */
  @Override
  public SubscopedBrokerBuilder newSubscopedBuilder(ScopeInstance<S> subscope) {
    // Descriptive message so callers know why the call is rejected instead of seeing a bare UOE.
    throw new UnsupportedOperationException(
        "NonExtendableBrokerView does not support creating subscoped brokers.");
  }
}
| 4,098 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/broker/KeyedScopedConfigViewImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.broker;
import com.google.common.base.Joiner;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceKey;
import org.apache.gobblin.util.ConfigUtils;
import javax.annotation.Nullable;
import lombok.Data;
/**
* An implementation of {@link ScopedConfigView} that knows how to extract relevant subconfiguration from an input
* {@link Config}.
*/
@Data
public class KeyedScopedConfigViewImpl<S extends ScopeType<S>, K extends SharedResourceKey>
    implements ScopedConfigView<S, K> {

  private static final Joiner JOINER = Joiner.on(".");

  // Scope and key may both be null, meaning "not restricted to a scope / key".
  @Nullable private final S scope;
  private final K key;
  private final String factoryName;
  private final Config fullConfig;

  /** Configuration visible to the factory regardless of scope and key. */
  public Config getFactorySpecificConfig() {
    return this.fullConfig;
  }

  /** Sub-config under the scope's name, or empty when this view has no scope. */
  public Config getScopedConfig() {
    if (this.scope == null) {
      return ConfigFactory.empty();
    }
    return ConfigUtils.getConfigOrEmpty(this.fullConfig, this.scope.name());
  }

  /** Sub-config under the key's configuration key, or empty when key (or its config key) is null. */
  public Config getKeyedConfig() {
    if (this.key == null) {
      return ConfigFactory.empty();
    }
    String configKey = this.key.toConfigurationKey();
    if (configKey == null) {
      return ConfigFactory.empty();
    }
    // Reuse the computed key instead of calling toConfigurationKey() a second time.
    return ConfigUtils.getConfigOrEmpty(this.fullConfig, configKey);
  }

  /** Sub-config under "<scope>.<key>", or empty when either piece is absent. */
  public Config getKeyedScopedConfig() {
    // Guard key == null explicitly: previously this.key.toConfigurationKey() threw an NPE
    // when key was null but scope was not (getKeyedConfig already handled that case).
    if (this.scope == null || this.key == null || this.key.toConfigurationKey() == null) {
      return ConfigFactory.empty();
    }
    return ConfigUtils.getConfigOrEmpty(this.fullConfig,
        chainConfigKeys(this.scope.name(), this.key.toConfigurationKey()));
  }

  /** Merged view: keyed-scoped config wins, then keyed, then scoped, then factory-wide. */
  @Override
  public Config getConfig() {
    return getKeyedScopedConfig().withFallback(getKeyedConfig()).withFallback(getScopedConfig())
        .withFallback(getFactorySpecificConfig());
  }

  /** Same view re-pinned to a different scope type. */
  @Override
  public ScopedConfigView<S, K> getScopedView(S scopeType) {
    return new KeyedScopedConfigViewImpl<>(scopeType, this.key, this.factoryName, this.fullConfig);
  }

  /** Join configuration key fragments with '.'. */
  private static String chainConfigKeys(String... keys) {
    return JOINER.join(keys);
  }
}
| 4,099 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.