index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/time/TimeIterator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.time;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Iterator;
import java.util.NoSuchElementException;
import lombok.Getter;
/**
 * A {@link TimeIterator} iterates over time points within [{@code startTime}, {@code endTime}],
 * inclusive at both ends. It supports time points in various granularities (see
 * {@link Granularity}) and can iterate either forward or, with {@code reverse = true},
 * backward from {@code startTime} down to {@code endTime}.
 */
public class TimeIterator implements Iterator<ZonedDateTime> {

  /** Step size used when advancing from one time point to the next. */
  public enum Granularity {
    MINUTE, HOUR, DAY, MONTH
  }

  // Cursor: the next time point to be returned; mutated by next().
  private ZonedDateTime startTime;
  // Terminal time point (inclusive).
  private final ZonedDateTime endTime;
  private final Granularity granularity;
  // When true, iteration steps backward from startTime toward endTime.
  private final boolean reverse;

  public TimeIterator(ZonedDateTime startTime, ZonedDateTime endTime, Granularity granularity) {
    this(startTime, endTime, granularity, false);
  }

  /**
   * @param startTime first time point returned by the iterator
   * @param endTime last time point returned (inclusive)
   * @param granularity step size between consecutive time points
   * @param reverse if true, iterate from {@code startTime} backward to {@code endTime}
   */
  public TimeIterator(ZonedDateTime startTime, ZonedDateTime endTime, Granularity granularity, boolean reverse) {
    this.startTime = startTime;
    this.endTime = endTime;
    this.granularity = granularity;
    this.reverse = reverse;
  }

  /** Returns the current cursor position (explicit getter replacing lombok {@code @Getter}). */
  public ZonedDateTime getStartTime() {
    return startTime;
  }

  @Override
  public boolean hasNext() {
    // Forward: cursor not yet past endTime. Reverse: cursor not yet below endTime.
    return reverse ? !endTime.isAfter(startTime) : !startTime.isAfter(endTime);
  }

  @Override
  public ZonedDateTime next() {
    // Delegate the bounds check to hasNext() instead of duplicating the comparison.
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    ZonedDateTime current = startTime;
    startTime = reverse ? dec(startTime, granularity, 1) : inc(startTime, granularity, 1);
    return current;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException();
  }

  /**
   * Increases the given time by {@code units}, which must be positive, of {@code granularity}.
   */
  public static ZonedDateTime inc(ZonedDateTime time, Granularity granularity, long units) {
    switch (granularity) {
      case MINUTE:
        return time.plusMinutes(units);
      case HOUR:
        return time.plusHours(units);
      case DAY:
        return time.plusDays(units);
      case MONTH:
        return time.plusMonths(units);
    }
    // Unreachable for the current enum constants; guards against future additions.
    throw new IllegalArgumentException("Unsupported granularity: " + granularity);
  }

  /**
   * Decreases the given time by {@code units}, which must be positive, of {@code granularity}.
   */
  public static ZonedDateTime dec(ZonedDateTime time, Granularity granularity, long units) {
    switch (granularity) {
      case MINUTE:
        return time.minusMinutes(units);
      case HOUR:
        return time.minusHours(units);
      case DAY:
        return time.minusDays(units);
      case MONTH:
        return time.minusMonths(units);
    }
    throw new IllegalArgumentException("Unsupported granularity: " + granularity);
  }

  /**
   * Returns the signed number of whole {@code granularity} units between two date times.
   *
   * <p>Note: MINUTE/HOUR/DAY are computed from elapsed time via {@link Duration} (so a
   * DST-shortened day counts as less than one DAY), while MONTH uses calendar months via
   * {@link ChronoUnit#MONTHS}.
   *
   * @param d1 start date time
   * @param d2 end date time
   * @param granularity unit of the returned count
   * @return the duration as a long, truncated toward zero
   */
  public static long durationBetween(ZonedDateTime d1, ZonedDateTime d2, Granularity granularity) {
    switch (granularity) {
      case MINUTE:
        return Duration.between(d1, d2).toMinutes();
      case HOUR:
        return Duration.between(d1, d2).toHours();
      case DAY:
        return Duration.between(d1, d2).toDays();
      case MONTH:
        return ChronoUnit.MONTHS.between(d1, d2);
    }
    throw new IllegalArgumentException("Unsupported granularity: " + granularity);
  }
}
| 4,300 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/data/management/copy | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/data/management/copy/hive/WhitelistBlacklist.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.data.management.copy.hive;
import java.io.IOException;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import com.google.common.base.Optional;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;
import com.google.common.collect.Sets;
import com.typesafe.config.Config;
/**
 * A whitelist / blacklist implementation for filtering Hive tables. It can be configured as
 * case-insensitive({@code ignoreCase = true}) or case-sensitive({@code ignoreCase = false}). By default, it's
 * case-insensitive. <br>
 *
 * <p></p>
 * Parses input whitelist and blacklist of the form
 * [dbpattern.tablepattern1|tablepattern2|...],... and filters accordingly. The db and table patterns accept "*"
 * characters. Each of whitelist and blacklist is a list of patterns. For a table to be accepted, it must fail the
 * blacklist filter and pass the whitelist filter. Empty whitelist or blacklist are noops.
 *
 * <p></p>
 * <p>
 * Example whitelist and blacklist patterns:
 * <li> db1.table1 -> only db1.table1 passes.
 * <li> db1 -> any table under db1 passes.
 * <li> db1.table* -> any table under db1 whose name satisfies the pattern table* passes.
 * <li> db* -> all tables from all databases whose names satisfy the pattern db* pass.
 * <li> db*.table* -> db and table must satisfy the patterns db* and table* respectively
 * <li> db1.table1,db2.table2 -> combine expressions for different databases with comma.
 * <li> db1.table1|table2 -> combine expressions for same database with "|".
 * </p>
 */
public class WhitelistBlacklist implements Serializable {
  // Config keys consumed by the Config-based constructor.
  public static final String WHITELIST = "whitelist";
  public static final String BLACKLIST = "blacklist";
  public static final String IGNORE_CASE = "whitelistBlacklist.ignoreCase";
  // Sentinel pattern meaning "every table of a database". The matching code checks
  // set.contains(ALL_TABLES); Pattern does not override equals(), so presumably the
  // lookup relies on this exact shared instance being stored — hence the special case
  // in populateMultimap below.
  private static final Pattern ALL_TABLES = Pattern.compile(".*");
  // Maps each database pattern to the set of table patterns listed under it.
  private final SetMultimap<Pattern, Pattern> whitelistMultimap;
  private final SetMultimap<Pattern, Pattern> blacklistMultimap;
  // If true, both the configured patterns and the queried names are lower-cased before matching.
  private final boolean ignoreCase;
  /**
   * Builds the filter from the {@code whitelist}, {@code blacklist} and
   * {@code whitelistBlacklist.ignoreCase} config keys. Missing whitelist/blacklist default
   * to empty strings (no-op filters); missing ignoreCase defaults to true.
   */
  public WhitelistBlacklist(Config config) throws IOException {
    this(config.hasPath(WHITELIST) ? config.getString(WHITELIST) : "",
        config.hasPath(BLACKLIST) ? config.getString(BLACKLIST) : "",
        !config.hasPath(IGNORE_CASE) || config.getBoolean(IGNORE_CASE));
  }
  /** Case-insensitive filter built from raw whitelist/blacklist expressions. */
  public WhitelistBlacklist(String whitelist, String blacklist) throws IOException {
    this(whitelist, blacklist, true);
  }
  /**
   * @param whitelist comma-separated db[.table1|table2|...] expressions; empty accepts everything
   * @param blacklist comma-separated db[.table1|table2|...] expressions; empty rejects nothing
   * @param ignoreCase whether matching is case-insensitive
   * @throws IOException if an expression contains more than one '.' separator
   */
  public WhitelistBlacklist(String whitelist, String blacklist, boolean ignoreCase) throws IOException {
    this.whitelistMultimap = HashMultimap.create();
    this.blacklistMultimap = HashMultimap.create();
    this.ignoreCase = ignoreCase;
    if (ignoreCase) {
      // Lower-case the configured patterns once up front; query inputs are lower-cased in accept().
      populateMultimap(this.whitelistMultimap, whitelist.toLowerCase());
      populateMultimap(this.blacklistMultimap, blacklist.toLowerCase());
    } else {
      populateMultimap(this.whitelistMultimap, whitelist);
      populateMultimap(this.blacklistMultimap, blacklist);
    }
  }
  /**
   * @return Whether database db might contain tables accepted by this {@link WhitelistBlacklist}.
   */
  public boolean acceptDb(String db) {
    return accept(db, Optional.<String> absent());
  }
  /**
   * @return Whether the input table is accepted by this {@link WhitelistBlacklist}.
   */
  public boolean acceptTable(String db, String table) {
    return accept(db, table == null? Optional.<String> absent(): Optional.fromNullable(table));
  }
  // Core decision: reject if the blacklist matches; otherwise require a whitelist match.
  // An empty whitelist accepts everything that survived the blacklist.
  private boolean accept(String db, Optional<String> table) {
    String adjustedDb = ignoreCase ? db.toLowerCase() : db;
    Optional<String> adjustedTable = ignoreCase && table.isPresent() ? Optional.of(table.get().toLowerCase()) : table;
    if (!this.blacklistMultimap.isEmpty() &&
        multimapContains(this.blacklistMultimap, adjustedDb, adjustedTable, true)) {
      return false;
    }
    return this.whitelistMultimap.isEmpty() ||
        multimapContains(this.whitelistMultimap, adjustedDb, adjustedTable, false);
  }
  // Parses "db1.t1|t2,db2" style expressions into the multimap. A bare db expression
  // (no '.') maps to ALL_TABLES; '*' wildcards are translated to the regex ".*".
  private static void populateMultimap(SetMultimap<Pattern, Pattern> multimap, String list) throws IOException {
    Splitter tokenSplitter = Splitter.on(",").omitEmptyStrings().trimResults();
    Splitter partSplitter = Splitter.on(".").omitEmptyStrings().trimResults();
    Splitter tableSplitter = Splitter.on("|").omitEmptyStrings().trimResults();
    for (String token : tokenSplitter.split(list)) {
      if (!Strings.isNullOrEmpty(token)) {
        List<String> parts = partSplitter.splitToList(token);
        if (parts.size() > 2) {
          // Only db or db.table forms are legal; anything deeper is a configuration error.
          throw new IOException("Invalid token " + token);
        }
        Pattern databasePattern = Pattern.compile(parts.get(0).replace("*", ".*"));
        Set<Pattern> tablePatterns = Sets.newHashSet();
        if (parts.size() == 2) {
          String tables = parts.get(1);
          for (String table : tableSplitter.split(tables)) {
            if (table.equals("*")) {
              // special case, must use ALL_TABLES due to use of set.contains(ALL_TABLES) in multimapContains
              tablePatterns.add(ALL_TABLES);
            } else {
              tablePatterns.add(Pattern.compile(table.replace("*", ".*")));
            }
          }
        } else {
          tablePatterns.add(ALL_TABLES);
        }
        multimap.putAll(databasePattern, tablePatterns);
      }
    }
  }
  // Returns true if some db pattern matches the database and, when a table name is given,
  // one of that db pattern's table patterns matches it. With no table name: a whitelist
  // (blacklist == false) matches on the db alone, whereas a blacklist only matches if it
  // blacklists ALL tables of the db (otherwise some table might still be acceptable).
  // NOTE(review): the db-only case returns on the FIRST db pattern that matches; a second
  // matching db pattern holding ALL_TABLES would not be consulted — confirm this is intended,
  // since keySet() iteration order is unspecified.
  private static boolean multimapContains(SetMultimap<Pattern, Pattern> multimap, String database,
      Optional<String> table, boolean blacklist) {
    for (Pattern dbPattern : multimap.keySet()) {
      if (dbPattern.matcher(database).matches()) {
        if (!table.isPresent()) {
          // if we are only matching database
          return !blacklist || multimap.get(dbPattern).contains(ALL_TABLES);
        }
        for (Pattern tablePattern : multimap.get(dbPattern)) {
          if (tablePattern.matcher(table.get()).matches()) {
            return true;
          }
        }
      }
    }
    return false;
  }
}
| 4,301 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/exception/QuotaExceededException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.exception;
import java.io.IOException;
/**
 * Signals that an operation was rejected because a configured quota has been exceeded.
 * Extends {@link IOException} so existing I/O error handling paths catch it.
 */
public class QuotaExceededException extends IOException {

  // Explicit serialVersionUID for a Serializable type, consistent with sibling
  // exception classes in this codebase.
  private static final long serialVersionUID = 1L;

  /**
   * @param message description of the quota that was exceeded
   */
  public QuotaExceededException(String message) {
    super(message);
  }

  /**
   * @param message description of the quota that was exceeded
   * @param cause underlying failure, preserved for diagnostics
   */
  public QuotaExceededException(String message, Throwable cause) {
    super(message, cause);
  }
}
| 4,302 |
0 | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-utility/src/main/java/org/apache/gobblin/exception/NonTransientException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.exception;
/**
 * A {@link RuntimeException} indicating a permanent failure: retrying the failed
 * operation cannot succeed, so callers should fail fast rather than retry.
 */
public class NonTransientException extends RuntimeException {

  private static final long serialVersionUID = -973030180704599529L;

  /** Creates an exception with neither a detail message nor a cause. */
  public NonTransientException() {
    super();
  }

  /**
   * @param message detail message describing the permanent failure
   */
  public NonTransientException(String message) {
    super(message);
  }

  /**
   * @param message detail message describing the permanent failure
   * @param t underlying cause, preserved for diagnostics
   */
  public NonTransientException(String message, Throwable t) {
    super(message, t);
  }

  /**
   * @param t underlying cause, preserved for diagnostics
   */
  public NonTransientException(Throwable t) {
    super(t);
  }
}
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/DatabaseJobHistoryStoreV100Test.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link DatabaseJobHistoryStore} V1.0.0.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.metastore"})
public class DatabaseJobHistoryStoreV100Test extends DatabaseJobHistoryStoreTest {
  @Override
  protected String getVersion() {
    // Schema version under test; the base class uses it to provision a matching
    // test metastore database.
    return "1.0.0";
  }
}
| 4,304 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/DatabaseJobHistoryStoreV101Test.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link DatabaseJobHistoryStore} V1.0.1.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.metastore"})
public class DatabaseJobHistoryStoreV101Test extends DatabaseJobHistoryStoreTest {
  @Override
  protected String getVersion() {
    // Schema version under test; the base class uses it to provision a matching
    // test metastore database.
    return "1.0.1";
  }
}
| 4,305 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/MysqlDataSourceFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import javax.sql.DataSource;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
 * Unit tests for {@link MysqlDataSourceFactory}: verifies that data sources are shared
 * for identical configurations and kept distinct when the URL or user differs.
 */
@Test(groups = { "gobblin.metastore" })
public class MysqlDataSourceFactoryTest {

  /** Builds a minimal state-store config for the given url/user pair (password is constant). */
  private static Config dataSourceConfig(String url, String user) {
    return ConfigFactory.parseMap(ImmutableMap.of(ConfigurationKeys.STATE_STORE_DB_URL_KEY, url,
        ConfigurationKeys.STATE_STORE_DB_USER_KEY, user,
        ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, "dummypwd"));
  }

  /** Retrieves a data source from the factory through the implicit shared broker. */
  private static DataSource dataSource(Config config) throws IOException {
    return MysqlDataSourceFactory.get(config, SharedResourcesBrokerFactory.getImplicitBroker());
  }

  @Test
  public void testSameKey() throws IOException {
    // Identical configs must resolve to the same shared data source instance.
    Config config = dataSourceConfig("url", "user");
    DataSource dataSource1 = dataSource(config);
    DataSource dataSource2 = dataSource(config);
    Assert.assertEquals(dataSource1, dataSource2);
  }

  @Test
  public void testDifferentKey() throws IOException {
    // Different URLs key different data sources.
    DataSource dataSource1 = dataSource(dataSourceConfig("url1", "user"));
    DataSource dataSource2 = dataSource(dataSourceConfig("url2", "user"));
    Assert.assertNotEquals(dataSource1, dataSource2);
  }

  @Test
  public void testSameDbDifferentUser() throws IOException {
    // Same URL but different users must not share a data source.
    DataSource dataSource1 = dataSource(dataSourceConfig("url1", "user1"));
    DataSource dataSource2 = dataSource(dataSourceConfig("url1", "user2"));
    Assert.assertNotEquals(dataSource1, dataSource2);
  }
}
| 4,306 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/DatabaseJobHistoryStoreV103Test.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link DatabaseJobHistoryStore} V1.0.3.
 *
 */
@Test(groups = {"gobblin.metastore"})
public class DatabaseJobHistoryStoreV103Test extends DatabaseJobHistoryStoreTest {
  @Override
  protected String getVersion() {
    // Schema version under test; the base class uses it to provision a matching
    // test metastore database. (Javadoc previously said "V1.0.1" — copy-paste error.)
    return "1.0.3";
  }
}
| 4,307 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/DatabaseJobHistoryStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.linkedin.data.template.StringMap;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.testing.ITestMetastoreDatabase;
import org.apache.gobblin.metastore.testing.TestMetastoreDatabaseFactory;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobExecutionQuery;
import org.apache.gobblin.rest.JobStateEnum;
import org.apache.gobblin.rest.LauncherTypeEnum;
import org.apache.gobblin.rest.Metric;
import org.apache.gobblin.rest.MetricArray;
import org.apache.gobblin.rest.MetricTypeEnum;
import org.apache.gobblin.rest.QueryIdTypeEnum;
import org.apache.gobblin.rest.Table;
import org.apache.gobblin.rest.TableTypeEnum;
import org.apache.gobblin.rest.TaskExecutionInfo;
import org.apache.gobblin.rest.TaskExecutionInfoArray;
import org.apache.gobblin.rest.TaskStateEnum;
/**
* Unit tests for {@link DatabaseJobHistoryStore}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.metastore"})
public abstract class DatabaseJobHistoryStoreTest {
private final List<JobExecutionInfo> expectedJobExecutionInfos = Lists.newArrayList();
private ITestMetastoreDatabase testMetastoreDatabase;
private JobHistoryStore jobHistoryStore;
protected abstract String getVersion();
@BeforeClass
public void setUp()
    throws Exception {
  // Invalidate typesafe-config caches so this test class starts from fresh config state.
  ConfigFactory.invalidateCaches();
  // Provision a test metastore database at the schema version supplied by the subclass.
  testMetastoreDatabase = TestMetastoreDatabaseFactory.get(getVersion());
  Properties properties = new Properties();
  properties.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY, testMetastoreDatabase.getJdbcUrl());
  // Wire the history store through Guice so it is built with production bindings.
  Injector injector = Guice.createInjector(new MetaStoreModule(properties));
  this.jobHistoryStore = injector.getInstance(JobHistoryStore.class);
}
@Test
public void testInsert()
    throws IOException {
  // Seed two job executions; the second gets a different table type for its second task
  // (see create()) so table-based queries can distinguish the two fixtures.
  this.jobHistoryStore.put(create(0, false));
  this.jobHistoryStore.put(create(1, true));
}
@Test(dependsOnMethods = {"testInsert"})
public void testUpdate()
    throws IOException {
  // Transition every inserted job (and each of its tasks) to COMMITTED and write it back;
  // the query tests that depend on this method verify the updated state round-trips.
  for (JobExecutionInfo jobExecutionInfo : this.expectedJobExecutionInfos) {
    jobExecutionInfo.setEndTime(System.currentTimeMillis());
    jobExecutionInfo.setDuration(jobExecutionInfo.getEndTime() - jobExecutionInfo.getStartTime());
    jobExecutionInfo.setState(JobStateEnum.COMMITTED);
    jobExecutionInfo.setCompletedTasks(jobExecutionInfo.getLaunchedTasks());
    for (TaskExecutionInfo taskExecutionInfo : jobExecutionInfo.getTaskExecutions()) {
      // Tasks share the job's end time; their durations are computed from their own start times.
      taskExecutionInfo.setEndTime(jobExecutionInfo.getEndTime());
      taskExecutionInfo.setDuration(taskExecutionInfo.getEndTime() - taskExecutionInfo.getStartTime());
      taskExecutionInfo.setState(TaskStateEnum.COMMITTED);
    }
    this.jobHistoryStore.put(jobExecutionInfo);
  }
}
@Test(dependsOnMethods = {"testUpdate"})
public void testQueryByJobId()
    throws IOException {
  // Look up a single execution by its exact job id and verify a full-fidelity round trip.
  JobExecutionQuery queryByJobId = new JobExecutionQuery();
  queryByJobId.setIdType(QueryIdTypeEnum.JOB_ID);
  queryByJobId.setId(JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(0).getJobId()));
  List<JobExecutionInfo> result = this.jobHistoryStore.get(queryByJobId);
  Assert.assertEquals(result.size(), 1);
  JobExecutionInfo actual = result.get(0);
  JobExecutionInfo expected = this.expectedJobExecutionInfos.get(0);
  assertJobExecution(actual, expected);
}
@Test(dependsOnMethods = {"testUpdate"})
public void testQueryByJobName()
    throws IOException {
  // Query by job name; each fixture has a unique name, so exactly one result is expected.
  JobExecutionQuery queryByJobName = new JobExecutionQuery();
  queryByJobName.setIdType(QueryIdTypeEnum.JOB_NAME);
  queryByJobName.setId(JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(0).getJobName()));
  List<JobExecutionInfo> result = this.jobHistoryStore.get(queryByJobName);
  Assert.assertEquals(result.size(), 1);
  JobExecutionInfo actual = result.get(0);
  JobExecutionInfo expected = this.expectedJobExecutionInfos.get(0);
  assertJobExecution(actual, expected);
}
@Test(dependsOnMethods = {"testUpdate"})
public void testQueryByTable()
    throws IOException {
  // Query by the table used by the first task of job 0; both fixture jobs have a task on an
  // equal table, so both jobs come back, each restricted to its matching task execution.
  JobExecutionQuery queryByTable = new JobExecutionQuery();
  queryByTable.setIdType(QueryIdTypeEnum.TABLE);
  queryByTable.setId(
      JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(0).getTaskExecutions().get(0).getTable()));
  List<JobExecutionInfo> result = this.jobHistoryStore.get(queryByTable);
  Assert.assertEquals(result.size(), 2);
  // NOTE(review): assumes the store returns jobs in insertion order — confirm it guarantees this.
  JobExecutionInfo actual = result.get(0);
  Assert.assertEquals(actual.getJobName(), this.expectedJobExecutionInfos.get(0).getJobName());
  Assert.assertEquals(actual.getJobId(), this.expectedJobExecutionInfos.get(0).getJobId());
  Assert.assertEquals(actual.getTaskExecutions().size(), 1);
  Assert.assertEquals(actual.getTaskExecutions().get(0).getTable(),
      this.expectedJobExecutionInfos.get(0).getTaskExecutions().get(0).getTable());
  actual = result.get(1);
  Assert.assertEquals(actual.getJobName(), this.expectedJobExecutionInfos.get(1).getJobName());
  Assert.assertEquals(actual.getJobId(), this.expectedJobExecutionInfos.get(1).getJobId());
  Assert.assertEquals(actual.getTaskExecutions().size(), 1);
  Assert.assertEquals(actual.getTaskExecutions().get(0).getTable(),
      this.expectedJobExecutionInfos.get(1).getTaskExecutions().get(0).getTable());
  // The second task of job 1 uses a different table type, so querying by that table should
  // return only job 1 with only that task.
  queryByTable.setId(
      JobExecutionQuery.Id.create(this.expectedJobExecutionInfos.get(1).getTaskExecutions().get(1).getTable()));
  result = this.jobHistoryStore.get(queryByTable);
  Assert.assertEquals(result.size(), 1);
  actual = result.get(0);
  Assert.assertEquals(actual.getJobName(), this.expectedJobExecutionInfos.get(1).getJobName());
  Assert.assertEquals(actual.getJobId(), this.expectedJobExecutionInfos.get(1).getJobId());
  Assert.assertEquals(result.size(), 1);
  Assert.assertEquals(actual.getTaskExecutions().size(), 1);
  Assert.assertEquals(actual.getTaskExecutions().get(0).getTable(),
      this.expectedJobExecutionInfos.get(1).getTaskExecutions().get(1).getTable());
}
@AfterClass(alwaysRun = true)
public void tearDown()
    throws Exception {
  // Close the store before the backing database so connections shut down cleanly.
  if (this.jobHistoryStore != null) {
    this.jobHistoryStore.close();
  }
  if (this.testMetastoreDatabase != null) {
    this.testMetastoreDatabase.close();
  }
}
// Builds a two-task job execution fixture in PENDING state and records it in
// expectedJobExecutionInfos for later assertions. index uniquifies names and property
// keys; differentTableType gives the second task a SNAPSHOT_APPEND table (instead of
// SNAPSHOT_ONLY) so table-based queries can distinguish the fixtures.
private JobExecutionInfo create(int index, boolean differentTableType) {
  JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
  jobExecutionInfo.setJobName("TestJob" + index);
  // Job id embeds the wall-clock time to keep ids unique across runs.
  jobExecutionInfo.setJobId(jobExecutionInfo.getJobName() + "_" + System.currentTimeMillis());
  jobExecutionInfo.setStartTime(System.currentTimeMillis());
  jobExecutionInfo.setState(JobStateEnum.PENDING);
  jobExecutionInfo.setLaunchedTasks(2);
  jobExecutionInfo.setCompletedTasks(0);
  jobExecutionInfo.setLauncherType(LauncherTypeEnum.LOCAL);
  jobExecutionInfo.setTrackingUrl("localhost");
  // One job-level counter metric.
  MetricArray jobMetrics = new MetricArray();
  Metric jobMetric1 = new Metric();
  jobMetric1.setGroup("JOB");
  jobMetric1.setName("jm1");
  jobMetric1.setType(MetricTypeEnum.COUNTER);
  jobMetric1.setValue("100");
  jobMetrics.add(jobMetric1);
  jobExecutionInfo.setMetrics(jobMetrics);
  Map<String, String> jobProperties = Maps.newHashMap();
  jobProperties.put("k" + index, "v" + index);
  jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
  // First task: table Test.Test1 (always SNAPSHOT_ONLY), watermarks [0, 1000].
  TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
  TaskExecutionInfo taskExecutionInfo1 = new TaskExecutionInfo();
  taskExecutionInfo1.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo1.setTaskId(jobExecutionInfo.getJobId() + "_0");
  taskExecutionInfo1.setStartTime(System.currentTimeMillis());
  taskExecutionInfo1.setState(TaskStateEnum.PENDING);
  taskExecutionInfo1.setLowWatermark(0L);
  taskExecutionInfo1.setHighWatermark(1000L);
  Table table1 = new Table();
  table1.setNamespace("Test");
  table1.setName("Test1");
  table1.setType(TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo1.setTable(table1);
  MetricArray taskMetrics1 = new MetricArray();
  Metric taskMetric1 = new Metric();
  taskMetric1.setGroup("TASK");
  taskMetric1.setName("tm1");
  taskMetric1.setType(MetricTypeEnum.COUNTER);
  taskMetric1.setValue("100");
  taskMetrics1.add(taskMetric1);
  taskExecutionInfo1.setMetrics(taskMetrics1);
  Map<String, String> taskProperties1 = Maps.newHashMap();
  taskProperties1.put("k1" + index, "v1" + index);
  taskExecutionInfo1.setTaskProperties(new StringMap(taskProperties1));
  taskExecutionInfos.add(taskExecutionInfo1);
  // Second task: table Test.Test2, type varies with differentTableType, watermarks [0, 2000].
  TaskExecutionInfo taskExecutionInfo2 = new TaskExecutionInfo();
  taskExecutionInfo2.setJobId(jobExecutionInfo.getJobId());
  taskExecutionInfo2.setTaskId(jobExecutionInfo.getJobId() + "_1");
  taskExecutionInfo2.setStartTime(System.currentTimeMillis());
  taskExecutionInfo2.setState(TaskStateEnum.PENDING);
  taskExecutionInfo2.setLowWatermark(0L);
  taskExecutionInfo2.setHighWatermark(2000L);
  Table table2 = new Table();
  table2.setNamespace("Test");
  table2.setName("Test2");
  table2.setType(differentTableType ? TableTypeEnum.SNAPSHOT_APPEND : TableTypeEnum.SNAPSHOT_ONLY);
  taskExecutionInfo2.setTable(table2);
  MetricArray taskMetrics2 = new MetricArray();
  Metric taskMetric2 = new Metric();
  taskMetric2.setGroup("TASK");
  taskMetric2.setName("tm2");
  taskMetric2.setType(MetricTypeEnum.COUNTER);
  taskMetric2.setValue("100");
  taskMetrics2.add(taskMetric2);
  taskExecutionInfo2.setMetrics(taskMetrics2);
  Map<String, String> taskProperties2 = Maps.newHashMap();
  taskProperties2.put("k2" + index, "v2" + index);
  taskExecutionInfo2.setTaskProperties(new StringMap(taskProperties2));
  taskExecutionInfos.add(taskExecutionInfo2);
  jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
  // Record the fixture so the query tests can compare against it after updates.
  this.expectedJobExecutionInfos.add(jobExecutionInfo);
  return jobExecutionInfo;
}
/**
 * Asserts field-by-field equality of two {@link JobExecutionInfo} records, recursing into
 * metrics and task executions for precise failure localization.
 */
private void assertJobExecution(JobExecutionInfo actual, JobExecutionInfo expected) {
  Assert.assertEquals(actual.getJobName(), expected.getJobName());
  Assert.assertEquals(actual.getJobId(), expected.getJobId());
  // A job without a recorded duration is reported as zero.
  if (!expected.hasDuration()) {
    Assert.assertEquals(actual.getDuration().longValue(), 0L);
  } else {
    Assert.assertEquals(actual.getDuration(), expected.getDuration());
  }
  Assert.assertEquals(actual.getState(), expected.getState());
  Assert.assertEquals(actual.getLaunchedTasks(), expected.getLaunchedTasks());
  Assert.assertEquals(actual.getCompletedTasks(), expected.getCompletedTasks());
  Assert.assertEquals(actual.getLauncherType(), expected.getLauncherType());
  Assert.assertEquals(actual.getTrackingUrl(), expected.getTrackingUrl());
  MetricArray actualMetrics = actual.getMetrics();
  MetricArray expectedMetrics = expected.getMetrics();
  Assert.assertEquals(actualMetrics, expectedMetrics);
  // Per-element comparison pinpoints the first differing metric field on failure.
  for (int idx = 0; idx < actualMetrics.size(); idx++) {
    assertMetric(actualMetrics.get(idx), expectedMetrics.get(idx));
  }
  Assert.assertEquals(actual.getJobProperties(), expected.getJobProperties());
  Assert.assertEquals(actual.getTaskExecutions().size(), expected.getTaskExecutions().size());
  for (int idx = 0; idx < actual.getTaskExecutions().size(); idx++) {
    assertTaskExecution(actual.getTaskExecutions().get(idx), expected.getTaskExecutions().get(idx));
  }
}
/**
 * Asserts field-by-field equality of two {@link TaskExecutionInfo} records, including
 * watermarks, table info, metrics and task properties.
 */
private void assertTaskExecution(TaskExecutionInfo actual, TaskExecutionInfo expected) {
  Assert.assertEquals(actual.getJobId(), expected.getJobId());
  Assert.assertEquals(actual.getTaskId(), expected.getTaskId());
  // A task without a recorded duration is reported as zero.
  if (!expected.hasDuration()) {
    Assert.assertEquals(actual.getDuration().longValue(), 0L);
  } else {
    Assert.assertEquals(actual.getDuration(), expected.getDuration());
  }
  Assert.assertEquals(actual.getState(), expected.getState());
  Assert.assertEquals(actual.getLowWatermark(), expected.getLowWatermark());
  Assert.assertEquals(actual.getHighWatermark(), expected.getHighWatermark());
  Assert.assertEquals(actual.getTable(), expected.getTable());
  MetricArray actualMetrics = actual.getMetrics();
  MetricArray expectedMetrics = expected.getMetrics();
  Assert.assertEquals(actualMetrics, expectedMetrics);
  // Per-element comparison pinpoints the first differing metric field on failure.
  for (int idx = 0; idx < actualMetrics.size(); idx++) {
    assertMetric(actualMetrics.get(idx), expectedMetrics.get(idx));
  }
  Assert.assertEquals(actual.getTaskProperties(), expected.getTaskProperties());
}
/**
 * Asserts that two {@link Metric}s agree on group, name, type and value; comparing each
 * field individually yields a clearer failure message than whole-object equality.
 */
private void assertMetric(Metric actualMetric, Metric expectedMetric) {
  Assert.assertEquals(actualMetric.getGroup(), expectedMetric.getGroup());
  Assert.assertEquals(actualMetric.getName(), expectedMetric.getName());
  Assert.assertEquals(actualMetric.getType(), expectedMetric.getType());
  Assert.assertEquals(actualMetric.getValue(), expectedMetric.getValue());
}
}
| 4,308 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/FsStateStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Predicates;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ClassAliasResolver;
/**
 * Unit tests for {@link FsStateStore}.
 *
 * <p>The tests form an ordered chain (via {@code dependsOnMethods}): {@link #testPut()} seeds
 * the store, and the subsequent tests read it back directly, through an alias, and by store
 * name enumeration.
 */
@Test(groups = { "gobblin.metastore" })
public class FsStateStoreTest {
  private StateStore<State> stateStore;
  private StateStore.Factory stateStoreFactory;
  private Config config;

  @BeforeClass
  public void setUp() throws Exception {
    ClassAliasResolver<StateStore.Factory> resolver =
        new ClassAliasResolver<>(StateStore.Factory.class);
    stateStoreFactory =
        resolver.resolveClass("fs").newInstance();
    // Use the local file system with a test-relative root dir so the test is self-contained.
    config = ConfigFactory.empty().withValue(ConfigurationKeys.STATE_STORE_FS_URI_KEY,
        ConfigValueFactory.fromAnyRef("file:///")).withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY,
        ConfigValueFactory.fromAnyRef("metastore-test")).withValue("fs.permissions.umask-mode",
        ConfigValueFactory.fromAnyRef("022"));
    this.stateStore = stateStoreFactory.createStateStore(config, State.class);

    // cleanup in case files left behind by a prior run
    this.stateStore.delete("testStore");
    this.stateStore.delete("testStore2");
  }

  @Test
  public void testPut() throws IOException {
    List<State> states = Lists.newArrayList();

    State state1 = new State();
    state1.setId("s1");
    state1.setProp("k1", "v1");
    states.add(state1);

    State state2 = new State();
    state2.setId("s2");
    state2.setProp("k2", "v2");
    states.add(state2);

    State state3 = new State();
    state3.setId("s3");
    state3.setProp("k3", "v3");
    states.add(state3);

    Assert.assertFalse(this.stateStore.exists("testStore", "testTable"));
    this.stateStore.putAll("testStore", "testTable", states);
    Assert.assertTrue(this.stateStore.exists("testStore", "testTable"));

    // for testing of getStoreNames
    this.stateStore.putAll("testStore2", "testTable", states);
  }

  @Test(dependsOnMethods = { "testPut" })
  public void testGet() throws IOException {
    assertSeededStates(this.stateStore.getAll("testStore", "testTable"));
  }

  @Test(dependsOnMethods = { "testPut" })
  public void testCreateAlias() throws IOException {
    this.stateStore.createAlias("testStore", "testTable", "testTable1");
    Assert.assertTrue(this.stateStore.exists("testStore", "testTable1"));
  }

  @Test(dependsOnMethods = { "testCreateAlias" })
  public void testGetAlias() throws IOException {
    // Reading through the alias must yield the same states as the original table.
    assertSeededStates(this.stateStore.getAll("testStore", "testTable1"));
  }

  @Test(dependsOnMethods = { "testGetAlias" })
  public void testGetStoreNames() throws IOException {
    List<String> storeNames = this.stateStore.getStoreNames(Predicates.alwaysTrue());
    Collections.sort(storeNames);

    // assertEquals reports the actual size on failure, unlike assertTrue(size() == 2).
    Assert.assertEquals(storeNames.size(), 2);
    Assert.assertEquals(storeNames.get(0), "testStore");
    Assert.assertEquals(storeNames.get(1), "testStore2");
  }

  // Disable backwards compatibility change, since we are doing a major version upgrade
  // .. and this is related to previous migration.
  @Test
  public void testBackwardsCompat() throws IOException {
    // Tests with a state store that was saved before the WritableShim changes
    Config bwConfig = ConfigFactory.load(config);
    URL path = getClass().getResource("/backwardsCompatTestStore");
    Assert.assertNotNull(path);

    bwConfig = bwConfig.withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY,
        ConfigValueFactory.fromAnyRef(path.toString()));

    StateStore<State> bwStateStore = stateStoreFactory.createStateStore(bwConfig, State.class);
    Assert.assertTrue(bwStateStore.exists("testStore", "testTable"));

    assertSeededStates(bwStateStore.getAll("testStore", "testTable"));
  }

  /**
   * Asserts that {@code states} holds exactly the three states written by {@link #testPut()},
   * in insertion order. Shared by the direct-read, alias-read and backwards-compat tests.
   */
  private void assertSeededStates(List<State> states) {
    Assert.assertEquals(states.size(), 3);
    Assert.assertEquals(states.get(0).getProp("k1"), "v1");
    Assert.assertEquals(states.get(0).getId(), "s1");
    Assert.assertEquals(states.get(1).getProp("k2"), "v2");
    Assert.assertEquals(states.get(1).getId(), "s2");
    Assert.assertEquals(states.get(2).getProp("k3"), "v3");
    Assert.assertEquals(states.get(2).getId(), "s3");
  }

  @AfterClass
  public void tearDown() throws IOException {
    // Remove the test-local state store root created by setUp()/testPut().
    FileSystem fs = FileSystem.getLocal(new Configuration(false));
    Path rootDir = new Path("metastore-test");
    if (fs.exists(rootDir)) {
      fs.delete(rootDir, true);
    }
  }
}
| 4,309 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/testing/TestMetastoreDatabaseFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.testing;
import java.io.Closeable;
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigRenderOptions;
/**
 * Hands out per-test databases ({@link ITestMetastoreDatabase}) backed by one lazily started,
 * shared {@link TestMetastoreDatabaseServer}. The shared server is shut down once the last
 * outstanding database instance is released via {@link #release(ITestMetastoreDatabase)}.
 */
public class TestMetastoreDatabaseFactory {
  private static final Object syncObject = new Object();
  private static TestMetastoreDatabaseServer testMetastoreDatabaseServer;
  private static Set<ITestMetastoreDatabase> instances = new HashSet<>();

  private TestMetastoreDatabaseFactory() {
  }

  public static Config getDefaultConfig() {
    return ConfigFactory.defaultOverrides().withFallback(ConfigFactory.load());
  }

  public static ITestMetastoreDatabase get() throws Exception {
    return TestMetastoreDatabaseFactory.get("latest");
  }

  public static ITestMetastoreDatabase get(String version) throws Exception {
    return get(version, getDefaultConfig());
  }

  /**
   * Creates a fresh test database at the given schema {@code version}, starting the shared
   * server first if necessary.
   *
   * @throws RuntimeException wrapping any failure, with the version and rendered config included
   */
  public static ITestMetastoreDatabase get(String version, Config dbConfig) throws Exception {
    try {
      synchronized (syncObject) {
        ensureDatabaseExists(dbConfig);
        TestMetadataDatabase instance = new TestMetadataDatabase(testMetastoreDatabaseServer, version);
        instances.add(instance);
        return instance;
      }
    }
    catch (Exception e) {
      throw new RuntimeException("Failed to create TestMetastoreDatabase with version " + version +
          " and config " + dbConfig.root().render(ConfigRenderOptions.defaults().setFormatted(true).setJson(true))
          + " cause: " + e, e);
    }
  }

  static void release(ITestMetastoreDatabase instance) throws IOException {
    synchronized (syncObject) {
      // Stop the shared server once the last instance has been released.
      if (instances.remove(instance) && instances.isEmpty()) {
        testMetastoreDatabaseServer.close();
        testMetastoreDatabaseServer = null;
      }
    }
  }

  private static void ensureDatabaseExists(Config dbConfig) throws Exception {
    if (testMetastoreDatabaseServer == null) {
      // The file-lock mutex serializes embedded-MySQL startup across concurrent test JVMs.
      try (Mutex ignored = new Mutex()) {
        if (testMetastoreDatabaseServer == null) {
          testMetastoreDatabaseServer = new TestMetastoreDatabaseServer(dbConfig);
        }
      }
    }
  }

  /**
   * A cross-process mutex implemented as an exclusive {@link FileLock} on
   * {@code ~/.embedmysql.lock}. The lock is acquired in the constructor and released by
   * {@link #close()}, so it is usable with try-with-resources.
   */
  private static class Mutex implements Closeable {
    private final Object syncObject = new Object();
    private final AtomicBoolean isLocked = new AtomicBoolean(false);
    private FileChannel fileChannel;
    private FileLock fileLock;

    public Mutex() throws IOException {
      take();
    }

    @Override
    public void close() {
      release();
    }

    /**
     * Acquires the file lock if not already held. Always returns {@code true}; failures
     * surface as {@link IOException}.
     */
    private boolean take() throws IOException {
      synchronized (syncObject) {
        if (!isLocked.get()) {
          if (fileChannel == null) {
            Path lockPath = Paths.get(System.getProperty("user.home")).resolve(".embedmysql.lock");
            fileChannel = FileChannel.open(lockPath, StandardOpenOption.CREATE,
                StandardOpenOption.WRITE, StandardOpenOption.READ);
          }
          // FileChannel.lock() blocks until the exclusive lock is available.
          fileLock = fileChannel.lock();
          isLocked.set(true);
        }
      }
      return true;
    }

    /**
     * Releases the lock and closes the channel. Unlike the previous version, the lock state is
     * always cleared and the channel is always closed, even when closing the lock throws —
     * otherwise a failed release left {@code isLocked} stuck at {@code true} and leaked the
     * channel. Returns {@code false} if any close failed.
     */
    private boolean release() {
      synchronized (syncObject) {
        if (!isLocked.get()) {
          return true;
        }
        boolean result = true;
        if (fileLock != null) {
          try {
            fileLock.close();
          } catch (IOException ignored) {
            result = false;
          }
          fileLock = null;
        }
        if (fileChannel != null) {
          try {
            fileChannel.close();
          } catch (IOException ignored) {
            result = false;
          }
          fileChannel = null;
        }
        isLocked.set(false);
        return result;
      }
    }
  }
}
| 4,310 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/testing/TestMetadataDatabase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.testing;
import org.apache.commons.lang.StringUtils;
import java.io.IOException;
import java.net.URISyntaxException;
import java.sql.SQLException;
import java.util.Objects;
import java.util.UUID;
/**
 * An {@link ITestMetastoreDatabase} bound to a uniquely named database hosted on a shared
 * {@link TestMetastoreDatabaseServer}. Closing the instance drops the database (best effort)
 * and releases it back to {@link TestMetastoreDatabaseFactory}.
 */
class TestMetadataDatabase implements ITestMetastoreDatabase {
  private final TestMetastoreDatabaseServer testMetastoreDatabaseServer;
  private final String database;

  TestMetadataDatabase(TestMetastoreDatabaseServer testMetastoreDatabaseServer, String version) throws Exception {
    this.testMetastoreDatabaseServer = testMetastoreDatabaseServer;
    // Unique name per instance: "gobblin_" followed by a dash-less UUID.
    this.database = "gobblin_" + UUID.randomUUID().toString().replace("-", StringUtils.EMPTY);
    this.resetDatabase(version);
  }

  @Override
  public void close() throws IOException {
    try {
      this.testMetastoreDatabaseServer.drop(this.database);
    } catch (URISyntaxException | SQLException ignored) {
      // Best effort: dropping may fail if the server is already gone.
    } finally {
      // Always release so the factory can shut the shared server down.
      TestMetastoreDatabaseFactory.release(this);
    }
  }

  @Override
  public String getJdbcUrl() throws URISyntaxException {
    return this.testMetastoreDatabaseServer.getJdbcUrl(this.database).toString();
  }

  @Override
  public void resetDatabase(String version) throws Exception {
    this.testMetastoreDatabaseServer.prepareDatabase(this.database, version);
  }

  @Override
  public boolean equals(Object other) {
    if (other == this) {
      return true;
    }
    if (other == null || other.getClass() != getClass()) {
      return false;
    }
    // Identity is determined solely by the unique database name.
    return Objects.equals(this.database, ((TestMetadataDatabase) other).database);
  }

  @Override
  public int hashCode() {
    return Objects.hash(database);
  }
}
| 4,311 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/testing/TestMetastoreDatabaseServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.testing;
import java.io.Closeable;
import java.io.IOException;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.wix.mysql.EmbeddedMysql;
import com.wix.mysql.config.MysqldConfig;
import com.wix.mysql.distribution.Version;
import javax.sql.DataSource;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.MetaStoreModule;
import org.apache.gobblin.metastore.util.DatabaseJobHistoryStoreSchemaManager;
import org.apache.gobblin.metastore.util.MySqlJdbcUrl;
import org.apache.gobblin.util.PortUtils;
/**
 * Manages the MySQL server used by metastore tests: either an embedded Wix MySQL instance
 * (the default) or an externally provisioned server, selected by
 * {@code gobblin.metastore.testing.embeddedMysqlEnabled}.
 */
class TestMetastoreDatabaseServer implements Closeable {
  private static final String INFORMATION_SCHEMA = "information_schema";
  private static final String ROOT_USER = "root";
  private static final String ROOT_PASSWORD = "password";
  private static final String DROP_DATABASE_TEMPLATE = "DROP DATABASE IF EXISTS %s;";
  private static final String CREATE_DATABASE_TEMPLATE = "CREATE DATABASE %s CHARACTER SET = %s COLLATE = %s;";
  private static final String ADD_USER_TEMPLATE = "GRANT ALL ON %s.* TO '%s'@'%%';";

  public static final String CONFIG_PREFIX = "gobblin.metastore.testing";
  public static final String EMBEDDED_MYSQL_ENABLED_KEY = "embeddedMysqlEnabled";
  public static final String EMBEDDED_MYSQL_ENABLED_FULL_KEY =
      CONFIG_PREFIX + "." + EMBEDDED_MYSQL_ENABLED_KEY;
  public static final String DBUSER_NAME_KEY = "testUser";
  public static final String DBUSER_NAME_FULL_KEY = CONFIG_PREFIX + "." + DBUSER_NAME_KEY;
  public static final String DBUSER_PASSWORD_KEY = "testPassword";
  public static final String DBUSER_PASSWORD_FULL_KEY = CONFIG_PREFIX + "." + DBUSER_PASSWORD_KEY;
  public static final String DBHOST_KEY = "dbHost";
  public static final String DBHOST_FULL_KEY = CONFIG_PREFIX + "." + DBHOST_KEY;
  public static final String DBPORT_KEY = "dbPort";
  public static final String DBPORT_FULL_KEY = CONFIG_PREFIX + "." + DBPORT_KEY;

  private final Logger log = LoggerFactory.getLogger(TestMetastoreDatabaseServer.class);
  private final MysqldConfig config;
  private final EmbeddedMysql testingMySqlServer;
  private final boolean embeddedMysqlEnabled;
  private final String dbUserName;
  private final String dbUserPassword;
  private final String dbHost;
  private final int dbPort;

  TestMetastoreDatabaseServer(Config dbConfig) throws Exception {
    Config realConfig = dbConfig.withFallback(getDefaultConfig()).getConfig(CONFIG_PREFIX);
    this.embeddedMysqlEnabled = realConfig.getBoolean(EMBEDDED_MYSQL_ENABLED_KEY);
    this.dbUserName = realConfig.getString(DBUSER_NAME_KEY);
    this.dbUserPassword = realConfig.getString(DBUSER_PASSWORD_KEY);
    // Embedded mode always runs locally on a randomly chosen free port.
    this.dbHost = this.embeddedMysqlEnabled ? "localhost" : realConfig.getString(DBHOST_KEY);
    this.dbPort = this.embeddedMysqlEnabled ? new PortUtils.ServerSocketPortLocator().random() : realConfig.getInt(DBPORT_KEY);

    this.log.info("Starting with config: embeddedMysqlEnabled={} dbUserName={} dbHost={} dbPort={}",
        this.embeddedMysqlEnabled,
        this.dbUserName,
        this.dbHost,
        this.dbPort);

    config = MysqldConfig.aMysqldConfig(Version.v8_latest)
        .withPort(this.dbPort)
        .withUser(this.dbUserName, this.dbUserPassword)
        .withServerVariable("explicit_defaults_for_timestamp", "off")
        .build();
    if (this.embeddedMysqlEnabled) {
      testingMySqlServer = EmbeddedMysql.anEmbeddedMysql(config).start();
    }
    else {
      testingMySqlServer = null;
    }
  }

  static Config getDefaultConfig() {
    return ConfigFactory.parseMap(ImmutableMap.<String, Object>builder()
        .put(EMBEDDED_MYSQL_ENABLED_FULL_KEY, true)
        .put(DBUSER_NAME_FULL_KEY, "testUser")
        .put(DBUSER_PASSWORD_FULL_KEY, "testPassword")
        .put(DBHOST_FULL_KEY, "localhost")
        .put(DBPORT_FULL_KEY, 3306)
        .build());
  }

  /** Drops {@code database} if it exists. */
  public void drop(String database) throws SQLException, URISyntaxException {
    // try-with-resources replaces the old Optional<Connection> + manual finally pattern.
    try (Connection connection = getConnector(getInformationSchemaJdbcUrl())) {
      executeStatement(connection, String.format(DROP_DATABASE_TEMPLATE, database));
    }
  }

  @Override
  public void close() throws IOException {
    // Only the embedded server is owned by this object; external servers are left running.
    if (testingMySqlServer != null) {
      testingMySqlServer.stop();
    }
  }

  /** Builds the JDBC URL for {@code database} using the configured test credentials. */
  MySqlJdbcUrl getJdbcUrl(String database) throws URISyntaxException {
    return getBaseJdbcUrl()
        .setPath(database)
        .setUser(this.dbUserName)
        .setPassword(this.dbUserPassword)
        .setParameter("useLegacyDatetimeCode", "false")
        .setParameter("rewriteBatchedStatements", "true");
  }

  private MySqlJdbcUrl getBaseJdbcUrl() throws URISyntaxException {
    return MySqlJdbcUrl.create()
        .setHost(this.dbHost)
        .setPort(this.dbPort);
  }

  private MySqlJdbcUrl getInformationSchemaJdbcUrl() throws URISyntaxException {
    // embedded mysql has an empty password by default
    String password = this.embeddedMysqlEnabled ? "" : ROOT_PASSWORD;
    return getBaseJdbcUrl()
        .setPath(INFORMATION_SCHEMA)
        .setUser(ROOT_USER)
        .setPassword(password);
  }

  /**
   * Opens a JDBC connection to {@code jdbcUrl} through the Guice-configured {@link DataSource}.
   * The caller owns the connection and must close it. (Previously this returned
   * {@code Optional.of(connection)}, which could never be absent — the wrapper only obscured
   * the contract.)
   */
  private Connection getConnector(MySqlJdbcUrl jdbcUrl) throws SQLException {
    Properties properties = new Properties();
    properties.setProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY, jdbcUrl.toString());
    Injector injector = Guice.createInjector(new MetaStoreModule(properties));
    DataSource dataSource = injector.getInstance(DataSource.class);
    return dataSource.getConnection();
  }

  /** Drops, re-creates and grants the test user access to {@code database}. */
  private void ensureDatabaseExists(String database) throws SQLException, URISyntaxException {
    try (Connection connection = getConnector(getInformationSchemaJdbcUrl())) {
      executeStatements(connection,
          String.format(DROP_DATABASE_TEMPLATE, database),
          String.format(CREATE_DATABASE_TEMPLATE, database,
              config.getCharset().getCharset(), config.getCharset().getCollate()),
          String.format(ADD_USER_TEMPLATE, database, config.getUsername()));
    }
  }

  /** Drops/creates {@code database} and migrates its schema to {@code version}. */
  void prepareDatabase(String database, String version) throws Exception {
    // Drop/create the database
    this.ensureDatabaseExists(database);
    // Deploy the schema
    DatabaseJobHistoryStoreSchemaManager schemaManager =
        DatabaseJobHistoryStoreSchemaManager.builder()
            .setDataSource(getJdbcUrl(database).toString(), this.dbUserName, this.dbUserPassword)
            .setVersion(version)
            .build();
    schemaManager.migrate();
  }

  private void executeStatements(Connection connection, String... statements) throws SQLException {
    for (String statement : statements) {
      executeStatement(connection, statement);
    }
  }

  private void executeStatement(Connection connection, String statement) throws SQLException {
    try (PreparedStatement preparedStatement = connection.prepareStatement(statement)) {
      preparedStatement.execute();
    }
  }
}
| 4,312 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/testing/ITestMetastoreDatabase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.testing;
import java.io.Closeable;
import java.net.URISyntaxException;
/**
 * A per-test metastore database handle. Closing the handle releases the database back to
 * {@link TestMetastoreDatabaseFactory}.
 */
public interface ITestMetastoreDatabase extends Closeable {
  /** Returns the JDBC connection URL for this test database. */
  String getJdbcUrl() throws URISyntaxException;
  /** Re-prepares this database at the given schema {@code version} (drops and re-creates it). */
  void resetDatabase(String version) throws Exception;
}
| 4,313 |
0 | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/test/java/org/apache/gobblin/metastore/nameParser/GuidDatasetUrnStateStoreNameParserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.nameParser;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import org.apache.gobblin.util.guid.Guid;
/**
 * Test for {@link GuidDatasetUrnStateStoreNameParser}.
 */
public class GuidDatasetUrnStateStoreNameParserTest {
  Path jobStateRootDir;
  FileSystem testFs;

  @BeforeTest
  public void setUp()
      throws IOException {
    // A relative path on the local file system keeps the test self-contained.
    this.jobStateRootDir = new Path("testStateStoreParser");
    this.testFs = FileSystem.getLocal(new Configuration());
  }

  @Test
  public void testPersistDatasetUrns()
      throws IOException {
    GuidDatasetUrnStateStoreNameParser parser =
        new GuidDatasetUrnStateStoreNameParser(this.testFs, this.jobStateRootDir);
    parser.persistDatasetUrns(Lists.newArrayList("dataset1", "dataset2"));
    // Persisting should create the V1 urn-to-name map file under the job state root.
    Path urnMapFile = new Path(jobStateRootDir,
        GuidDatasetUrnStateStoreNameParser.StateStoreNameVersion.V1.getDatasetUrnNameMapFile());
    Assert.assertTrue(this.testFs.exists(urnMapFile));
  }

  @Test(dependsOnMethods = {"testPersistDatasetUrns"})
  public void testGetDatasetUrnFromStateStoreName()
      throws IOException {
    // A fresh parser must reload the mapping written by the previous test.
    GuidDatasetUrnStateStoreNameParser parser =
        new GuidDatasetUrnStateStoreNameParser(this.testFs, this.jobStateRootDir);
    Assert.assertEquals(parser.sanitizedNameToDatasetURNMap.size(), 2);
    Assert.assertTrue(parser.sanitizedNameToDatasetURNMap.inverse().containsKey("dataset1"));
    Assert.assertTrue(parser.sanitizedNameToDatasetURNMap.inverse().containsKey("dataset2"));
    // State store names are the GUIDs of the corresponding dataset urns.
    Assert.assertEquals(parser.getStateStoreNameFromDatasetUrn("dataset1"), Guid.fromStrings("dataset1").toString());
    Assert.assertEquals(parser.getStateStoreNameFromDatasetUrn("dataset2"), Guid.fromStrings("dataset2").toString());
  }

  @AfterTest
  public void cleanUp()
      throws IOException {
    if (this.testFs.exists(this.jobStateRootDir)) {
      this.testFs.delete(this.jobStateRootDir, true);
    }
  }
}
| 4,314 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/MysqlDagStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import javax.sql.DataSource;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.MysqlStateStore;
import org.apache.gobblin.service.ServiceConfigKeys;
/**
 * An implementation of {@link MysqlStateStore} backed by MySQL to store Dag.
 *
 * <p>Note: this doc comment must precede the annotations — the javadoc tool only associates a
 * doc comment written immediately before the declaration (annotations may follow it), so the
 * previous placement after {@code @Slf4j} was ignored by javadoc.
 *
 * @param <T> state object type
 **/
@Slf4j
public class MysqlDagStore<T extends State> extends MysqlStateStore<T> {
  /**
   * Manages the persistence and retrieval of {@link State} in a MySQL database
   * @param dataSource the {@link DataSource} object for connecting to MySQL
   * @param stateStoreTableName the table for storing the state in rows keyed by two levels (store_name, table_name)
   * @param compressedValues should values be compressed for storage?
   * @param stateClass class of the {@link State}s stored in this state store
   * @throws IOException in case of failures
   */
  public MysqlDagStore(DataSource dataSource, String stateStoreTableName, boolean compressedValues,
      Class<T> stateClass)
      throws IOException {
    super(dataSource, stateStoreTableName, compressedValues, stateClass);
  }

  /**
   * Returns the DDL template for the DAG state table. Column widths are derived from
   * {@link ServiceConfigKeys} limits: store_name is sized for a flow group and flow name joined
   * by the key separator, and table_name for a flow execution id. The latin1 binary collation
   * makes key comparisons byte-exact.
   */
  @Override
  protected String getCreateJobStateTableTemplate() {
    int maxStoreName = ServiceConfigKeys.MAX_FLOW_NAME_LENGTH + ServiceConfigKeys.STATE_STORE_KEY_SEPARATION_CHARACTER.length()
        + ServiceConfigKeys.MAX_FLOW_GROUP_LENGTH;
    int maxTableName = ServiceConfigKeys.MAX_FLOW_EXECUTION_ID_LENGTH;
    return "CREATE TABLE IF NOT EXISTS $TABLE$ (store_name varchar(" + maxStoreName + ") CHARACTER SET latin1 COLLATE latin1_bin not null,"
        + "table_name varchar(" + maxTableName + ") CHARACTER SET latin1 COLLATE latin1_bin not null,"
        + " modified_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
        + " state longblob, primary key(store_name, table_name))";
  }
}
| 4,315 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/runtime/StateStoreBasedWatermarkStorage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import com.google.common.base.Preconditions;
import com.google.gson.Gson;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.StateStore;
import org.apache.gobblin.source.extractor.CheckpointableWatermark;
import org.apache.gobblin.util.ClassAliasResolver;
import org.apache.gobblin.util.io.GsonInterfaceAdapter;
import org.apache.gobblin.writer.WatermarkStorage;
/**
* A Watermark storage implementation that can use any {@link StateStore} for persistence.
*/
@Slf4j
public class StateStoreBasedWatermarkStorage implements WatermarkStorage {

  private static final Gson GSON = GsonInterfaceAdapter.getGson(Object.class);

  public static final String WATERMARK_STORAGE_TYPE_KEY ="streaming.watermarkStateStore.type";
  public static final String WATERMARK_STORAGE_TYPE_DEFAULT ="zk";
  public static final String WATERMARK_STORAGE_CONFIG_PREFIX="streaming.watermarkStateStore.config.";

  /**
   * A watermark prefix that is compatible with different watermark storage implementations.
   * As such, this prefix should not include any characters disallowed in a {@link java.net.URI}.
   */
  protected static final String WATERMARK_STORAGE_PREFIX="streamingWatermarks_";

  // Underlying state store; one table per watermark source under a per-job store name.
  public final StateStore<CheckpointableWatermarkState> _stateStore;
  protected final String _storeName;

  /**
   * A private method that creates a state store config.
   *
   * <p>Every property prefixed with {@link #WATERMARK_STORAGE_CONFIG_PREFIX} is duplicated with
   * the prefix stripped so the state-store implementation sees it under its expected name. The
   * caller's {@link State} is left unmodified (previously the stripped keys were written back
   * into the task state's own Properties as a side effect).
   *
   * @return a filled out config that can be passed on to a state store.
   */
  Config getStateStoreConfig(State state) {
    // Copy into a fresh Properties so the caller's State is not mutated.
    Properties original = state.getProperties();
    Properties properties = new Properties();
    for (String key : original.stringPropertyNames()) {
      String value = original.getProperty(key);
      properties.setProperty(key, value);
      // Select and prefix-strip all properties prefixed by WATERMARK_STORAGE_CONFIG_PREFIX
      if (key.startsWith(WATERMARK_STORAGE_CONFIG_PREFIX)) {
        properties.setProperty(key.substring(WATERMARK_STORAGE_CONFIG_PREFIX.length()), value);
      }
    }

    Config config = ConfigFactory.parseProperties(properties);

    // Defaults
    if (!config.hasPath(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY)) {
      config = config.withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY,
          ConfigValueFactory.fromAnyRef("/streamingWatermarks"));
    }
    return config;
  }

  /**
   * Builds a watermark storage backed by the state store type configured under
   * {@link #WATERMARK_STORAGE_TYPE_KEY} (default: {@value #WATERMARK_STORAGE_TYPE_DEFAULT}).
   *
   * @param taskState task state carrying the job name and state-store configuration; must not be null
   */
  public StateStoreBasedWatermarkStorage(State taskState) {
    Preconditions.checkArgument(taskState != null);
    Preconditions.checkArgument(!taskState.getProp(ConfigurationKeys.JOB_NAME_KEY).isEmpty());
    String watermarkStateStoreType = taskState.getProp(WATERMARK_STORAGE_TYPE_KEY, WATERMARK_STORAGE_TYPE_DEFAULT);

    ClassAliasResolver<StateStore.Factory> resolver =
        new ClassAliasResolver<>(StateStore.Factory.class);
    StateStore.Factory stateStoreFactory;
    try {
      stateStoreFactory = resolver.resolveClass(watermarkStateStoreType).newInstance();
    } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
      // Fail fast: a misconfigured watermark store type is unrecoverable.
      throw new RuntimeException(e);
    }

    Config config = getStateStoreConfig(taskState);
    _stateStore = stateStoreFactory.createStateStore(config, CheckpointableWatermarkState.class);
    _storeName = WATERMARK_STORAGE_PREFIX + taskState.getProp(ConfigurationKeys.JOB_NAME_KEY);
    log.info("State Store directory configured as : {}", config.getString(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY));
    log.info("Configured the StateStoreBasedWatermarkStorage with storeName: {}", _storeName);
  }

  /**
   * Persists each watermark as one state-store entry keyed by its source name.
   */
  @Override
  public void commitWatermarks(Iterable<CheckpointableWatermark> watermarks)
      throws IOException {
    for (CheckpointableWatermark watermark: watermarks) {
      String tableName = watermark.getSource();
      _stateStore.put(_storeName, tableName, new CheckpointableWatermarkState(watermark, GSON));
      log.info("Committed watermark {} for table {}", watermark.getWatermark().toString(), tableName);
    }
  }

  /**
   * Reads back the committed watermark for each requested source partition; partitions with no
   * committed state are simply absent from the returned map.
   */
  @Override
  public Map<String, CheckpointableWatermark> getCommittedWatermarks(Class<? extends CheckpointableWatermark> watermarkClass,
      Iterable<String> sourcePartitions)
      throws IOException {
    Map<String, CheckpointableWatermark> committed = new HashMap<String, CheckpointableWatermark>();
    for (String sourcePartition: sourcePartitions) {
      // Table name and state id are both the source partition (mirrors commitWatermarks above).
      CheckpointableWatermarkState watermarkState = _stateStore.get(_storeName, sourcePartition, sourcePartition);
      if (watermarkState != null) {
        CheckpointableWatermark watermark = GSON.fromJson(watermarkState.getProp(sourcePartition), watermarkClass);
        committed.put(sourcePartition, watermark);
      }
    }
    if (committed.isEmpty()) {
      log.warn("Didn't find any committed watermarks");
    }
    return committed;
  }

  /** Returns every committed watermark state stored under this job's store. */
  public Iterable<CheckpointableWatermarkState> getAllCommittedWatermarks() throws IOException {
    return _stateStore.getAll(_storeName);
  }

  /** Deletes the watermark entries for the given source tables. */
  public void deleteWatermarks(List<String> tableNames) throws IOException {
    _stateStore.delete(_storeName, tableNames);
  }
}
| 4,316 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/DatasetStateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.google.common.base.Strings;
import com.typesafe.config.Config;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
import org.apache.gobblin.util.ClassAliasResolver;
import org.apache.gobblin.util.ConfigUtils;
import lombok.Getter;
public interface DatasetStateStore<T extends State> extends StateStore<T> {

  String DATASET_STATE_STORE_TABLE_SUFFIX = ".jst";
  String CURRENT_DATASET_STATE_FILE_SUFFIX = "current";

  // Matches "<sanitizedDatasetUrn>-<stateId>.jst"; the urn group is optional.
  Pattern TABLE_NAME_PARSER_PATTERN = Pattern.compile("^(?:(.+)-)?([^-]+)\\.jst$");

  interface Factory {
    <T extends State> DatasetStateStore<T> createStateStore(Config config);
  }

  /** Returns the latest dataset states of the given job, keyed by dataset URN. */
  Map<String, T> getLatestDatasetStatesByUrns(String jobName) throws IOException;

  /** Returns the latest state of {@code datasetUrn} within {@code storeName}. */
  T getLatestDatasetState(String storeName, String datasetUrn) throws IOException;

  /** Persists the given dataset state under {@code datasetUrn}. */
  void persistDatasetState(String datasetUrn, T datasetState) throws IOException;

  /** Persists the collection of dataset URNs known for {@code storeName}. */
  void persistDatasetURNs(String storeName, Collection<String> datasetUrns) throws IOException;

  @Override
  default List<? extends DatasetStateStoreEntryManager> getMetadataForTables(StateStorePredicate predicate)
      throws IOException {
    // Optional capability; implementations that support metadata listing override this.
    throw new UnsupportedOperationException();
  }

  /** Hook to sanitize a dataset URN into a store-safe name; identity by default. */
  default String sanitizeDatasetStatestoreNameFromDatasetURN(String storeName, String datasetURN) throws IOException {
    return datasetURN;
  }

  /** Builds a state table name of the form "[sanitizedUrn-]stateId.jst". */
  static String buildTableName(DatasetStateStore store, String storeName, String stateId, String datasetUrn) throws IOException {
    return Strings.isNullOrEmpty(datasetUrn) ? stateId + DATASET_STATE_STORE_TABLE_SUFFIX
        : store.sanitizeDatasetStatestoreNameFromDatasetURN(storeName, datasetUrn) + "-" + stateId + DATASET_STATE_STORE_TABLE_SUFFIX;
  }

  /** Splits a table name produced by {@link #buildTableName} back into urn and state id. */
  @Getter
  class TableNameParser {
    private final String sanitizedDatasetUrn;
    private final String stateId;

    public TableNameParser(String tableName) {
      Matcher matcher = TABLE_NAME_PARSER_PATTERN.matcher(tableName);
      if (matcher.matches()) {
        this.sanitizedDatasetUrn = matcher.group(1);
        this.stateId = matcher.group(2);
      } else {
        throw new IllegalArgumentException("Cannot parse table name " + tableName);
      }
    }
  }

  /**
   * Instantiates the {@link DatasetStateStore} implementation selected by the
   * state-store-type config key (falling back to the configured default type).
   */
  static DatasetStateStore buildDatasetStateStore(Config config) throws IOException {
    ClassAliasResolver<Factory> resolver =
        new ClassAliasResolver<>(DatasetStateStore.Factory.class);
    String stateStoreType = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_TYPE_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_TYPE);

    try {
      DatasetStateStore.Factory stateStoreFactory =
          resolver.resolveClass(stateStoreType).newInstance();
      return stateStoreFactory.createStateStore(config);
    } catch (RuntimeException e) {
      // Preserve runtime exceptions as-is; wrap only checked ones.
      throw e;
    } catch (Exception e) {
      throw new IOException(e);
    }
  }
}
| 4,317 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlJobStatusStateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import com.google.common.collect.Lists;
import javax.sql.DataSource;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
import org.apache.gobblin.metastore.predicates.StoreNamePredicate;
import org.apache.gobblin.service.ServiceConfigKeys;
/**
 * An implementation of {@link MysqlStateStore} backed by MySQL to store JobStatuses.
 *
 * @param <T> state object type
 **/
@Slf4j
public class MysqlJobStatusStateStore<T extends State> extends MysqlStateStore<T> implements DatasetStateStore<T> {

  /**
   * Manages the persistence and retrieval of {@link State} in a MySQL database
   * @param dataSource the {@link DataSource} object for connecting to MySQL
   * @param stateStoreTableName the table for storing the state in rows keyed by two levels (store_name, table_name)
   * @param compressedValues should values be compressed for storage?
   * @param stateClass class of the {@link State}s stored in this state store
   * @throws IOException in case of failures
   */
  public MysqlJobStatusStateStore(DataSource dataSource, String stateStoreTableName, boolean compressedValues,
      Class<T> stateClass)
      throws IOException {
    super(dataSource, stateStoreTableName, compressedValues, stateClass);
  }

  /**
   * Returns all the job statuses for a flow group, flow name, flow execution id
   * @param storeName store name in the state store
   * @param flowExecutionId Flow Execution Id
   * @return list of states
   * @throws IOException in case of failures
   */
  public List<T> getAll(String storeName, long flowExecutionId) throws IOException {
    // Table names start with the flow execution id, so a prefix match picks up all jobs of the execution.
    return getAll(storeName, flowExecutionId + "%", JobStateSearchColumns.TABLE_NAME_ONLY);
  }

  /**
   * Returns all the job statuses for a flow group (across all flows)
   * @param storeNamePrefix initial substring (flow group portion) for store name in the state store
   * @return list of states
   * @throws IOException in case of failures
   */
  public List<T> getAllWithPrefix(String storeNamePrefix) throws IOException {
    return getAll(storeNamePrefix + "%", "%", JobStateSearchColumns.STORE_NAME_AND_TABLE_NAME);
  }

  @Override
  protected String getCreateJobStateTableTemplate() {
    // Key columns are sized from the service-level limits on flow/job identifier lengths.
    int maxStoreName = ServiceConfigKeys.MAX_FLOW_NAME_LENGTH + ServiceConfigKeys.STATE_STORE_KEY_SEPARATION_CHARACTER.length()
        + ServiceConfigKeys.MAX_FLOW_GROUP_LENGTH;
    int maxTableName = 13 // length of flowExecutionId which is epoch timestamp
        + ServiceConfigKeys.STATE_STORE_KEY_SEPARATION_CHARACTER.length() + ServiceConfigKeys.MAX_JOB_NAME_LENGTH
        + ServiceConfigKeys.STATE_STORE_KEY_SEPARATION_CHARACTER.length() + ServiceConfigKeys.MAX_JOB_GROUP_LENGTH
        + ServiceConfigKeys.STATE_STORE_KEY_SEPARATION_CHARACTER.length() + ServiceConfigKeys.STATE_STORE_TABLE_SUFFIX.length();
    return "CREATE TABLE IF NOT EXISTS $TABLE$ (store_name varchar(" + maxStoreName + ") CHARACTER SET latin1 COLLATE latin1_bin not null,"
        + "table_name varchar(" + maxTableName + ") CHARACTER SET latin1 COLLATE latin1_bin not null,"
        + " modified_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
        + " state longblob, primary key(store_name, table_name))";
  }

  @Override
  public List<DatasetStateStoreEntryManager<T>> getMetadataForTables(StateStorePredicate predicate)
      throws IOException {
    List<DatasetStateStoreEntryManager<T>> entryManagers = Lists.newArrayList();

    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_METADATA_SQL)) {
      String storeName = predicate instanceof StoreNamePredicate ? ((StoreNamePredicate) predicate).getStoreName() : "%";
      queryStatement.setString(1, storeName);

      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          String rsStoreName = rs.getString(1);
          String rsTableName = rs.getString(2);
          Timestamp timestamp = rs.getTimestamp(3);

          DatasetStateStoreEntryManager<T> entryManager =
              new MysqlJobStatusStateStoreEntryManager<>(rsStoreName, rsTableName, timestamp.getTime(), this);
          if (predicate.apply(entryManager)) {
            // Reuse the entry manager built for the predicate check instead of constructing a duplicate.
            entryManagers.add(entryManager);
          }
        }
      }
    } catch (SQLException e) {
      throw new IOException("failure getting metadata for tables", e);
    }

    return entryManagers;
  }

  /** Dataset-state operations below are not applicable to a job-status store. */
  @Override
  public Map<String, T> getLatestDatasetStatesByUrns(String jobName) {
    throw new UnsupportedOperationException();
  }

  @Override
  public T getLatestDatasetState(String storeName, String datasetUrn) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void persistDatasetState(String datasetUrn, T datasetState) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void persistDatasetURNs(String storeName, Collection<String> datasetUrns) {
    throw new UnsupportedOperationException();
  }
}
| 4,318 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlDataSourceFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import javax.sql.DataSource;
import com.typesafe.config.Config;
import org.apache.gobblin.broker.ResourceInstance;
import org.apache.gobblin.broker.iface.ConfigView;
import org.apache.gobblin.broker.iface.NotConfiguredException;
import org.apache.gobblin.broker.iface.ScopeType;
import org.apache.gobblin.broker.iface.ScopedConfigView;
import org.apache.gobblin.broker.iface.SharedResourceFactory;
import org.apache.gobblin.broker.iface.SharedResourceFactoryResponse;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import lombok.extern.slf4j.Slf4j;
/**
* A {@link SharedResourceFactory} for creating {@link DataSource}s.
*
* The factory creates a {@link DataSource} with the config.
*/
@Slf4j
public class MysqlDataSourceFactory<S extends ScopeType<S>>
    implements SharedResourceFactory<DataSource, MysqlDataSourceKey, S> {

  // WARNING: now a misnomer, but retained for legacy compatibility, despite move from `o.a.commons.dbcp.BasicDataSource` to `HikariCP`
  public static final String FACTORY_NAME = "basicDataSource";

  /**
   * Obtains the shared {@link DataSource} for the given config from the broker.
   *
   * @param config configuration
   * @param broker broker
   * @return a {@link DataSource}
   * @throws IOException if the resource is not configured
   */
  public static <S extends ScopeType<S>> DataSource get(Config config,
      SharedResourcesBroker<S> broker) throws IOException {
    MysqlDataSourceKey key = new MysqlDataSourceKey(MysqlStateStore.getDataSourceId(config), config);
    try {
      return broker.getSharedResource(new MysqlDataSourceFactory<S>(), key);
    } catch (NotConfiguredException e) {
      throw new IOException(e);
    }
  }

  @Override
  public String getName() {
    return FACTORY_NAME;
  }

  /** Builds a fresh {@link DataSource} from the configuration carried by the resource key. */
  @Override
  public SharedResourceFactoryResponse<DataSource> createResource(SharedResourcesBroker<S> broker,
      ScopedConfigView<S, MysqlDataSourceKey> config) throws NotConfiguredException {
    Config dataSourceConfig = config.getKey().getConfig();
    return new ResourceInstance<>(MysqlStateStore.newDataSource(dataSourceConfig));
  }

  /** Data sources are shared at the root scope by default. */
  @Override
  public S getAutoScope(SharedResourcesBroker<S> broker, ConfigView<S, MysqlDataSourceKey> config) {
    return broker.selfScope().getType().rootScope();
  }
}
| 4,319 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/DatasetStoreDataset.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.util.List;
import com.google.common.base.Strings;
import org.apache.gobblin.dataset.Dataset;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import lombok.Data;
/**
* A {@link Dataset} representing a group of entries in a {@link DatasetStateStore} with the same dataset urn.
*/
@Data
public class DatasetStoreDataset implements Dataset {

  private final Key key;
  private final List<DatasetStateStoreEntryManager> datasetStateStoreMetadataEntries;

  /**
   * URN format: "storeName" when no sanitized dataset urn is present,
   * otherwise "storeName:::sanitizedDatasetUrn".
   */
  @Override
  public String datasetURN() {
    String sanitizedUrn = this.key.getSanitizedDatasetUrn();
    if (Strings.isNullOrEmpty(sanitizedUrn)) {
      return this.key.getStoreName();
    }
    return this.key.getStoreName() + ":::" + sanitizedUrn;
  }

  /**
   * The key for a {@link DatasetStoreDataset}.
   */
  @Data
  public static class Key {
    private final String storeName;
    private final String sanitizedDatasetUrn;

    /** Derives the grouping key from a state-store entry's metadata. */
    public Key(DatasetStateStoreEntryManager metadata) {
      this.storeName = metadata.getStoreName();
      this.sanitizedDatasetUrn = metadata.getSanitizedDatasetUrn();
    }
  }
}
| 4,320 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/JobHistoryStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.Closeable;
import java.io.IOException;
import java.util.List;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobExecutionQuery;
/**
* An interface for stores that store job execution information.
*
* @author Yinan Li
*/
public interface JobHistoryStore extends Closeable {

  /**
   * Insert a new or update an existing job execution record.
   *
   * @param jobExecutionInfo a {@link JobExecutionInfo} record
   * @throws java.io.IOException if the insertion or update fails
   */
  void put(JobExecutionInfo jobExecutionInfo)
      throws IOException;

  /**
   * Get a list of {@link JobExecutionInfo} records as results of the given query.
   *
   * @param query a {@link JobExecutionQuery} instance
   * @return a list of {@link JobExecutionInfo} records
   * @throws IOException if the query fails
   */
  List<JobExecutionInfo> get(JobExecutionQuery query)
      throws IOException;
}
| 4,321 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/FsStateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.HadoopUtils;
import org.apache.gobblin.util.WritableShimSerialization;
import org.apache.gobblin.util.hadoop.GobblinSequenceFileReader;
import static org.apache.gobblin.util.HadoopUtils.FS_SCHEMES_NON_ATOMIC;
/**
* An implementation of {@link StateStore} backed by a {@link FileSystem}.
*
* <p>
* This implementation uses Hadoop {@link org.apache.hadoop.io.SequenceFile}
* to store {@link State}s. Each store maps to one directory, and each
* table maps to one file under the store directory. Keys are state IDs
* (see {@link State#getId()}), and values are objects of {@link State} or
* any of its extensions. Keys will be empty strings if state IDs are not set
* (i.e., {@link State#getId()} returns <em>null</em>). In this case, the
* {@link FsStateStore#get(String, String, String)} method may not work.
* </p>
*
* @param <T> state object type
*
* @author Yinan Li
*/
public class FsStateStore<T extends State> implements StateStore<T> {
// Prefix for staging files written before being renamed into place.
public static final String TMP_FILE_PREFIX = "_tmp_";

// Hadoop configuration used for SequenceFile readers/writers (includes WritableShim serializers).
protected final Configuration conf;
// Backing file system: one store == one directory, one table == one SequenceFile in it.
protected final FileSystem fs;
// True unless the FS scheme is listed in FS_SCHEMES_NON_ATOMIC; controls tmp-file + rename writes.
protected boolean useTmpFileForPut;

// Root directory for the task state store
protected final String storeRootDir;

// Class of the state objects to be put into the store
protected final Class<T> stateClass;
/**
 * Creates a state store rooted at {@code storeRootDir} on the file system identified by {@code fsUri}.
 *
 * @param fsUri URI of the target file system
 * @param storeRootDir root directory; each store becomes a sub-directory under it
 * @param stateClass class of the {@link State}s stored in this state store
 * @throws IOException if the file system cannot be obtained
 */
public FsStateStore(String fsUri, String storeRootDir, Class<T> stateClass) throws IOException {
  this.conf = getConf(null);
  this.fs = FileSystem.get(URI.create(fsUri), this.conf);
  // Stage writes via a tmp file unless the scheme is known to lack atomic rename.
  this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
  this.storeRootDir = storeRootDir;
  this.stateClass = stateClass;
}
/**
 * Get a Hadoop configuration that understands how to (de)serialize WritableShim objects,
 * optionally seeded from {@code otherConf}.
 */
private Configuration getConf(Configuration otherConf) {
  Configuration conf = (otherConf == null) ? new Configuration() : new Configuration(otherConf);
  WritableShimSerialization.addToHadoopConfiguration(conf);
  return conf;
}
/**
 * Creates a state store on an already-constructed {@link FileSystem}, rooted at {@code storeRootDir}.
 *
 * @param fs the file system backing the store
 * @param storeRootDir root directory; each store becomes a sub-directory under it
 * @param stateClass class of the {@link State}s stored in this state store
 */
public FsStateStore(FileSystem fs, String storeRootDir, Class<T> stateClass) {
  this.fs = fs;
  // Stage writes via a tmp file unless the scheme is known to lack atomic rename.
  this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
  this.conf = getConf(this.fs.getConf());
  this.storeRootDir = storeRootDir;
  this.stateClass = stateClass;
}
/**
 * Creates a state store rooted at the path portion of {@code storeUrl}; the file system
 * is derived from the URL itself.
 *
 * @param storeUrl full URL (scheme + path) of the store root
 * @param stateClass class of the {@link State}s stored in this state store
 * @throws IOException if the file system cannot be obtained
 */
public FsStateStore(String storeUrl, Class<T> stateClass) throws IOException {
  this.conf = getConf(null);
  Path storePath = new Path(storeUrl);
  this.fs = storePath.getFileSystem(this.conf);
  this.useTmpFileForPut = !FS_SCHEMES_NON_ATOMIC.contains(this.fs.getUri().getScheme());
  // Only the path component is kept as the root; the scheme/authority live in `fs`.
  this.storeRootDir = storePath.toUri().getPath();
  this.stateClass = stateClass;
}
/** Ensures the store directory exists; succeeds if it already does. */
@Override
public boolean create(String storeName) throws IOException {
  Path storeDir = new Path(this.storeRootDir, storeName);
  if (this.fs.exists(storeDir)) {
    return true;
  }
  return this.fs.mkdirs(storeDir, new FsPermission((short) 0755));
}
/**
 * Creates an empty table file under the store, creating the store directory first if needed.
 * Fails with an {@link IOException} if the table file already exists.
 */
@Override
public boolean create(String storeName, String tableName) throws IOException {
  Path storeDir = new Path(this.storeRootDir, storeName);
  boolean storeReady = this.fs.exists(storeDir) || create(storeName);
  if (!storeReady) {
    return false;
  }

  Path tableFile = new Path(storeDir, tableName);
  if (this.fs.exists(tableFile)) {
    throw new IOException(String.format("State file %s already exists for table %s", tableFile, tableName));
  }
  return this.fs.createNewFile(tableFile);
}
/** Checks whether the table file exists under the given store. */
@Override
public boolean exists(String storeName, String tableName) throws IOException {
  Path storeDir = new Path(this.storeRootDir, storeName);
  return this.fs.exists(new Path(storeDir, tableName));
}
/**
 * See {@link StateStore#put(String, String, T)}.
 *
 * <p>
 * This implementation does not support putting the state object into an existing store as
 * append is to be supported by the Hadoop SequenceFile (HADOOP-7139).
 * </p>
 */
@Override
public void put(String storeName, String tableName, T state) throws IOException {
  // Stage into a tmp file and rename into place afterwards (skipped when the FS scheme
  // lacks atomic rename — see useTmpFileForPut).
  String tmpTableName = this.useTmpFileForPut ? TMP_FILE_PREFIX + tableName : tableName;
  Path tmpTablePath = new Path(new Path(this.storeRootDir, storeName), tmpTableName);
  if (!this.fs.exists(tmpTablePath) && !create(storeName, tmpTableName)) {
    throw new IOException("Failed to create a state file for table " + tmpTableName);
  }

  Closer closer = Closer.create();
  try {
    @SuppressWarnings("deprecation")
    SequenceFile.Writer writer = closer.register(SequenceFile.createWriter(this.fs, this.conf, tmpTablePath,
        Text.class, this.stateClass, SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
    // Record key is the state ID (empty string when unset); value is the state object itself.
    writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }

  if (this.useTmpFileForPut) {
    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    renamePath(tmpTablePath, tablePath);
  }
}
/**
 * See {@link StateStore#putAll(String, String, Collection)}.
 *
 * <p>
 * This implementation does not support putting the state objects into an existing store as
 * append is to be supported by the Hadoop SequenceFile (HADOOP-7139).
 * </p>
 */
@Override
public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
  // Stage the whole batch into a tmp file and rename into place afterwards (skipped when
  // the FS scheme lacks atomic rename — see useTmpFileForPut).
  String tmpTableName = this.useTmpFileForPut ? TMP_FILE_PREFIX + tableName : tableName;
  Path tmpTablePath = new Path(new Path(this.storeRootDir, storeName), tmpTableName);
  if (!this.fs.exists(tmpTablePath) && !create(storeName, tmpTableName)) {
    throw new IOException("Failed to create a state file for table " + tmpTableName);
  }

  Closer closer = Closer.create();
  try {
    @SuppressWarnings("deprecation")
    SequenceFile.Writer writer = closer.register(SequenceFile.createWriter(this.fs, this.conf, tmpTablePath,
        Text.class, this.stateClass, SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
    for (T state : states) {
      // Record key is the state ID (empty string when unset); value is the state object itself.
      writer.append(new Text(Strings.nullToEmpty(state.getId())), state);
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }

  if (this.useTmpFileForPut) {
    Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
    renamePath(tmpTablePath, tablePath);
  }
}
/**
 * Renames the staged tmp table file to its final table path; protected so subclasses
 * can substitute their own rename behavior.
 */
protected void renamePath(Path tmpTablePath, Path tablePath) throws IOException {
  HadoopUtils.renamePath(this.fs, tmpTablePath, tablePath);
}
/**
 * Scans the table's SequenceFile for the record whose key equals {@code stateId} and returns
 * the corresponding state (with its ID set); returns {@code null} when the table does not
 * exist or no record matches.
 */
@Override
@SuppressWarnings("unchecked")
public T get(String storeName, String tableName, String stateId) throws IOException {
  Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
  if (!this.fs.exists(tablePath)) {
    // A missing table is treated as "no such state", not an error.
    return null;
  }

  Closer closer = Closer.create();
  try {
    @SuppressWarnings("deprecation")
    GobblinSequenceFileReader reader = closer.register(new GobblinSequenceFileReader(this.fs, tablePath, this.conf));
    try {
      Text key = new Text();
      T state = this.stateClass.newInstance();
      // Linear scan over (key, state) records until the requested state ID is found.
      while (reader.next(key)) {
        state = (T)reader.getCurrentValue(state);
        if (key.toString().equals(stateId)) {
          state.setId(stateId);
          return state;
        }
      }
    } catch (Exception e) {
      throw new IOException("failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }

  return null;
}
/**
 * Reads every record in the table's SequenceFile, setting each state's ID from the record
 * key; returns an empty list when the table does not exist.
 */
@Override
@SuppressWarnings("unchecked")
public List<T> getAll(String storeName, String tableName) throws IOException {
  List<T> states = Lists.newArrayList();

  Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
  if (!this.fs.exists(tablePath)) {
    // A missing table yields an empty result, not an error.
    return states;
  }

  Closer closer = Closer.create();
  try {
    @SuppressWarnings("deprecation")
    GobblinSequenceFileReader reader = closer.register(new GobblinSequenceFileReader(this.fs, tablePath, this.conf));
    try {
      Text key = new Text();
      T state = this.stateClass.newInstance();
      while (reader.next(key)) {
        state = (T)reader.getCurrentValue(state);
        state.setId(key.toString());
        states.add(state);
        // We need a new object for each read state
        state = this.stateClass.newInstance();
      }
    } catch (Exception e) {
      throw new IOException("failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }

  return states;
}
/** Aggregates the states of every table in the store; empty when the store doesn't exist. */
@Override
public List<T> getAll(String storeName) throws IOException {
  Path storeDir = new Path(this.storeRootDir, storeName);
  List<T> states = Lists.newArrayList();
  if (this.fs.exists(storeDir)) {
    for (FileStatus status : this.fs.listStatus(storeDir)) {
      states.addAll(getAll(storeName, status.getPath().getName()));
    }
  }
  return states;
}
@Override
public List<String> getTableNames(String storeName, Predicate<String> predicate) throws IOException {
List<String> names = Lists.newArrayList();
Path storePath = new Path(this.storeRootDir, storeName);
if (!this.fs.exists(storePath)) {
return names;
}
for (FileStatus status : this.fs.listStatus(storePath)) {
if (predicate.apply(status.getPath().getName())) {
names.add(status.getPath().getName());
}
}
return names;
}
/**
* Get store names in the state store
*
* @param predicate only returns names matching predicate
* @return (possibly empty) list of store names from the given store
* @throws IOException
*/
public List<String> getStoreNames(Predicate<String> predicate)
throws IOException {
List<String> names = Lists.newArrayList();
Path storeRootPath = new Path(this.storeRootDir);
if (!this.fs.exists(storeRootPath)) {
return names;
}
for (FileStatus status : this.fs.listStatus(storeRootPath)) {
if (predicate.apply(status.getPath().getName())) {
names.add(status.getPath().getName());
}
}
return names;
}
@Override
public void createAlias(String storeName, String original, String alias) throws IOException {
Path originalTablePath = new Path(new Path(this.storeRootDir, storeName), original);
if (!this.fs.exists(originalTablePath)) {
throw new IOException(String.format("State file %s does not exist for table %s", originalTablePath, original));
}
Path aliasTablePath = new Path(new Path(this.storeRootDir, storeName), alias);
Path tmpAliasTablePath = new Path(aliasTablePath.getParent(), new Path(TMP_FILE_PREFIX, aliasTablePath.getName()));
// Make a copy of the original table as a work-around because
// Hadoop version 1.2.1 has no support for symlink yet.
HadoopUtils.copyFile(this.fs, originalTablePath, this.fs, aliasTablePath, tmpAliasTablePath, true, this.conf);
}
@Override
public void delete(String storeName, String tableName) throws IOException {
Path tablePath = new Path(new Path(this.storeRootDir, storeName), tableName);
if (this.fs.exists(tablePath)) {
this.fs.delete(tablePath, false);
}
}
@Override
public void delete(String storeName) throws IOException {
Path storePath = new Path(this.storeRootDir, storeName);
if (this.fs.exists(storePath)) {
this.fs.delete(storePath, true);
}
}
}
| 4,322 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/FileContextBasedFsStateStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.net.URI;
import java.util.Map;
import org.apache.gobblin.annotation.Alias;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValue;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
@Alias("fc")
public class FileContextBasedFsStateStoreFactory implements StateStore.Factory {
  /**
   * Builds a {@link FileContextBasedFsStateStore} rooted at the configured state store
   * directory, on the file system named by the configured (or default local) FS URI.
   *
   * @param config configuration holding the FS URI and state store root dir
   * @param stateClass class of the {@link State}s the store will hold
   * @return a new state store instance
   * @throws RuntimeException wrapping any {@link IOException} raised while obtaining the FS
   */
  @Override
  public <T extends State> StateStore<T> createStateStore(Config config, Class<T> stateClass) {
    // Add all job configuration properties so they are picked up by Hadoop
    Configuration hadoopConf = new Configuration();
    for (Map.Entry<String, ConfigValue> entry : config.entrySet()) {
      hadoopConf.set(entry.getKey(), entry.getValue().unwrapped().toString());
    }
    try {
      String fsUri =
          ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI);
      FileSystem fs = FileSystem.get(URI.create(fsUri), hadoopConf);
      String rootDir = config.getString(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY);
      return new FileContextBasedFsStateStore<T>(fs, rootDir, stateClass);
    } catch (IOException e) {
      throw new RuntimeException("Failed to create FsStateStore with factory", e);
    }
  }
}
| 4,323 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlStateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.Text;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
import org.apache.gobblin.metastore.predicates.StoreNamePredicate;
import org.apache.gobblin.password.PasswordManager;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.io.StreamUtils;
import org.apache.gobblin.util.jdbc.MysqlDataSourceUtils;
/**
* An implementation of {@link StateStore} backed by MySQL.
*
* <p>
*
* This implementation stores serialized {@link State}s as a blob in the database in the Sequence file format.
* The database table row is keyed by the store name and table name.
* State keys are state IDs (see {@link State#getId()}), and values are objects of {@link State} or
* any of its extensions. Keys will be empty strings if state IDs are not set
* (i.e., {@link State#getId()} returns <em>null</em>). In this case, the
* {@link MysqlStateStore#get(String, String, String)} method may not work.
* </p>
*
* @param <T> state object type
**/
public class MysqlStateStore<T extends State> implements StateStore<T> {
  private static final Logger LOG = LoggerFactory.getLogger(MysqlStateStore.class);
  // Counter used to give each connection pool created by newDataSource() a unique name.
  private static final AtomicInteger POOL_NUM = new AtomicInteger(0);
  /** Specifies which 'Job State' query columns receive search evaluation (with SQL `LIKE` operator). */
  protected enum JobStateSearchColumns {
    NONE,
    TABLE_NAME_ONLY,
    STORE_NAME_AND_TABLE_NAME;
  }
  // Class of the state objects to be put into the store
  private final Class<T> stateClass;
  protected final DataSource dataSource;
  // When true, blobs are gzip-compressed on write (putAll) and transparently decompressed
  // on read (see the magic-byte sniffing via StreamUtils.isCompressed in the readers).
  private final boolean compressedValues;
  // SQL templates; the $TABLE$ placeholder is substituted with the configured state store
  // table name in the constructor to produce the *_SQL instance fields below.
  private static final String UPSERT_JOB_STATE_TEMPLATE =
      "INSERT INTO $TABLE$ (store_name, table_name, state) VALUES(?,?,?)"
      + " ON DUPLICATE KEY UPDATE state = values(state)";
  private static final String SELECT_JOB_STATE_TEMPLATE =
      "SELECT state FROM $TABLE$ WHERE store_name = ? and table_name = ?";
  private static final String SELECT_JOB_STATE_WITH_LIKE_TEMPLATE =
      "SELECT state FROM $TABLE$ WHERE store_name = ? and table_name like ?";
  private static final String SELECT_JOB_STATE_WITH_BOTH_LIKES_TEMPLATE =
      "SELECT state FROM $TABLE$ WHERE store_name like ? and table_name like ?";
  private static final String SELECT_ALL_JOBS_STATE = "SELECT state FROM $TABLE$";
  private static final String SELECT_JOB_STATE_EXISTS_TEMPLATE =
      "SELECT 1 FROM $TABLE$ WHERE store_name = ? and table_name = ?";
  private static final String SELECT_JOB_STATE_NAMES_TEMPLATE =
      "SELECT table_name FROM $TABLE$ WHERE store_name = ?";
  private static final String SELECT_STORE_NAMES_TEMPLATE =
      "SELECT distinct store_name FROM $TABLE$";
  private static final String DELETE_JOB_STORE_TEMPLATE =
      "DELETE FROM $TABLE$ WHERE store_name = ?";
  private static final String DELETE_JOB_STATE_TEMPLATE =
      "DELETE FROM $TABLE$ WHERE store_name = ? AND table_name = ?";
  private static final String CLONE_JOB_STATE_TEMPLATE =
      "INSERT INTO $TABLE$(store_name, table_name, state)"
      + " (SELECT store_name, ?, state FROM $TABLE$ s WHERE"
      + " store_name = ? AND table_name = ?)"
      + " ON DUPLICATE KEY UPDATE state = s.state";
  private static final String SELECT_METADATA_TEMPLATE =
      "SELECT store_name, table_name, modified_time from $TABLE$ where store_name like ?";
  // MySQL key length limit is 767 bytes
  private static final String CREATE_JOB_STATE_TABLE_TEMPLATE =
      "CREATE TABLE IF NOT EXISTS $TABLE$ (store_name varchar(100) CHARACTER SET latin1 COLLATE latin1_bin not null,"
      + "table_name varchar(667) CHARACTER SET latin1 COLLATE latin1_bin not null,"
      + " modified_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,"
      + " state longblob, primary key(store_name, table_name))";
  // Fully substituted SQL statements, derived from the templates above in the constructor.
  private final String UPSERT_JOB_STATE_SQL;
  private final String SELECT_JOB_STATE_SQL;
  private final String SELECT_ALL_JOBS_STATE_SQL;
  private final String SELECT_JOB_STATE_WITH_LIKE_SQL;
  private final String SELECT_JOB_STATE_WITH_BOTH_LIKES_SQL;
  private final String SELECT_JOB_STATE_EXISTS_SQL;
  private final String SELECT_JOB_STATE_NAMES_SQL;
  private final String DELETE_JOB_STORE_SQL;
  private final String DELETE_JOB_STATE_SQL;
  private final String CLONE_JOB_STATE_SQL;
  private final String SELECT_STORE_NAMES_SQL;
  protected final String SELECT_METADATA_SQL;
  /**
   * Manages the persistence and retrieval of {@link State} in a MySQL database
   * @param dataSource the {@link DataSource} object for connecting to MySQL
   * @param stateStoreTableName the table for storing the state in rows keyed by two levels (store_name, table_name)
   * @param compressedValues should values be compressed for storage?
   * @param stateClass class of the {@link State}s stored in this state store
   * @throws IOException
   */
  public MysqlStateStore(DataSource dataSource, String stateStoreTableName, boolean compressedValues,
      Class<T> stateClass) throws IOException {
    this.dataSource = dataSource;
    this.stateClass = stateClass;
    this.compressedValues = compressedValues;
    UPSERT_JOB_STATE_SQL = UPSERT_JOB_STATE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_JOB_STATE_SQL = SELECT_JOB_STATE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_JOB_STATE_WITH_LIKE_SQL = SELECT_JOB_STATE_WITH_LIKE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_JOB_STATE_WITH_BOTH_LIKES_SQL = SELECT_JOB_STATE_WITH_BOTH_LIKES_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_ALL_JOBS_STATE_SQL = SELECT_ALL_JOBS_STATE.replace("$TABLE$", stateStoreTableName);
    SELECT_JOB_STATE_EXISTS_SQL = SELECT_JOB_STATE_EXISTS_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_JOB_STATE_NAMES_SQL = SELECT_JOB_STATE_NAMES_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    DELETE_JOB_STORE_SQL = DELETE_JOB_STORE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    DELETE_JOB_STATE_SQL = DELETE_JOB_STATE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    CLONE_JOB_STATE_SQL = CLONE_JOB_STATE_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_STORE_NAMES_SQL = SELECT_STORE_NAMES_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    SELECT_METADATA_SQL = SELECT_METADATA_TEMPLATE.replace("$TABLE$", stateStoreTableName);
    // create table if it does not exist
    String createJobTable = getCreateJobStateTableTemplate().replace("$TABLE$", stateStoreTableName);
    try (Connection connection = dataSource.getConnection();
        PreparedStatement createStatement = connection.prepareStatement(createJobTable)) {
      createStatement.executeUpdate();
    } catch (SQLException e) {
      throw new IOException("Failure creation table " + stateStoreTableName, e);
    }
  }
  /** Returns the CREATE TABLE DDL template; protected so subclasses can customize the schema. */
  protected String getCreateJobStateTableTemplate() {
    return CREATE_JOB_STATE_TABLE_TEMPLATE;
  }
  /**
   * creates a new {@link DataSource}
   * @param config the properties used for datasource instantiation
   * @return
   */
  static DataSource newDataSource(Config config) {
    HikariDataSource dataSource = new HikariDataSource();
    PasswordManager passwordManager = PasswordManager.getInstance(ConfigUtils.configToProperties(config));
    String jdbcUrl = config.getString(ConfigurationKeys.STATE_STORE_DB_URL_KEY);
    String poolName = "HikariPool-" + POOL_NUM.incrementAndGet() + "-" + MysqlStateStore.class.getSimpleName();
    try {
      String dbPath = new URI(new URI(jdbcUrl).getSchemeSpecificPart()).getPath().replaceAll("\\W", "-");
      // when possible, attempt discernment to the DB level
      poolName += dbPath; // as the path will begin w/ "/", following `replaceAll`, no need to prepend additional "-"
    } catch (URISyntaxException e) {
      LOG.warn("unable to parse JDBC URL '{}' - {}", jdbcUrl, e.getMessage());
    }
    // TODO: consider whether to demote to DEBUG log level
    LOG.info("creating pool '{}' for caller with stacktrace: {}", poolName,
        Arrays.toString(Thread.currentThread().getStackTrace()).replace(", ", "\n at "));
    dataSource.setPoolName(poolName);
    dataSource.setDriverClassName(ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_JDBC_DRIVER_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER));
    // MySQL server can timeout a connection so need to validate connections before use
    final String validationQuery = MysqlDataSourceUtils.QUERY_CONNECTION_IS_VALID_AND_NOT_READONLY;
    LOG.info("setting `DataSource` validation query: '" + validationQuery + "'");
    // TODO: revisit following verification of successful connection pool migration:
    // If your driver supports JDBC4 we strongly recommend not setting this property. This is for "legacy" drivers
    // that do not support the JDBC4 Connection.isValid() API; see:
    // https://github.com/brettwooldridge/HikariCP#gear-configuration-knobs-baby
    dataSource.setConnectionTestQuery(validationQuery);
    // auto-commit is off: the put/delete/clone methods commit explicitly after executeUpdate.
    dataSource.setAutoCommit(false);
    dataSource.setIdleTimeout(Duration.ofSeconds(60).toMillis());
    dataSource.setJdbcUrl(jdbcUrl);
    // TODO: revisit following verification of successful connection pool migration:
    // whereas `o.a.commons.dbcp.BasicDataSource` defaults min idle conns to 0, hikari defaults to 10.
    // perhaps non-zero would have desirable runtime perf, but anything >0 currently fails unit tests (even 1!);
    // (so experimenting with a higher number would first require adjusting tests)
    dataSource.setMinimumIdle(0);
    // NOTE(review): both username and password are run through PasswordManager.readPassword,
    // presumably to decrypt possibly-encrypted config values — confirm against PasswordManager docs.
    dataSource.setUsername(passwordManager.readPassword(
        config.getString(ConfigurationKeys.STATE_STORE_DB_USER_KEY)));
    dataSource.setPassword(passwordManager.readPassword(
        config.getString(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY)));
    return dataSource;
  }
  /**
   * return an identifier for the data source based on the configuration
   * @param config configuration
   * @return a {@link String} to identify the data source
   */
  public static String getDataSourceId(Config config) {
    PasswordManager passwordManager = PasswordManager.getInstance(ConfigUtils.configToProperties(config));
    return ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_JDBC_DRIVER_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER) + "::"
        + config.getString(ConfigurationKeys.STATE_STORE_DB_URL_KEY) + "::"
        + passwordManager.readPassword(config.getString(ConfigurationKeys.STATE_STORE_DB_USER_KEY));
  }
  /** Always succeeds: a store is just a key prefix in the (already created) table. */
  @Override
  public boolean create(String storeName) throws IOException {
    /* nothing to do since state will be stored as a new row in a DB table that has been validated */
    return true;
  }
  /**
   * Validates that no entry yet exists for (storeName, tableName); the row itself is
   * created lazily on the first put.
   * @throws IOException if an entry already exists for the given store and table
   */
  @Override
  public boolean create(String storeName, String tableName) throws IOException {
    if (exists(storeName, tableName)) {
      throw new IOException(String.format("State already exists for storeName %s tableName %s", storeName, tableName));
    }
    return true;
  }
  /** Returns true iff a row exists for the exact (storeName, tableName) pair. */
  @Override
  public boolean exists(String storeName, String tableName) throws IOException {
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_JOB_STATE_EXISTS_SQL)) {
      int index = 0;
      queryStatement.setString(++index, storeName);
      queryStatement.setString(++index, tableName);
      try (ResultSet rs = queryStatement.executeQuery()) {
        if (rs.next()) {
          return true;
        } else {
          return false;
        }
      }
    } catch (SQLException e) {
      throw new IOException("Failure checking existence of storeName " + storeName + " tableName " + tableName, e);
    }
  }
  /**
   * Serializes the state to the {@link DataOutput}
   * @param dataOutput output target receiving the serialized data
   * @param state the state to serialize
   * @throws IOException
   */
  private void addStateToDataOutputStream(DataOutput dataOutput, T state) throws IOException {
    // Record layout: a writable Text key (the state id, empty if unset) followed by the state body.
    new Text(Strings.nullToEmpty(state.getId())).write(dataOutput);
    state.write(dataOutput);
  }
  /** Stores a single state; see {@link #putAll(String, String, Collection)}. */
  @Override
  public void put(String storeName, String tableName, T state) throws IOException {
    putAll(storeName, tableName, Collections.singleton(state));
  }
  /**
   * Serializes all given states into one blob (optionally gzip-compressed) and upserts it
   * as the single row keyed by (storeName, tableName), replacing any previous contents.
   */
  @Override
  public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
    try (Connection connection = dataSource.getConnection();
        PreparedStatement insertStatement = connection.prepareStatement(UPSERT_JOB_STATE_SQL);
        ByteArrayOutputStream byteArrayOs = new ByteArrayOutputStream();
        OutputStream os = compressedValues ? new GZIPOutputStream(byteArrayOs) : byteArrayOs;
        DataOutputStream dataOutput = new DataOutputStream(os)) {
      insertStatement.setString(1, storeName);
      insertStatement.setString(2, tableName);
      for (T state : states) {
        addStateToDataOutputStream(dataOutput, state);
      }
      // Close before reading byteArrayOs so a GZIP stream flushes its trailer.
      dataOutput.close();
      insertStatement.setBlob(3, new ByteArrayInputStream(byteArrayOs.toByteArray()));
      insertStatement.executeUpdate();
      connection.commit();
    } catch (SQLException e) {
      throw new IOException("Failure storing state to store " + storeName + " table " + tableName, e);
    }
  }
  /**
   * Retrieves the state with the given id from the blob stored at (storeName, tableName),
   * scanning the serialized records sequentially until the id matches.
   *
   * @return the matching state, or null if the row or the id is not found
   */
  @Override
  public T get(String storeName, String tableName, String stateId) throws IOException {
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_JOB_STATE_SQL)) {
      queryStatement.setString(1, storeName);
      queryStatement.setString(2, tableName);
      try (ResultSet rs = queryStatement.executeQuery()) {
        if (rs.next()) {
          Blob blob = rs.getBlob(1);
          Text key = new Text();
          // Sniff the first two bytes to decide whether the blob was gzip-compressed on write.
          try (InputStream is = StreamUtils.isCompressed(blob.getBytes(1, 2)) ?
              new GZIPInputStream(blob.getBinaryStream()) : blob.getBinaryStream();
              DataInputStream dis = new DataInputStream(is)){
            // keep deserializing while we have data
            while (dis.available() > 0) {
              T state = this.stateClass.newInstance();
              key.readFields(dis);
              state.readFields(dis);
              state.setId(key.toString());
              if (key.toString().equals(stateId)) {
                return state;
              }
            }
          } catch (EOFException e) {
            // no more data. GZIPInputStream.available() doesn't return 0 until after EOF.
          }
        }
      }
    } catch (RuntimeException e) {
      // let RuntimeExceptions propagate unwrapped; everything else is wrapped below
      throw e;
    }catch (Exception e) {
      throw new IOException("failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }
    return null;
  }
  /**
   * Fetches all states matching (storeName, tableName), where either or both arguments are
   * treated as SQL LIKE patterns depending on {@code searchColumns}.
   */
  protected List<T> getAll(String storeName, String tableName, JobStateSearchColumns searchColumns) throws IOException {
    List<T> states = Lists.newArrayList();
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(
            searchColumns == JobStateSearchColumns.TABLE_NAME_ONLY ?
                SELECT_JOB_STATE_WITH_LIKE_SQL :
                searchColumns == JobStateSearchColumns.STORE_NAME_AND_TABLE_NAME ?
                    SELECT_JOB_STATE_WITH_BOTH_LIKES_SQL :
                    SELECT_JOB_STATE_SQL)) {
      queryStatement.setString(1, storeName);
      queryStatement.setString(2, tableName);
      execGetAllStatement(queryStatement, states);
      return states;
    } catch (RuntimeException re) {
      throw re;
    } catch (Exception e) {
      throw new IOException("failure retrieving state from storeName " + storeName + " tableName " + tableName, e);
    }
  }
  /**
   * An additional {@link #getAll()} method to retrieve all entries in a table.
   *
   */
  public List<T> getAll() throws IOException {
    List<T> states = Lists.newArrayList();
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_ALL_JOBS_STATE_SQL)) {
      execGetAllStatement(queryStatement, states);
    } catch (RuntimeException re) {
      throw re;
    } catch (Exception e) {
      throw new IOException(String.format("failure retrieving all states with the SQL[%s]", SELECT_ALL_JOBS_STATE_SQL), e);
    }
    return states;
  }
  /** Exact-match lookup of all states in one (storeName, tableName) row. */
  @Override
  public List<T> getAll(String storeName, String tableName) throws IOException {
    return getAll(storeName, tableName, JobStateSearchColumns.NONE);
  }
  /** All states across every table of the store ("%" matches any table_name). */
  @Override
  public List<T> getAll(String storeName) throws IOException {
    return getAll(storeName, "%", JobStateSearchColumns.TABLE_NAME_ONLY);
  }
  /**
   * An helper function extracted from getAll method originally that has side effects:
   * - Executing queryStatement
   * - Put the result into List<state> object.
   * @throws SQLException
   * @throws Exception
   */
  private void execGetAllStatement(PreparedStatement queryStatement, List<T> states) throws SQLException, Exception {
    try (ResultSet rs = queryStatement.executeQuery()) {
      while (rs.next()) {
        Blob blob = rs.getBlob(1);
        Text key = new Text();
        // Same gzip sniffing and sequential decode as in get(String, String, String).
        try (InputStream is = StreamUtils.isCompressed(blob.getBytes(1, 2)) ?
            new GZIPInputStream(blob.getBinaryStream()) : blob.getBinaryStream();
            DataInputStream dis = new DataInputStream(is)) {
          // keep deserializing while we have data
          while (dis.available() > 0) {
            T state = this.stateClass.newInstance();
            String stateId = key.readString(dis);
            state.readFields(dis);
            state.setId(stateId);
            states.add(state);
          }
        } catch (EOFException e) {
          // no more data. GZIPInputStream.available() doesn't return 0 until after EOF.
        }
      }
    }
  }
  /** Returns the table names under the store whose name satisfies the predicate. */
  @Override
  public List<String> getTableNames(String storeName, Predicate<String> predicate) throws IOException {
    List<String> names = Lists.newArrayList();
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_JOB_STATE_NAMES_SQL)) {
      queryStatement.setString(1, storeName);
      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          String name = rs.getString(1);
          if (predicate.apply(name)) {
            names.add(name);
          }
        }
      }
    } catch (SQLException e) {
      throw new IOException(String.format("Could not query table names for store %s", storeName), e);
    }
    return names;
  }
  /**
   * Get store names in the state store
   *
   * @param predicate only returns names matching predicate
   * @return (possibly empty) list of store names from the given store
   * @throws IOException
   */
  public List<String> getStoreNames(Predicate<String> predicate)
      throws IOException {
    List<String> names = Lists.newArrayList();
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_STORE_NAMES_SQL)) {
      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          String name = rs.getString(1);
          if (predicate.apply(name)) {
            names.add(name);
          }
        }
      }
    } catch (SQLException e) {
      throw new IOException(String.format("Could not query store names"), e);
    }
    return names;
  }
  /**
   * Copies the state blob of (storeName, original) into a new row (storeName, alias)
   * via a single INSERT ... SELECT, upserting if the alias already exists.
   */
  @Override
  public void createAlias(String storeName, String original, String alias) throws IOException {
    if (!exists(storeName, original)) {
      throw new IOException(String.format("State does not exist for table %s", original));
    }
    try (Connection connection = dataSource.getConnection();
        PreparedStatement cloneStatement = connection.prepareStatement(CLONE_JOB_STATE_SQL)) {
      int index = 0;
      cloneStatement.setString(++index, alias);
      cloneStatement.setString(++index, storeName);
      cloneStatement.setString(++index, original);
      cloneStatement.executeUpdate();
      connection.commit();
    } catch (SQLException e) {
      throw new IOException(String.format("Failure creating alias for store %s original %s", storeName, original), e);
    }
  }
  /** Deletes the single row keyed by (storeName, tableName); no-op if absent. */
  @Override
  public void delete(String storeName, String tableName) throws IOException {
    try (Connection connection = dataSource.getConnection();
        PreparedStatement deleteStatement = connection.prepareStatement(DELETE_JOB_STATE_SQL)) {
      int index = 0;
      deleteStatement.setString(++index, storeName);
      deleteStatement.setString(++index, tableName);
      deleteStatement.executeUpdate();
      connection.commit();
    } catch (SQLException e) {
      throw new IOException("failure deleting storeName " + storeName + " tableName " + tableName, e);
    }
  }
  /** Deletes every row belonging to the store; no-op if the store has no rows. */
  @Override
  public void delete(String storeName) throws IOException {
    try (Connection connection = dataSource.getConnection();
        PreparedStatement deleteStatement = connection.prepareStatement(DELETE_JOB_STORE_SQL)) {
      deleteStatement.setString(1, storeName);
      deleteStatement.executeUpdate();
      connection.commit();
    } catch (SQLException e) {
      throw new IOException("failure deleting storeName " + storeName, e);
    }
  }
  /**
   * Gets entry managers for all tables matching the predicate
   * @param predicate Predicate used to filter tables. To allow state stores to push down predicates, use native extensions
   * of {@link StateStorePredicate}.
   * @throws IOException
   */
  @Override
  public List<? extends StateStoreEntryManager> getMetadataForTables(StateStorePredicate predicate)
      throws IOException {
    List<MysqlStateStoreEntryManager> entryManagers = Lists.newArrayList();
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement = connection.prepareStatement(SELECT_METADATA_SQL)) {
      // StoreNamePredicate pushes the store-name filter down to SQL; otherwise scan all stores.
      String storeName = predicate instanceof StoreNamePredicate ? ((StoreNamePredicate) predicate).getStoreName() : "%";
      queryStatement.setString(1, storeName);
      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          String rsStoreName = rs.getString(1);
          String rsTableName = rs.getString(2);
          Timestamp timestamp = rs.getTimestamp(3);
          // One instance is built for the predicate test, a fresh one is added to the result.
          StateStoreEntryManager entryManager =
              new MysqlStateStoreEntryManager(rsStoreName, rsTableName, timestamp.getTime(), this);
          if (predicate.apply(entryManager)) {
            entryManagers.add(new MysqlStateStoreEntryManager(rsStoreName, rsTableName, timestamp.getTime(), this));
          }
        }
      }
    } catch (SQLException e) {
      throw new IOException("failure getting metadata for tables", e);
    }
    return entryManagers;
  }
  /**
   * For setting timestamps in tests
   * @param timestamp 0 to set to default, non-zero to set an epoch time
   * @throws SQLException
   */
  @VisibleForTesting
  public void setTestTimestamp(long timestamp) throws IOException {
    String statement = "SET TIMESTAMP =";
    // 0 is used to reset to the default
    if (timestamp > 0 ) {
      statement += timestamp;
    } else {
      statement += " DEFAULT";
    }
    try (Connection connection = dataSource.getConnection();
        PreparedStatement queryStatement =
            connection.prepareStatement(statement)) {
      queryStatement.execute();
    } catch (SQLException e) {
      throw new IOException("Could not set timestamp " + timestamp, e);
    }
  }
}
| 4,324 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/DatasetStoreDatasetFinder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.gobblin.dataset.DatasetsFinder;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.DatasetPredicate;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
import org.apache.gobblin.metastore.predicates.StoreNamePredicate;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
/**
* A {@link DatasetsFinder} to find {@link DatasetStoreDataset}s.
*/
public class DatasetStoreDatasetFinder implements DatasetsFinder<DatasetStoreDataset> {
  // Config keys for optionally narrowing the search to one store and/or one dataset URN.
  public static final String STORE_NAME_FILTER = "datasetStoreDatasetFinder.filter.storeName";
  public static final String DATASET_URN_FILTER = "datasetStoreDatasetFinder.filter.datasetUrn";
  private final Config config;
  private final DatasetStateStore store;
  private final StateStorePredicate predicate;
  public DatasetStoreDatasetFinder(FileSystem fs, Properties props) throws IOException {
    this.config = ConfigFactory.parseProperties(props);
    this.store = DatasetStateStore.buildDatasetStateStore(this.config);
    this.predicate = buildPredicate();
  }
  public DatasetStoreDatasetFinder(Properties props) throws IOException {
    this(FileSystem.get(new Configuration()), props);
  }
  /**
   * Derives the state store predicate from the configured filters: dataset-URN filter
   * (requires store-name filter), store-name filter alone, or match-everything.
   */
  private StateStorePredicate buildPredicate() {
    String storeName =
        ConfigUtils.hasNonEmptyPath(this.config, STORE_NAME_FILTER) ? this.config.getString(STORE_NAME_FILTER) : null;
    if (ConfigUtils.hasNonEmptyPath(this.config, DATASET_URN_FILTER)) {
      if (storeName == null) {
        throw new IllegalArgumentException(
            DATASET_URN_FILTER + " requires " + STORE_NAME_FILTER + " to also be defined.");
      }
      return new DatasetPredicate(storeName, this.config.getString(DATASET_URN_FILTER), x -> true);
    }
    if (storeName != null) {
      return new StoreNamePredicate(storeName, x -> true);
    }
    return new StateStorePredicate(x -> true);
  }
  /**
   * Groups the matching state store entries by dataset key and wraps each group
   * in a {@link DatasetStoreDataset}.
   */
  @Override
  public List<DatasetStoreDataset> findDatasets() throws IOException {
    List<DatasetStateStoreEntryManager> entries = this.store.getMetadataForTables(this.predicate);
    return entries.stream()
        .collect(Collectors.groupingBy(DatasetStoreDataset.Key::new))
        .entrySet().stream()
        .map(group -> new DatasetStoreDataset(group.getKey(), group.getValue()))
        .collect(Collectors.toList());
  }
  /** No common root applies to datasets spread across a state store. */
  @Override
  public Path commonDatasetRoot() {
    return null;
  }
}
| 4,325 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlJobStatusStateStoreEntryManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
/**
* A {@link DatasetStateStoreEntryManager} generated by {@link MysqlJobStatusStateStore}.
*/
public class MysqlJobStatusStateStoreEntryManager<T extends State> extends DatasetStateStoreEntryManager<T> {
  // Backing store used to service read/delete requests for the entry this manager describes.
  private final MysqlJobStatusStateStore<T> stateStore;

  /**
   * @param storeName name of the store the entry belongs to
   * @param tableName name of the table holding the entry
   * @param timestamp modification timestamp of the entry
   * @param mysqlJobStatusStateStore backing store for subsequent reads/deletes
   */
  public MysqlJobStatusStateStoreEntryManager(String storeName, String tableName, long timestamp, MysqlJobStatusStateStore<T> mysqlJobStatusStateStore) {
    // NOTE(review): the two empty-string arguments fill super-class fields that are unused for
    // mysql job status entries — confirm against DatasetStateStoreEntryManager's constructor.
    super(storeName, tableName, timestamp, "", "", mysqlJobStatusStateStore);
    this.stateStore = mysqlJobStatusStateStore;
  }

  /**
   * Reads the state for this entry from the backing store. Passes an empty state id;
   * presumably the mysql store keys entries by store/table only — confirm against
   * {@link MysqlJobStatusStateStore}.
   */
  @Override
  public T readState() throws IOException {
    return this.stateStore.get(getStoreName(), getTableName(), "");
  }

  /** Deletes the table backing this entry from the store. */
  @Override
  public void delete() throws IOException {
    this.stateStore.delete(getStoreName(), getTableName());
  }
}
| 4,326 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlDataSourceKey.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.util.Objects;

import javax.sql.DataSource;

import com.typesafe.config.Config;

import org.apache.gobblin.broker.iface.SharedResourceKey;

import lombok.Getter;
/**
* {@link SharedResourceKey} for requesting {@link DataSource}s from a
* {@link org.apache.gobblin.broker.iface.SharedResourceFactory}
*/
@Getter
public class MysqlDataSourceKey implements SharedResourceKey {
  // Identity of the key: two keys are equal iff their data source names are equal.
  private final String dataSourceName;
  private final Config config;

  /**
   * Constructs a key for the mysql data source. The dataSourceName is used as the key.
   * @param dataSourceName an identifier for the data source
   * @param config configuration that is passed along to configure the data source
   */
  public MysqlDataSourceKey(String dataSourceName, Config config) {
    this.dataSourceName = dataSourceName;
    this.config = config;
  }

  @Override
  public String toConfigurationKey() {
    return this.dataSourceName;
  }

  /** Equality is based solely on {@link #dataSourceName}; {@link #config} is intentionally excluded. */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    MysqlDataSourceKey that = (MysqlDataSourceKey) o;
    // Objects.equals replaces the hand-rolled null-safe comparison.
    return Objects.equals(this.dataSourceName, that.dataSourceName);
  }

  @Override
  public int hashCode() {
    // Consistent with equals: hash only the data source name (null-safe).
    return Objects.hashCode(this.dataSourceName);
  }
}
| 4,327 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/JobHistoryDataSourceProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.name.Named;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.password.PasswordManager;
import org.apache.gobblin.util.jdbc.MysqlDataSourceUtils;
/**
* This class extends {@link org.apache.gobblin.util.jdbc.DataSourceProvider} with its own property keys.
*/
public class JobHistoryDataSourceProvider extends org.apache.gobblin.util.jdbc.DataSourceProvider {
  private static final Logger LOG = LoggerFactory.getLogger(JobHistoryDataSourceProvider.class);

  // Monotonic counter so each Hikari pool created by this provider gets a unique name.
  private static final AtomicInteger POOL_NUM = new AtomicInteger(0);

  /**
   * Configures the inherited Hikari data source from job-history-store properties:
   * pool name, JDBC driver/URL, optional connection-validation query, and optional credentials.
   *
   * @param properties properties holding the job history store connection settings
   */
  @Inject
  public JobHistoryDataSourceProvider(@Named("dataSourceProperties") Properties properties) {
    this.dataSource.setPoolName("HikariPool-" + POOL_NUM.incrementAndGet() + "-" + getClass().getSimpleName());
    this.dataSource.setDriverClassName(properties.getProperty(ConfigurationKeys.JOB_HISTORY_STORE_JDBC_DRIVER_KEY,
        ConfigurationKeys.DEFAULT_JOB_HISTORY_STORE_JDBC_DRIVER));
    // Set validation query to verify connection
    if (!Boolean.parseBoolean(properties.getProperty(SKIP_VALIDATION_QUERY, "false"))) {
      // MySQL server can timeout a connection so need to validate connections before use
      final String validationQuery = MysqlDataSourceUtils.QUERY_CONNECTION_IS_VALID_AND_NOT_READONLY;
      LOG.info("setting `DataSource` validation query: '" + validationQuery + "'");
      // TODO: revisit following verification of successful connection pool migration:
      // If your driver supports JDBC4 we strongly recommend not setting this property. This is for "legacy" drivers
      // that do not support the JDBC4 Connection.isValid() API; see:
      // https://github.com/brettwooldridge/HikariCP#gear-configuration-knobs-baby
      this.dataSource.setConnectionTestQuery(validationQuery);
      this.dataSource.setIdleTimeout(Duration.ofSeconds(60).toMillis());
    }
    this.dataSource.setJdbcUrl(properties.getProperty(ConfigurationKeys.JOB_HISTORY_STORE_URL_KEY));
    // TODO: revisit following verification of successful connection pool migration:
    // whereas `o.a.commons.dbcp.BasicDataSource` defaults min idle conns to 0, hikari defaults to 10.
    // perhaps non-zero would have desirable runtime perf, but anything >0 currently fails unit tests (even 1!);
    // (so experimenting with a higher number would first require adjusting tests)
    this.dataSource.setMinimumIdle(0);
    // Credentials are optional; both user and password keys must be present to be applied.
    if (properties.containsKey(ConfigurationKeys.JOB_HISTORY_STORE_USER_KEY)
        && properties.containsKey(ConfigurationKeys.JOB_HISTORY_STORE_PASSWORD_KEY)) {
      this.dataSource.setUsername(properties.getProperty(ConfigurationKeys.JOB_HISTORY_STORE_USER_KEY));
      // Password may be encrypted; PasswordManager resolves it to plaintext.
      this.dataSource.setPassword(PasswordManager.getInstance(properties)
          .readPassword(properties.getProperty(ConfigurationKeys.JOB_HISTORY_STORE_PASSWORD_KEY)));
    }
  }
}
| 4,328 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlStateStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import javax.sql.DataSource;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
@Alias("mysql")
public class MysqlStateStoreFactory implements StateStore.Factory {
@Override
public <T extends State> StateStore<T> createStateStore(Config config, Class<T> stateClass) {
String stateStoreTableName = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_TABLE_KEY,
ConfigurationKeys.DEFAULT_STATE_STORE_DB_TABLE);
boolean compressedValues = ConfigUtils.getBoolean(config, ConfigurationKeys.STATE_STORE_COMPRESSED_VALUES_KEY,
ConfigurationKeys.DEFAULT_STATE_STORE_COMPRESSED_VALUES);
try {
DataSource dataSource = MysqlDataSourceFactory.get(config,
SharedResourcesBrokerFactory.getImplicitBroker());
return new MysqlStateStore<>(dataSource, stateStoreTableName, compressedValues, stateClass);
} catch (Exception e) {
throw new RuntimeException("Failed to create MysqlStateStore with factory", e);
}
}
} | 4,329 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlDagStateStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import javax.sql.DataSource;
import com.typesafe.config.Config;
import org.apache.gobblin.MysqlDagStore;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
public class MysqlDagStateStoreFactory extends MysqlStateStoreFactory {
  /**
   * Builds a {@link MysqlDagStore} backed by a shared-broker {@link DataSource},
   * using the configured (or default) table name and value-compression setting.
   */
  @Override
  public <T extends State> MysqlDagStore<T> createStateStore(Config config, Class<T> stateClass) {
    String tableName = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_TABLE_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_DB_TABLE);
    boolean useCompression = ConfigUtils.getBoolean(config, ConfigurationKeys.STATE_STORE_COMPRESSED_VALUES_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_COMPRESSED_VALUES);
    try {
      DataSource dataSource =
          MysqlDataSourceFactory.get(config, SharedResourcesBrokerFactory.getImplicitBroker());
      return new MysqlDagStore<>(dataSource, tableName, useCompression, stateClass);
    } catch (Exception e) {
      throw new RuntimeException("Failed to create MysqlDagStore with factory", e);
    }
  }
}
| 4,330 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/FsStateStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.net.URI;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValue;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
@Alias("fs")
public class FsStateStoreFactory implements StateStore.Factory {
@Override
public <T extends State> StateStore<T> createStateStore(Config config, Class<T> stateClass) {
// Add all job configuration properties so they are picked up by Hadoop
Configuration conf = new Configuration();
for (Map.Entry<String, ConfigValue> entry : config.entrySet()) {
conf.set(entry.getKey(), entry.getValue().unwrapped().toString());
}
try {
String stateStoreFsUri = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_FS_URI_KEY,
ConfigurationKeys.LOCAL_FS_URI);
FileSystem stateStoreFs = FileSystem.get(URI.create(stateStoreFsUri), conf);
String stateStoreRootDir = config.getString(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY);
return new FsStateStore(stateStoreFs, stateStoreRootDir, stateClass);
} catch (IOException e) {
throw new RuntimeException("Failed to create FsStateStore with factory", e);
}
}
} | 4,331 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/StateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Predicate;
import com.typesafe.config.Config;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
import org.apache.gobblin.metastore.predicates.StateStorePredicate;
/**
* An interface for stores that persist {@link State}s.
*
* <p>
* Each such store consists of zero or more tables, and each table
* stores zero or more {@link State}s keyed on the state IDs (see
* {@link State#getId()}).
* </p>
*
* <p>
* Note: Implementations of dataset store should maintain a timestamp for every state they persist. Certain utilities
* will not work if this is not the case.
* </p>
*
* @param <T> state object type
*
* @author Yinan Li
*/
public interface StateStore<T extends State> {

  /** Factory for constructing {@link StateStore} instances from a {@link Config}. */
  interface Factory {
    /**
     * Creates a {@link StateStore} persisting states of the given class.
     *
     * @param config configuration used to construct the store
     * @param stateClass class of the {@link State}s the store will hold
     * @param <T> state object type
     * @return a new {@link StateStore}
     */
    <T extends State> StateStore<T> createStateStore(Config config, Class<T> stateClass);
  }

  /**
   * Create a new store.
   *
   * <p>
   * A store that does not exist will be created when any put
   * method is called against it.
   * </p>
   *
   * @param storeName store name
   * @return if the store is successfully created
   * @throws IOException
   */
  boolean create(String storeName)
      throws IOException;

  /**
   * Create a new table in a store.
   *
   * <p>
   * A table that does not exist will be created when any put
   * method is called against it.
   * </p>
   *
   * @param storeName store name
   * @param tableName table name
   * @return if the table is successfully created
   * @throws IOException
   */
  boolean create(String storeName, String tableName)
      throws IOException;

  /**
   * Check whether a given table exists.
   *
   * @param storeName store name
   * @param tableName table name
   * @return whether the given table exists
   * @throws IOException
   */
  boolean exists(String storeName, String tableName)
      throws IOException;

  /**
   * Put a {@link State} into a table.
   *
   * <p>
   * Calling this method against a store or a table that
   * does not exist will cause it to be created.
   * </p>
   *
   * @param storeName store name
   * @param tableName table name
   * @param state {@link State} to be put into the table
   * @throws IOException
   */
  void put(String storeName, String tableName, T state)
      throws IOException;

  /**
   * Put a collection of {@link State}s into a table.
   *
   * <p>
   * Calling this method against a store or a table that
   * does not exist will cause it to be created.
   * </p>
   *
   * @param storeName store name
   * @param tableName table name
   * @param states collection of {@link State}s to be put into the table
   * @throws IOException
   */
  void putAll(String storeName, String tableName, Collection<T> states)
      throws IOException;

  /**
   * Get a {@link State} with a given state ID from a table.
   *
   * @param storeName store name
   * @param tableName table name
   * @param stateId state ID
   * @return {@link State} with the given state ID or <em>null</em>
   *         if the state with the given state ID does not exist
   * @throws IOException
   */
  T get(String storeName, String tableName, String stateId)
      throws IOException;

  /**
   * Get all {@link State}s from a table.
   *
   * @param storeName store name
   * @param tableName table name
   * @return (possibly empty) list of {@link State}s from the given table
   * @throws IOException
   */
  List<T> getAll(String storeName, String tableName)
      throws IOException;

  /**
   * Get all {@link State}s from a store.
   *
   * @param storeName store name
   * @return (possibly empty) list of {@link State}s from the given store
   * @throws IOException
   */
  List<T> getAll(String storeName)
      throws IOException;

  /**
   * Get table names under the storeName
   *
   * @param storeName store name
   * @param predicate only returns names matching predicate
   * @return (possibly empty) list of state names from the given store
   * @throws IOException
   */
  List<String> getTableNames(String storeName, Predicate<String> predicate)
      throws IOException;

  /**
   * Get store names in the state store
   *
   * @param predicate only returns names matching predicate
   * @return (possibly empty) list of store names from the given store
   * @throws IOException
   */
  default List<String> getStoreNames(Predicate<String> predicate)
      throws IOException {
    throw new UnsupportedOperationException("Not implemented");
  }

  /**
   * Create an alias for an existing table.
   *
   * @param storeName store name
   * @param original original table name
   * @param alias alias table name
   * @throws IOException
   */
  void createAlias(String storeName, String original, String alias)
      throws IOException;

  /**
   * Delete a table from a store.
   *
   * @param storeName store name
   * @param tableName table name
   * @throws IOException
   */
  void delete(String storeName, String tableName)
      throws IOException;

  /**
   * Delete a list of tables from a store.
   *
   * @param storeName store name
   * @param tableNames List of table names in the state store to delete
   * @throws IOException
   */
  default void delete(String storeName, List<String> tableNames)
      throws IOException {
    for (String tableName : tableNames) {
      delete(storeName, tableName);
    }
  }

  /**
   * Delete a store.
   *
   * @param storeName store name
   * @throws IOException
   */
  void delete(String storeName)
      throws IOException;

  /**
   * Gets entry managers for all tables matching the input
   * @param predicate Predicate used to filter tables. To allow state stores to push down predicates, use native extensions
   *                  of {@link StateStorePredicate}.
   * @return A list of all {@link StateStoreEntryManager}s matching the predicate.
   * @throws IOException
   */
  default List<? extends StateStoreEntryManager> getMetadataForTables(StateStorePredicate predicate)
      throws IOException {
    throw new UnsupportedOperationException("Operation unsupported for predicate with class " + predicate.getClass());
  }
}
| 4,332 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/FileContextBasedFsStateStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.HadoopUtils;
/**
* An implementation of {@link StateStore} backed by a {@link FileSystem}.
*
* <p>
* This implementation extends {@link FsStateStore} to use
* {@link org.apache.hadoop.fs.FileContext} APIs to persist state to the state store.
* The advantage of using {@link org.apache.hadoop.fs.FileContext} is that it provides an
* atomic rename-with-overwrite option which allows atomic update to a previously written
* state file.
* </p>
*
* @param <T> state object type
*
* @author Sudarshan Vasudevan
*/
public class FileContextBasedFsStateStore<T extends State> extends FsStateStore<T> {
private FileContext fc;
public FileContextBasedFsStateStore(String fsUri, String storeRootDir, Class stateClass)
throws IOException {
super(fsUri, storeRootDir, stateClass);
this.fc = FileContext.getFileContext(URI.create(fsUri));
}
public FileContextBasedFsStateStore(FileSystem fs, String storeRootDir, Class<T> stateClass)
throws UnsupportedFileSystemException {
super(fs, storeRootDir, stateClass);
this.fc = FileContext.getFileContext(this.fs.getUri());
}
public FileContextBasedFsStateStore(String storeUrl, Class<T> stateClass)
throws IOException {
super(storeUrl, stateClass);
this.fc = FileContext.getFileContext(this.fs.getUri());
}
/**
* See {@link StateStore#put(String, String, T)}.
*
* <p>
* This implementation uses {@link FileContext#rename(Path, Path, org.apache.hadoop.fs.Options.Rename...)}, with
* {@link org.apache.hadoop.fs.Options.Rename#OVERWRITE} set to true, to write the
* state to the underlying state store.
* </p>
*/
@Override
protected void renamePath(Path tmpTablePath, Path tablePath) throws IOException {
HadoopUtils.renamePath(this.fc, tmpTablePath, tablePath, true);
}
} | 4,333 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlJobStatusStateStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import javax.sql.DataSource;
import com.typesafe.config.Config;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.util.ConfigUtils;
public class MysqlJobStatusStateStoreFactory extends MysqlStateStoreFactory implements DatasetStateStore.Factory {
  /**
   * Builds a {@link MysqlJobStatusStateStore} backed by a shared-broker {@link DataSource},
   * using the configured (or default) table name and value-compression setting.
   */
  @Override
  public <T extends State> MysqlJobStatusStateStore<T> createStateStore(Config config, Class<T> stateClass) {
    String stateStoreTableName = ConfigUtils.getString(config, ConfigurationKeys.STATE_STORE_DB_TABLE_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_DB_TABLE);
    boolean compressedValues = ConfigUtils.getBoolean(config, ConfigurationKeys.STATE_STORE_COMPRESSED_VALUES_KEY,
        ConfigurationKeys.DEFAULT_STATE_STORE_COMPRESSED_VALUES);
    try {
      DataSource dataSource = MysqlDataSourceFactory.get(config,
          SharedResourcesBrokerFactory.getImplicitBroker());
      return new MysqlJobStatusStateStore<>(dataSource, stateStoreTableName, compressedValues, stateClass);
    } catch (Exception e) {
      // Fixed copy-paste in the message: this factory builds a MysqlJobStatusStateStore,
      // not a plain MysqlStateStore.
      throw new RuntimeException("Failed to create MysqlJobStatusStateStore with factory", e);
    }
  }

  /** Convenience overload defaulting the state class to {@link State}. */
  @Override
  public <T extends State> MysqlJobStatusStateStore createStateStore(Config config) {
    return createStateStore(config, State.class);
  }
}
| 4,334 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MysqlStateStoreEntryManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.IOException;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
/**
* A {@link StateStoreEntryManager} generated by {@link MysqlStateStore}.
*/
public class MysqlStateStoreEntryManager<T extends State> extends StateStoreEntryManager<T> {
  // Backing store used to service read/delete requests for the entry this manager describes.
  private final MysqlStateStore<T> stateStore;

  /**
   * @param storeName name of the store the entry belongs to
   * @param tableName name of the table holding the entry
   * @param modificationTime last-modification timestamp of the entry
   * @param stateStore backing {@link MysqlStateStore} for subsequent reads/deletes
   */
  public MysqlStateStoreEntryManager(String storeName, String tableName, long modificationTime,
      MysqlStateStore<T> stateStore) {
    super(storeName, tableName, modificationTime, stateStore);
    this.stateStore = stateStore;
  }

  /**
   * Reads the state for this entry from the backing store. Passes an empty state id;
   * presumably the mysql store keys entries by store/table only — confirm against
   * {@link MysqlStateStore}.
   */
  @Override
  public T readState() throws IOException {
    return this.stateStore.get(getStoreName(), getTableName(), "");
  }

  /** Deletes the table backing this entry from the store. */
  @Override
  public void delete() throws IOException {
    this.stateStore.delete(getStoreName(), getTableName());
  }
}
| 4,335 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/DatabaseJobHistoryStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.List;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.api.MigrationVersion;
import org.reflections.Configuration;
import org.reflections.Reflections;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Inject;
import javax.annotation.Nullable;
import javax.sql.DataSource;
import org.apache.gobblin.metastore.database.SupportedDatabaseVersion;
import org.apache.gobblin.metastore.database.VersionedDatabaseJobHistoryStore;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobExecutionQuery;
/**
* An implementation of {@link JobHistoryStore} backed by MySQL.
*
* <p>
* The DDLs for the MySQL job history store can be found under metastore/src/main/resources.
* </p>
*
* @author Yinan Li
*/
public class DatabaseJobHistoryStore implements JobHistoryStore {
// Scan all packages in the classpath with prefix org.apache.gobblin.metastore.database when
// class is loaded. Since scan is expensive we do it only once when class is loaded.
private static final Reflections reflections = new Reflections(getConfigurationBuilder());
private final VersionedDatabaseJobHistoryStore versionedStore;
@Inject
public DatabaseJobHistoryStore(DataSource dataSource)
    throws InstantiationException, IllegalAccessException, ClassNotFoundException {
  // Detect the current schema version via Flyway, then pick and initialize the
  // versioned store implementation that supports that version.
  MigrationVersion databaseVersion = getDatabaseVersion(dataSource);
  this.versionedStore = findVersionedDatabaseJobHistoryStore(databaseVersion);
  this.versionedStore.init(dataSource);
}
// Serialized on this store instance; delegates the write to the version-specific store.
@Override
public synchronized void put(JobExecutionInfo jobExecutionInfo) throws IOException {
  this.versionedStore.put(jobExecutionInfo);
}
// Serialized on this store instance; delegates the query to the version-specific store.
@Override
public synchronized List<JobExecutionInfo> get(JobExecutionQuery query) throws IOException {
  return this.versionedStore.get(query);
}
// Closes the underlying versioned store. Unlike put/get this is not synchronized —
// NOTE(review): confirm whether close can race with in-flight put/get calls.
@Override
public void close() throws IOException {
  this.versionedStore.close();
}
private static MigrationVersion getDatabaseVersion(DataSource dataSource) throws FlywayException {
Flyway flyway = Flyway.configure().dataSource(dataSource).load();
MigrationInfoService info = flyway.info();
MigrationVersion currentVersion = MigrationVersion.EMPTY;
if (info.current() != null) {
currentVersion = info.current().getVersion();
}
return currentVersion;
}
private static Collection<URL> effectiveClassPathUrls(ClassLoader... classLoaders) {
return ClasspathHelper.forManifest(ClasspathHelper.forClassLoader(classLoaders));
}
private static Configuration getConfigurationBuilder() {
ConfigurationBuilder configurationBuilder= ConfigurationBuilder.build("org.apache.gobblin.metastore.database",
effectiveClassPathUrls(DatabaseJobHistoryStore.class.getClassLoader()));
List<URL> filteredUrls = Lists.newArrayList(Iterables.filter(configurationBuilder.getUrls(), new Predicate<URL>() {
@Override
public boolean apply(@Nullable URL input) {
return input != null && (!input.getProtocol().equals("file") || new File(input.getFile()).exists());
}
}));
configurationBuilder.setUrls(filteredUrls);
return configurationBuilder;
}
private static VersionedDatabaseJobHistoryStore findVersionedDatabaseJobHistoryStore(MigrationVersion requiredVersion)
throws IllegalAccessException, InstantiationException, ClassNotFoundException {
Class<?> foundClazz = null;
Class<?> defaultClazz = null;
MigrationVersion defaultVersion = MigrationVersion.EMPTY;
for (Class<?> clazz : Sets.intersection(reflections.getTypesAnnotatedWith(SupportedDatabaseVersion.class),
reflections.getSubTypesOf(VersionedDatabaseJobHistoryStore.class))) {
SupportedDatabaseVersion annotation = clazz.getAnnotation(SupportedDatabaseVersion.class);
String version = annotation.version();
MigrationVersion actualVersion = MigrationVersion.fromVersion(Strings.isNullOrEmpty(version) ? null : version);
if (annotation.isDefault() && actualVersion.compareTo(defaultVersion) > 0) {
defaultClazz = clazz;
defaultVersion = actualVersion;
}
if (actualVersion.compareTo(requiredVersion) == 0) {
foundClazz = clazz;
}
}
if (foundClazz == null) {
foundClazz = defaultClazz;
}
if (foundClazz == null) {
throw new ClassNotFoundException(
String.format("Could not find an instance of %s which supports database " + "version %s.",
VersionedDatabaseJobHistoryStore.class.getSimpleName(), requiredVersion.toString()));
}
return (VersionedDatabaseJobHistoryStore) foundClazz.newInstance();
}
}
| 4,336 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/MetaStoreModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore;
import java.util.Properties;
import javax.sql.DataSource;
import com.google.inject.AbstractModule;
import com.google.inject.name.Names;
/**
* A Guice module defining the dependencies used by the metastore module.
*
* @author Yinan Li
*/
public class MetaStoreModule extends AbstractModule {
  // Configuration handed to the data-source provider at injection time.
  private final Properties properties;

  public MetaStoreModule(Properties properties) {
    this.properties = properties;
  }

  @Override
  protected void configure() {
    // Expose the raw properties under the name the JobHistoryDataSourceProvider expects.
    bind(Properties.class).annotatedWith(Names.named("dataSourceProperties")).toInstance(this.properties);
    // Connections for the job history store come from this provider-backed DataSource.
    bind(DataSource.class).toProvider(JobHistoryDataSourceProvider.class);
    // Route all JobHistoryStore usage to the MySQL-backed implementation.
    bind(JobHistoryStore.class).to(DatabaseJobHistoryStore.class);
  }
}
| 4,337 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/predicates/StateStorePredicate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.predicates;
import com.google.common.base.Predicate;
import lombok.RequiredArgsConstructor;
import lombok.experimental.Delegate;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
/**
* A {@link Predicate} used to filter entries in a {@link org.apache.gobblin.metastore.StateStore}.
*
* {@link org.apache.gobblin.metastore.StateStore}s can usually partially push down extensions of this class, so it
* is recommended to use bundled {@link StateStorePredicate} extensions as much as possible.
*/
@RequiredArgsConstructor
public class StateStorePredicate implements Predicate<StateStoreEntryManager> {
/**
* An additional {@link Predicate} for filtering. This predicate is never pushed down.
*/
@Delegate
private final Predicate<StateStoreEntryManager> customPredicate;
}
| 4,338 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/predicates/DatasetPredicate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.predicates;
import java.io.IOException;
import org.apache.gobblin.metastore.metadata.DatasetStateStoreEntryManager;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
import com.google.common.base.Predicate;
/**
* A {@link StateStorePredicate} used to select only entries from a {@link org.apache.gobblin.metastore.DatasetStateStore}
* with the provided dataset urn.
*/
public class DatasetPredicate extends StoreNamePredicate {
  // Raw dataset urn to match; it is sanitized through the store before comparison.
  private final String datasetUrn;

  public DatasetPredicate(String storeName, String datasetUrn, Predicate<StateStoreEntryManager> customPredicate) {
    super(storeName, customPredicate);
    this.datasetUrn = datasetUrn;
  }

  /**
   * Accepts only {@link DatasetStateStoreEntryManager} entries that pass the parent predicate
   * and whose sanitized dataset urn matches this predicate's urn (after sanitizing it through
   * the entry's own state store).
   */
  @Override
  public boolean apply(StateStoreEntryManager input) {
    if (!(input instanceof DatasetStateStoreEntryManager)) {
      return false;
    }
    if (!super.apply(input)) {
      return false;
    }
    DatasetStateStoreEntryManager datasetEntry = (DatasetStateStoreEntryManager) input;
    try {
      String sanitizedUrn = datasetEntry.getStateStore()
          .sanitizeDatasetStatestoreNameFromDatasetURN(getStoreName(), this.datasetUrn);
      return sanitizedUrn.equals(datasetEntry.getSanitizedDatasetUrn());
    } catch (IOException ioe) {
      // Sanitization is an I/O operation on the store; surface failures unchecked.
      throw new RuntimeException(ioe);
    }
  }
}
| 4,339 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/predicates/StoreNamePredicate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.predicates;
import org.apache.gobblin.metastore.metadata.StateStoreEntryManager;
import com.google.common.base.Predicate;
import lombok.Getter;
/**
* A {@link StateStorePredicate} to select only entries with a specific {@link #storeName}.
*/
public class StoreNamePredicate extends StateStorePredicate {
  // Store name every accepted entry must carry.
  private final String storeName;

  public StoreNamePredicate(String storeName, Predicate<StateStoreEntryManager> customPredicate) {
    super(customPredicate);
    this.storeName = storeName;
  }

  public String getStoreName() {
    return this.storeName;
  }

  /**
   * Accepts entries whose store name equals {@link #storeName}; the custom predicate is
   * consulted only after the name check passes.
   */
  @Override
  public boolean apply(StateStoreEntryManager input) {
    if (!input.getStoreName().equals(this.storeName)) {
      return false;
    }
    return super.apply(input);
  }
}
| 4,340 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/SupportedDatabaseVersion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* The supported database version of the class
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface SupportedDatabaseVersion {
  /**
   * Whether the annotated class is the default (fallback) store implementation.
   */
  boolean isDefault();

  /**
   * The database schema version supported by the annotated class.
   */
  String version();
}
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/DatabaseJobHistoryStoreV102.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import org.apache.gobblin.metastore.JobHistoryStore;
import org.apache.gobblin.rest.JobExecutionInfo;
/**
* An implementation of {@link JobHistoryStore} backed by MySQL.
*
* <p>
* The DDLs for the MySQL job history store can be found under metastore/src/main/resources.
* </p>
*
* @author Joel Baranick
*/
@SupportedDatabaseVersion(isDefault = false, version = "1.0.2")
public class DatabaseJobHistoryStoreV102 extends DatabaseJobHistoryStoreV101 implements VersionedDatabaseJobHistoryStore {
  /**
   * Returns the launcher type name recorded for the job execution, or {@code null} when the
   * launcher type is not set.
   */
  @Override
  protected String getLauncherType(JobExecutionInfo info) {
    return info.hasLauncherType() ? info.getLauncherType().name() : null;
  }
}
| 4,342 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/DatabaseJobHistoryStoreV103.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import org.apache.gobblin.metastore.JobHistoryStore;
/**
* An implementation of {@link JobHistoryStore} backed by MySQL.
*
* <p>
* The DDLs for the MySQL job history store can be found under metastore/src/main/resources.
* </p>
*
*/
@SupportedDatabaseVersion(isDefault = false, version = "1.0.3")
public class DatabaseJobHistoryStoreV103 extends DatabaseJobHistoryStoreV101 implements VersionedDatabaseJobHistoryStore {
  // Marker implementation: schema 1.0.3 reuses the 1.0.1 store behavior unchanged; the class
  // exists only so the version-lookup in DatabaseJobHistoryStore can resolve "1.0.3".
}
| 4,343 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/VersionedDatabaseJobHistoryStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import org.apache.gobblin.metastore.JobHistoryStore;
import javax.sql.DataSource;
/**
* Denotes that a class that is a database store.
*/
public interface VersionedDatabaseJobHistoryStore extends JobHistoryStore {
  /**
   * Initializes the datastore with its connection source. Must be called before any
   * {@link JobHistoryStore} operation is invoked.
   *
   * @param dataSource The datasource the datastore should connect to.
   */
  void init(DataSource dataSource);
}
| 4,344 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/DatabaseJobHistoryStoreV100.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.linkedin.data.template.StringMap;
import org.apache.gobblin.metastore.DatabaseJobHistoryStore;
import org.apache.gobblin.metastore.JobHistoryStore;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobExecutionQuery;
import org.apache.gobblin.rest.JobStateEnum;
import org.apache.gobblin.rest.LauncherTypeEnum;
import org.apache.gobblin.rest.Metric;
import org.apache.gobblin.rest.MetricArray;
import org.apache.gobblin.rest.MetricTypeEnum;
import org.apache.gobblin.rest.QueryListType;
import org.apache.gobblin.rest.Table;
import org.apache.gobblin.rest.TableTypeEnum;
import org.apache.gobblin.rest.TaskExecutionInfo;
import org.apache.gobblin.rest.TaskExecutionInfoArray;
import org.apache.gobblin.rest.TaskStateEnum;
import org.apache.gobblin.rest.TimeRange;
/**
* An implementation of {@link JobHistoryStore} backed by MySQL.
*
* <p>
* The DDLs for the MySQL job history store can be found under metastore/src/main/resources.
* </p>
*
* @author Yinan Li
*/
@SupportedDatabaseVersion(isDefault = true, version = "1.0.0")
public class DatabaseJobHistoryStoreV100 implements VersionedDatabaseJobHistoryStore {
private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseJobHistoryStore.class);
private static final String JOB_EXECUTION_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_job_executions (job_name,job_id,start_time,end_time,duration,state,"
+ "launched_tasks,completed_tasks,launcher_type,tracking_url) VALUES(?,?,?,?,?,?,?,?,?,?)";
private static final String TASK_EXECUTION_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_task_executions (task_id,job_id,start_time,end_time,duration,"
+ "state,failure_exception,low_watermark,high_watermark,table_namespace,table_name,table_type) "
+ "VALUES(?,?,?,?,?,?,?,?,?,?,?,?)";
private static final String JOB_METRIC_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_job_metrics (job_id,metric_group,metric_name,"
+ "metric_type,metric_value) VALUES(?,?,?,?,?)";
private static final String TASK_METRIC_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_task_metrics (task_id,metric_group,metric_name,"
+ "metric_type,metric_value) VALUES(?,?,?,?,?)";
private static final String JOB_PROPERTY_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_job_properties (job_id,property_key,property_value) VALUES(?,?,?)";
private static final String TASK_PROPERTY_INSERT_STATEMENT_TEMPLATE =
"INSERT INTO gobblin_task_properties (task_id,property_key,property_value) VALUES(?,?,?)";
private static final String JOB_EXECUTION_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_job_executions SET start_time=?,end_time=?,duration=?,"
+ "state=?,launched_tasks=?,completed_tasks=?,launcher_type=?,tracking_url=? WHERE job_id=?";
private static final String TASK_EXECUTION_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_task_executions SET start_time=?,end_time=?,duration=?,state=?,failure_exception=?,"
+ "low_watermark=?,high_watermark=?,table_namespace=?,table_name=?,table_type=? WHERE task_id=?";
private static final String JOB_METRIC_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_job_metrics SET metric_value=? WHERE job_id=? AND "
+ "metric_group=? AND metric_name=? AND metric_type=?";
private static final String TASK_METRIC_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_task_metrics SET metric_value=? WHERE task_id=? AND "
+ "metric_group=? AND metric_name=? AND metric_type=?";
private static final String JOB_PROPERTY_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_job_properties SET property_value=? WHERE job_id=? AND property_key=?";
private static final String TASK_PROPERTY_UPDATE_STATEMENT_TEMPLATE =
"UPDATE gobblin_task_properties SET property_value=? WHERE task_id=? AND property_key=?";
private static final String LIST_DISTINCT_JOB_EXECUTION_QUERY_TEMPLATE =
"SELECT j.job_id FROM gobblin_job_executions j, " + "(SELECT MAX(last_modified_ts) AS most_recent_ts, job_name "
+ "FROM gobblin_job_executions GROUP BY job_name) max_results "
+ "WHERE j.job_name = max_results.job_name AND j.last_modified_ts = max_results.most_recent_ts";
private static final String LIST_RECENT_JOB_EXECUTION_QUERY_TEMPLATE = "SELECT job_id FROM gobblin_job_executions";
private static final String JOB_NAME_QUERY_BY_TABLE_STATEMENT_TEMPLATE =
"SELECT j.job_name FROM gobblin_job_executions j, gobblin_task_executions t "
+ "WHERE j.job_id=t.job_id AND %s GROUP BY j.job_name";
private static final String JOB_ID_QUERY_BY_JOB_NAME_STATEMENT_TEMPLATE =
"SELECT job_id FROM gobblin_job_executions WHERE job_name=?";
private static final String JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_job_executions WHERE job_id=?";
private static final String TASK_EXECUTION_EXIST_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_task_executions WHERE task_id=?";
private static final String TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_task_executions WHERE job_id=?";
private static final String JOB_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_job_metrics " + "WHERE job_id=? AND metric_group=? AND metric_name=? AND metric_type=?";
private static final String TASK_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_task_metrics " + "WHERE task_id=? AND metric_group=? AND metric_name=? AND metric_type=?";
private static final String JOB_METRIC_QUERY_STATEMENT_TEMPLATE =
"SELECT metric_group,metric_name,metric_type,metric_value FROM gobblin_job_metrics WHERE job_id=?";
private static final String TASK_METRIC_QUERY_STATEMENT_TEMPLATE =
"SELECT metric_group,metric_name,metric_type,metric_value FROM gobblin_task_metrics WHERE task_id=?";
private static final String JOB_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_job_properties WHERE job_id=? AND property_key=?";
private static final String TASK_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE =
"SELECT * FROM gobblin_task_properties WHERE task_id=? AND property_key=?";
private static final String JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE =
"SELECT property_key, property_value FROM gobblin_job_properties WHERE job_id=?";
private static final String TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE =
"SELECT property_key, property_value FROM gobblin_task_properties WHERE task_id=?";
private static final Timestamp DEFAULT_TIMESTAMP = new Timestamp(1000L);
private DataSource dataSource;
@Override
public void init(DataSource dataSource) {
  // Keep the connection source; every query/update draws a fresh connection from it.
  this.dataSource = dataSource;
}
/**
 * Inserts or updates ("upserts") a job execution record together with all of its nested
 * metrics, properties, and task executions, inside a single transaction. The transaction
 * is rolled back on any SQLException, which is then rethrown wrapped in an IOException.
 *
 * @param jobExecutionInfo the job execution record to persist
 * @throws IOException if any database operation fails
 */
@Override
public synchronized void put(JobExecutionInfo jobExecutionInfo) throws IOException {
  Optional<Connection> connectionOptional = Optional.absent();
  try {
    connectionOptional = Optional.of(getConnection());
    Connection connection = connectionOptional.get();
    // All upserts below must commit atomically.
    connection.setAutoCommit(false);
    // Insert or update job execution information
    if (existsJobExecutionInfo(connection, jobExecutionInfo)) {
      updateJobExecutionInfo(connection, jobExecutionInfo);
    } else {
      insertJobExecutionInfo(connection, jobExecutionInfo);
    }
    // Insert or update job metrics
    if (jobExecutionInfo.hasMetrics()) {
      for (Metric metric : jobExecutionInfo.getMetrics()) {
        // Existence check decides between the INSERT and UPDATE statement templates.
        boolean insert =
            !existsMetric(connection, JOB_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE, jobExecutionInfo.getJobId(), metric);
        updateMetric(connection, insert ? JOB_METRIC_INSERT_STATEMENT_TEMPLATE : JOB_METRIC_UPDATE_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), metric, insert);
      }
    }
    // Insert or update job properties
    if (jobExecutionInfo.hasJobProperties()) {
      for (Map.Entry<String, String> entry : jobExecutionInfo.getJobProperties().entrySet()) {
        boolean insert = !existsProperty(connection, JOB_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), entry.getKey());
        updateProperty(connection,
            insert ? JOB_PROPERTY_INSERT_STATEMENT_TEMPLATE : JOB_PROPERTY_UPDATE_STATEMENT_TEMPLATE,
            jobExecutionInfo.getJobId(), entry.getKey(), entry.getValue(), insert);
      }
    }
    // Insert or update task execution information
    if (jobExecutionInfo.hasTaskExecutions()) {
      for (TaskExecutionInfo info : jobExecutionInfo.getTaskExecutions()) {
        // Insert or update task execution information
        if (existsTaskExecutionInfo(connection, info)) {
          updateTaskExecutionInfo(connection, info);
        } else {
          insertTaskExecutionInfo(connection, info);
        }
        // Insert or update task metrics
        if (info.hasMetrics()) {
          for (Metric metric : info.getMetrics()) {
            boolean insert =
                !existsMetric(connection, TASK_METRIC_EXIST_QUERY_STATEMENT_TEMPLATE, info.getTaskId(), metric);
            updateMetric(connection,
                insert ? TASK_METRIC_INSERT_STATEMENT_TEMPLATE : TASK_METRIC_UPDATE_STATEMENT_TEMPLATE,
                info.getTaskId(), metric, insert);
          }
        }
        // Insert or update task properties
        if (info.hasTaskProperties()) {
          for (Map.Entry<String, String> entry : info.getTaskProperties().entrySet()) {
            boolean insert = !existsProperty(connection, TASK_PROPERTY_EXIST_QUERY_STATEMENT_TEMPLATE,
                info.getTaskId(), entry.getKey());
            updateProperty(connection,
                insert ? TASK_PROPERTY_INSERT_STATEMENT_TEMPLATE : TASK_PROPERTY_UPDATE_STATEMENT_TEMPLATE,
                info.getTaskId(), entry.getKey(), entry.getValue(), insert);
          }
        }
      }
    }
    connection.commit();
  } catch (SQLException se) {
    LOGGER.error("Failed to put a new job execution information record", se);
    if (connectionOptional.isPresent()) {
      try {
        // Undo any partial writes from this call.
        connectionOptional.get().rollback();
      } catch (SQLException se1) {
        LOGGER.error("Failed to rollback", se1);
      }
    }
    throw new IOException(se);
  } finally {
    if (connectionOptional.isPresent()) {
      try {
        connectionOptional.get().close();
      } catch (SQLException se) {
        LOGGER.error("Failed to close connection", se);
      }
    }
  }
}
/**
 * Executes the given job execution query, dispatching on the query's ID type
 * (job id, job name, table, or list type) to the appropriate query processor.
 *
 * @param query the query to run; must have both an id and an id type
 * @return the matching job execution records (possibly empty, never null)
 * @throws IOException if the id type is unsupported or any database operation fails
 */
@Override
public synchronized List<JobExecutionInfo> get(JobExecutionQuery query) throws IOException {
  Preconditions.checkArgument(query.hasId() && query.hasIdType());
  Optional<Connection> connectionOptional = Optional.absent();
  try {
    connectionOptional = Optional.of(getConnection());
    Connection connection = connectionOptional.get();
    switch (query.getIdType()) {
      case JOB_ID:
        // A job id identifies at most one execution; wrap it in a (possibly empty) list.
        List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
        JobExecutionInfo jobExecutionInfo =
            processQueryById(connection, query.getId().getString(), query, Filter.MISSING);
        if (jobExecutionInfo != null) {
          jobExecutionInfos.add(jobExecutionInfo);
        }
        return jobExecutionInfos;
      case JOB_NAME:
        return processQueryByJobName(connection, query.getId().getString(), query, Filter.MISSING);
      case TABLE:
        return processQueryByTable(connection, query);
      case LIST_TYPE:
        return processListQuery(connection, query);
      default:
        throw new IOException("Unsupported query ID type: " + query.getIdType().name());
    }
  } catch (SQLException se) {
    LOGGER.error("Failed to execute query: " + query, se);
    throw new IOException(se);
  } finally {
    if (connectionOptional.isPresent()) {
      try {
        connectionOptional.get().close();
      } catch (SQLException se) {
        LOGGER.error("Failed to close connection", se);
      }
    }
  }
}
@Override
public void close() throws IOException {
  // Nothing to do: connections are opened and closed per call, and the DataSource
  // lifecycle is owned by the caller.
}
// Obtains a fresh connection from the injected DataSource; callers are responsible
// for closing it.
private Connection getConnection() throws SQLException {
  return this.dataSource.getConnection();
}
/**
 * Checks whether a job execution row with the given record's job id already exists.
 *
 * @throws SQLException on any database error
 */
private static boolean existsJobExecutionInfo(Connection connection, JobExecutionInfo info) throws SQLException {
  Preconditions.checkArgument(info.hasJobId());
  try (PreparedStatement existsQuery = connection
      .prepareStatement(JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE)) {
    existsQuery.setString(1, info.getJobId());
    try (ResultSet rows = existsQuery.executeQuery()) {
      // Any row at all means the execution is already recorded.
      return rows.next();
    }
  }
}
/**
 * Inserts a new job execution row. Unset optional fields are stored as sentinels:
 * DEFAULT_TIMESTAMP for times, -1 for numeric fields, and NULL for strings.
 * The ++index sequence must follow the column order of
 * JOB_EXECUTION_INSERT_STATEMENT_TEMPLATE exactly.
 *
 * @throws SQLException on any database error
 */
private static void insertJobExecutionInfo(Connection connection, JobExecutionInfo info) throws SQLException {
  Preconditions.checkArgument(info.hasJobName());
  Preconditions.checkArgument(info.hasJobId());
  try (PreparedStatement insertStatement = connection.prepareStatement(JOB_EXECUTION_INSERT_STATEMENT_TEMPLATE)) {
    int index = 0;
    insertStatement.setString(++index, info.getJobName());
    insertStatement.setString(++index, info.getJobId());
    // Timestamps are written against a UTC calendar so values are timezone-independent.
    insertStatement
        .setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : DEFAULT_TIMESTAMP,
            getCalendarUTCInstance());
    insertStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : DEFAULT_TIMESTAMP,
        getCalendarUTCInstance());
    insertStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
    insertStatement.setString(++index, info.hasState() ? info.getState().name() : null);
    insertStatement.setInt(++index, info.hasLaunchedTasks() ? info.getLaunchedTasks() : -1);
    insertStatement.setInt(++index, info.hasCompletedTasks() ? info.getCompletedTasks() : -1);
    insertStatement.setString(++index, info.hasLauncherType() ? info.getLauncherType().name() : null);
    insertStatement.setString(++index, info.hasTrackingUrl() ? info.getTrackingUrl() : null);
    insertStatement.executeUpdate();
  }
}
/**
 * Updates an existing job execution row identified by job id. Uses the same sentinel
 * conventions as insertJobExecutionInfo (DEFAULT_TIMESTAMP / -1 / NULL for unset fields).
 * The ++index sequence must follow the parameter order of
 * JOB_EXECUTION_UPDATE_STATEMENT_TEMPLATE, with job_id last (the WHERE clause).
 *
 * @throws SQLException on any database error
 */
private static void updateJobExecutionInfo(Connection connection, JobExecutionInfo info) throws SQLException {
  Preconditions.checkArgument(info.hasJobId());
  try (PreparedStatement updateStatement = connection.prepareStatement(JOB_EXECUTION_UPDATE_STATEMENT_TEMPLATE)) {
    int index = 0;
    updateStatement
        .setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : DEFAULT_TIMESTAMP,
            getCalendarUTCInstance());
    updateStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : DEFAULT_TIMESTAMP,
        getCalendarUTCInstance());
    updateStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
    updateStatement.setString(++index, info.hasState() ? info.getState().name() : null);
    updateStatement.setInt(++index, info.hasLaunchedTasks() ? info.getLaunchedTasks() : -1);
    updateStatement.setInt(++index, info.hasCompletedTasks() ? info.getCompletedTasks() : -1);
    updateStatement.setString(++index, info.hasLauncherType() ? info.getLauncherType().name() : null);
    updateStatement.setString(++index, info.hasTrackingUrl() ? info.getTrackingUrl() : null);
    // WHERE clause parameter: the job id identifying the row to update.
    updateStatement.setString(++index, info.getJobId());
    updateStatement.executeUpdate();
  }
}
/**
 * Checks whether a task execution row with the given record's task id already exists.
 *
 * @throws SQLException on any database error
 */
private static boolean existsTaskExecutionInfo(Connection connection, TaskExecutionInfo info) throws SQLException {
  Preconditions.checkArgument(info.hasTaskId());
  try (PreparedStatement existsQuery = connection.prepareStatement(TASK_EXECUTION_EXIST_QUERY_STATEMENT_TEMPLATE)) {
    existsQuery.setString(1, info.getTaskId());
    try (ResultSet rows = existsQuery.executeQuery()) {
      // Any row at all means the task execution is already recorded.
      return rows.next();
    }
  }
}
  /**
   * Inserts a new task execution record.
   *
   * <p>Parameters are bound positionally via pre-increment and must follow the column order of
   * {@code TASK_EXECUTION_INSERT_STATEMENT_TEMPLATE}. Missing optional fields are written as
   * sentinel values ({@code DEFAULT_TIMESTAMP}, -1, or SQL NULL); table fields are only written
   * when both the table record and the individual field are present.</p>
   *
   * @param connection an open JDBC connection
   * @param info the task execution information to insert; must carry both a task ID and a job ID
   * @throws SQLException if the insert fails
   */
  private static void insertTaskExecutionInfo(Connection connection, TaskExecutionInfo info) throws SQLException {
    Preconditions.checkArgument(info.hasTaskId());
    Preconditions.checkArgument(info.hasJobId());
    try (PreparedStatement insertStatement = connection.prepareStatement(TASK_EXECUTION_INSERT_STATEMENT_TEMPLATE)) {
      int index = 0;
      insertStatement.setString(++index, info.getTaskId());
      insertStatement.setString(++index, info.getJobId());
      // Timestamps are bound with a UTC calendar so stored values are timezone-independent.
      insertStatement
          .setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : DEFAULT_TIMESTAMP,
              getCalendarUTCInstance());
      insertStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : DEFAULT_TIMESTAMP,
          getCalendarUTCInstance());
      insertStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
      insertStatement.setString(++index, info.hasState() ? info.getState().name() : null);
      insertStatement.setString(++index, info.hasFailureException() ? info.getFailureException() : null);
      insertStatement.setLong(++index, info.hasLowWatermark() ? info.getLowWatermark() : -1);
      insertStatement.setLong(++index, info.hasHighWatermark() ? info.getHighWatermark() : -1);
      insertStatement.setString(++index,
          info.hasTable() && info.getTable().hasNamespace() ? info.getTable().getNamespace() : null);
      insertStatement
          .setString(++index, info.hasTable() && info.getTable().hasName() ? info.getTable().getName() : null);
      insertStatement
          .setString(++index, info.hasTable() && info.getTable().hasType() ? info.getTable().getType().name() : null);
      insertStatement.executeUpdate();
    }
  }
  /**
   * Updates an existing task execution record, keyed by task ID.
   *
   * <p>Parameters are bound positionally via pre-increment and must follow the column order of
   * {@code TASK_EXECUTION_UPDATE_STATEMENT_TEMPLATE}. Missing optional fields are written as
   * sentinel values ({@code DEFAULT_TIMESTAMP}, -1, or SQL NULL).</p>
   *
   * @param connection an open JDBC connection
   * @param info the task execution information to persist; must carry a task ID
   * @throws SQLException if the update fails
   */
  private static void updateTaskExecutionInfo(Connection connection, TaskExecutionInfo info) throws SQLException {
    Preconditions.checkArgument(info.hasTaskId());
    try (PreparedStatement updateStatement = connection.prepareStatement(TASK_EXECUTION_UPDATE_STATEMENT_TEMPLATE)) {
      int index = 0;
      // Timestamps are bound with a UTC calendar so stored values are timezone-independent.
      updateStatement
          .setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : DEFAULT_TIMESTAMP,
              getCalendarUTCInstance());
      updateStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : DEFAULT_TIMESTAMP,
          getCalendarUTCInstance());
      updateStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
      updateStatement.setString(++index, info.hasState() ? info.getState().name() : null);
      updateStatement.setString(++index, info.hasFailureException() ? info.getFailureException() : null);
      updateStatement.setLong(++index, info.hasLowWatermark() ? info.getLowWatermark() : -1);
      updateStatement.setLong(++index, info.hasHighWatermark() ? info.getHighWatermark() : -1);
      updateStatement.setString(++index,
          info.hasTable() && info.getTable().hasNamespace() ? info.getTable().getNamespace() : null);
      updateStatement
          .setString(++index, info.hasTable() && info.getTable().hasName() ? info.getTable().getName() : null);
      updateStatement
          .setString(++index, info.hasTable() && info.getTable().hasType() ? info.getTable().getType().name() : null);
      // Last parameter feeds the WHERE clause that selects the row to update.
      updateStatement.setString(++index, info.getTaskId());
      updateStatement.executeUpdate();
    }
  }
  /**
   * Checks whether a metric record already exists for the given owner ID.
   *
   * <p>The same method serves both job and task metrics: {@code template} selects the
   * table to query and {@code id} is the corresponding job or task ID. A metric is
   * identified by the (id, group, name, type) tuple.</p>
   *
   * @param connection an open JDBC connection
   * @param template the existence-query SQL template (job or task metric table)
   * @param id the job or task ID that owns the metric; must be non-empty
   * @param metric the metric to look up; must carry group, name, and type
   * @return {@code true} if a matching metric record exists
   * @throws SQLException if the query fails
   */
  private static boolean existsMetric(Connection connection, String template, String id, Metric metric)
      throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
    Preconditions.checkArgument(metric.hasGroup());
    Preconditions.checkArgument(metric.hasName());
    Preconditions.checkArgument(metric.hasType());
    try (PreparedStatement queryStatement = connection.prepareStatement(template)) {
      int index = 0;
      queryStatement.setString(++index, id);
      queryStatement.setString(++index, metric.getGroup());
      queryStatement.setString(++index, metric.getName());
      queryStatement.setString(++index, metric.getType().name());
      try (ResultSet resultSet = queryStatement.executeQuery()) {
        return resultSet.next();
      }
    }
  }
  /**
   * Inserts or updates a metric record for the given owner ID.
   *
   * <p>The same method serves both job and task metrics: {@code template} selects the
   * target table and {@code id} is the corresponding job or task ID. Insert and update
   * templates take the same values in a different parameter order, hence the branch on
   * {@code insert}.</p>
   *
   * @param connection an open JDBC connection
   * @param template the insert or update SQL template
   * @param id the job or task ID that owns the metric; must be non-empty
   * @param metric the metric to persist; must carry group, name, type, and value
   * @param insert {@code true} to bind parameters in insert order, {@code false} for update order
   * @throws SQLException if the statement fails
   */
  private static void updateMetric(Connection connection, String template, String id, Metric metric, boolean insert)
      throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
    Preconditions.checkArgument(metric.hasGroup());
    Preconditions.checkArgument(metric.hasName());
    Preconditions.checkArgument(metric.hasType());
    Preconditions.checkArgument(metric.hasValue());
    try (PreparedStatement updateStatement = connection.prepareStatement(template)) {
      int index = 0;
      if (insert) {
        // Insert order: id, group, name, type, value.
        updateStatement.setString(++index, id);
        updateStatement.setString(++index, metric.getGroup());
        updateStatement.setString(++index, metric.getName());
        updateStatement.setString(++index, metric.getType().name());
        updateStatement.setString(++index, metric.getValue());
      } else {
        // Update order: value first (SET clause), then the identifying tuple (WHERE clause).
        updateStatement.setString(++index, metric.getValue());
        updateStatement.setString(++index, id);
        updateStatement.setString(++index, metric.getGroup());
        updateStatement.setString(++index, metric.getName());
        updateStatement.setString(++index, metric.getType().name());
      }
      updateStatement.executeUpdate();
    }
  }
private static boolean existsProperty(Connection connection, String template, String id, String key)
throws SQLException {
Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
try (PreparedStatement queryStatement = connection.prepareStatement(template)) {
int index = 0;
queryStatement.setString(++index, id);
queryStatement.setString(++index, key);
try (ResultSet resultSet = queryStatement.executeQuery()) {
return resultSet.next();
}
}
}
  /**
   * Inserts or updates a property record for the given owner ID.
   *
   * <p>Serves both job and task properties: {@code template} selects the target table and
   * {@code id} is the corresponding job or task ID. Insert and update templates take the same
   * values in a different parameter order, hence the branch on {@code insert}.</p>
   *
   * @param connection an open JDBC connection
   * @param template the insert or update SQL template
   * @param id the job or task ID that owns the property; must be non-empty
   * @param key the property key; must be non-empty
   * @param value the property value; must be non-empty
   * @param insert {@code true} to bind parameters in insert order, {@code false} for update order
   * @throws SQLException if the statement fails
   */
  private static void updateProperty(Connection connection, String template, String id, String key, String value,
      boolean insert) throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
    Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
    Preconditions.checkArgument(!Strings.isNullOrEmpty(value));
    try (PreparedStatement updateStatement = connection.prepareStatement(template)) {
      int index = 0;
      if (insert) {
        // Insert order: id, key, value.
        updateStatement.setString(++index, id);
        updateStatement.setString(++index, key);
        updateStatement.setString(++index, value);
      } else {
        // Update order: value first (SET clause), then id and key (WHERE clause).
        updateStatement.setString(++index, value);
        updateStatement.setString(++index, id);
        updateStatement.setString(++index, key);
      }
      updateStatement.executeUpdate();
    }
  }
  /**
   * Loads a single {@link JobExecutionInfo} by job ID, optionally enriched with job metrics,
   * job properties, task executions, task metrics, and task properties as requested by the
   * query flags.
   *
   * @param connection an open JDBC connection
   * @param jobId the job ID to load; must be non-empty
   * @param query the query carrying the include/filter flags
   * @param tableFilter optional filter applied to the task-execution query; may be
   *        {@code Filter.MISSING}
   * @return the populated job execution info, or {@code null} if no record exists for the ID
   * @throws SQLException if any of the queries fail
   */
  private JobExecutionInfo processQueryById(Connection connection, String jobId, JobExecutionQuery query,
      Filter tableFilter) throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(jobId));
    // Query job execution information
    try (PreparedStatement jobExecutionQueryStatement = connection
        .prepareStatement(JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE)) {
      jobExecutionQueryStatement.setString(1, jobId);
      try (ResultSet jobRs = jobExecutionQueryStatement.executeQuery()) {
        if (!jobRs.next()) {
          // No record for this job ID.
          return null;
        }
        JobExecutionInfo jobExecutionInfo = resultSetToJobExecutionInfo(jobRs);
        // Query job metrics
        if (query.isIncludeJobMetrics()) {
          try (PreparedStatement jobMetricQueryStatement = connection
              .prepareStatement(JOB_METRIC_QUERY_STATEMENT_TEMPLATE)) {
            // Column 2 of the job-execution row is used as the metric lookup key
            // (presumably the job_id column of gobblin_job_executions — TODO confirm
            // against the table DDL).
            jobMetricQueryStatement.setString(1, jobRs.getString(2));
            try (ResultSet jobMetricRs = jobMetricQueryStatement.executeQuery()) {
              MetricArray jobMetrics = new MetricArray();
              while (jobMetricRs.next()) {
                jobMetrics.add(resultSetToMetric(jobMetricRs));
              }
              // Add job metrics
              jobExecutionInfo.setMetrics(jobMetrics);
            }
          }
        }
        // Query job properties; if the query names specific property keys, only those are kept.
        Set<String> requestedJobPropertyKeys = null;
        if (query.hasJobProperties()) {
          requestedJobPropertyKeys = new HashSet<>(Arrays.asList(query.getJobProperties().split(",")));
        }
        try (PreparedStatement jobPropertiesQueryStatement = connection
            .prepareStatement(JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE)) {
          jobPropertiesQueryStatement.setString(1, jobExecutionInfo.getJobId());
          try (ResultSet jobPropertiesRs = jobPropertiesQueryStatement.executeQuery()) {
            Map<String, String> jobProperties = Maps.newHashMap();
            while (jobPropertiesRs.next()) {
              Map.Entry<String, String> property = resultSetToProperty(jobPropertiesRs);
              // A null key set means "no filter requested" — keep every property.
              if (requestedJobPropertyKeys == null || requestedJobPropertyKeys.contains(property.getKey())) {
                jobProperties.put(property.getKey(), property.getValue());
              }
            }
            // Add job properties
            jobExecutionInfo.setJobProperties(new StringMap(jobProperties));
          }
        }
        // Query task execution information
        if (query.isIncludeTaskExecutions()) {
          TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
          String taskExecutionQuery = TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE;
          // Add table filter if applicable
          if (tableFilter.isPresent()) {
            taskExecutionQuery += " AND " + tableFilter;
          }
          try (PreparedStatement taskExecutionQueryStatement = connection.prepareStatement(taskExecutionQuery)) {
            taskExecutionQueryStatement.setString(1, jobId);
            if (tableFilter.isPresent()) {
              // Filter parameters start at index 2, after the job ID bound above.
              tableFilter.addParameters(taskExecutionQueryStatement, 2);
            }
            try (ResultSet taskRs = taskExecutionQueryStatement.executeQuery()) {
              while (taskRs.next()) {
                TaskExecutionInfo taskExecutionInfo = resultSetToTaskExecutionInfo(taskRs);
                // Query task metrics for each task execution record
                if (query.isIncludeTaskMetrics()) {
                  try (PreparedStatement taskMetricQueryStatement = connection
                      .prepareStatement(TASK_METRIC_QUERY_STATEMENT_TEMPLATE)) {
                    taskMetricQueryStatement.setString(1, taskExecutionInfo.getTaskId());
                    try (ResultSet taskMetricRs = taskMetricQueryStatement.executeQuery()) {
                      MetricArray taskMetrics = new MetricArray();
                      while (taskMetricRs.next()) {
                        taskMetrics.add(resultSetToMetric(taskMetricRs));
                      }
                      // Add task metrics
                      taskExecutionInfo.setMetrics(taskMetrics);
                    }
                  }
                }
                taskExecutionInfos.add(taskExecutionInfo);
                // Query task properties; if the query names specific keys, only those are kept.
                Set<String> queryTaskPropertyKeys = null;
                if (query.hasTaskProperties()) {
                  queryTaskPropertyKeys = new HashSet<>(Arrays.asList(query.getTaskProperties().split(",")));
                }
                try (PreparedStatement taskPropertiesQueryStatement = connection
                    .prepareStatement(TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE)) {
                  taskPropertiesQueryStatement.setString(1, taskExecutionInfo.getTaskId());
                  try (ResultSet taskPropertiesRs = taskPropertiesQueryStatement.executeQuery()) {
                    Map<String, String> taskProperties = Maps.newHashMap();
                    while (taskPropertiesRs.next()) {
                      Map.Entry<String, String> property = resultSetToProperty(taskPropertiesRs);
                      if (queryTaskPropertyKeys == null || queryTaskPropertyKeys.contains(property.getKey())) {
                        taskProperties.put(property.getKey(), property.getValue());
                      }
                    }
                    // Add task properties
                    taskExecutionInfo.setTaskProperties(new StringMap(taskProperties));
                  }
                }
              }
              // Add task execution information
              jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
            }
          }
        }
        return jobExecutionInfo;
      }
    }
  }
  /**
   * Loads job executions by job name, most recent first, optionally constrained by the query's
   * time range and result limit. Each matching job ID is fully resolved via
   * {@link #processQueryById}.
   *
   * @param connection an open JDBC connection
   * @param jobName the job name to query; must be non-empty
   * @param query the query carrying the time range, limit, and include flags
   * @param tableFilter optional table filter forwarded to the per-ID query
   * @return the matching job executions, ordered by creation timestamp descending
   * @throws SQLException if a query fails or the time range cannot be parsed
   */
  private List<JobExecutionInfo> processQueryByJobName(Connection connection, String jobName, JobExecutionQuery query,
      Filter tableFilter) throws SQLException {
    Preconditions.checkArgument(!Strings.isNullOrEmpty(jobName));
    // Construct the query for job IDs by a given job name
    Filter timeRangeFilter = Filter.MISSING;
    String jobIdByNameQuery = JOB_ID_QUERY_BY_JOB_NAME_STATEMENT_TEMPLATE;
    if (query.hasTimeRange()) {
      // Add time range filter if applicable
      try {
        timeRangeFilter = constructTimeRangeFilter(query.getTimeRange());
        if (timeRangeFilter.isPresent()) {
          jobIdByNameQuery += " AND " + timeRangeFilter;
        }
      } catch (ParseException pe) {
        // Surface parse failures as SQLException so callers see a single failure type.
        LOGGER.error("Failed to parse the query time range", pe);
        throw new SQLException(pe);
      }
    }
    // Add ORDER BY
    jobIdByNameQuery += " ORDER BY created_ts DESC";
    List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
    // Query job IDs by the given job name
    try (PreparedStatement queryStatement = connection.prepareStatement(jobIdByNameQuery)) {
      int limit = query.getLimit();
      if (limit > 0) {
        // Cap the number of job IDs returned; non-positive limit means "no limit".
        queryStatement.setMaxRows(limit);
      }
      queryStatement.setString(1, jobName);
      if (timeRangeFilter.isPresent()) {
        // Time range parameters start at index 2, after the job name bound above.
        timeRangeFilter.addParameters(queryStatement, 2);
      }
      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          jobExecutionInfos.add(processQueryById(connection, rs.getString(1), query, tableFilter));
        }
      }
    }
    return jobExecutionInfos;
  }
  /**
   * Loads job executions for all jobs that have touched the table described by the query ID.
   * First resolves the distinct job names matching the table filter, then delegates each name
   * to {@link #processQueryByJobName}, forwarding the same table filter so task executions are
   * restricted to the queried table.
   *
   * @param connection an open JDBC connection
   * @param query the query; its ID must be of table type
   * @return the matching job executions
   * @throws SQLException if a query fails
   */
  private List<JobExecutionInfo> processQueryByTable(Connection connection, JobExecutionQuery query)
      throws SQLException {
    Preconditions.checkArgument(query.getId().isTable());
    Filter tableFilter = constructTableFilter(query.getId().getTable());
    // Construct the query for job names by table definition
    String jobNameByTableQuery = String.format(JOB_NAME_QUERY_BY_TABLE_STATEMENT_TEMPLATE, tableFilter);
    List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
    // Query job names by table definition
    try (PreparedStatement queryStatement = connection.prepareStatement(jobNameByTableQuery)) {
      if (tableFilter.isPresent()) {
        tableFilter.addParameters(queryStatement, 1);
      }
      try (ResultSet rs = queryStatement.executeQuery()) {
        while (rs.next()) {
          jobExecutionInfos.addAll(processQueryByJobName(connection, rs.getString(1), query, tableFilter));
        }
      }
    }
    return jobExecutionInfos;
  }
  /**
   * Handles list-type queries: either the most recent execution of each distinct job name
   * ({@code DISTINCT}, optionally constrained by a time range), or simply the most recent
   * executions overall. Each resulting job ID is fully resolved via {@link #processQueryById}.
   *
   * @param connection an open JDBC connection
   * @param query the query; its ID must be of list type
   * @return the matching job executions, ordered by last-modified timestamp descending
   * @throws SQLException if a query fails or the time range cannot be parsed
   */
  private List<JobExecutionInfo> processListQuery(Connection connection, JobExecutionQuery query) throws SQLException {
    Preconditions.checkArgument(query.getId().isQueryListType());
    Filter timeRangeFilter = Filter.MISSING;
    QueryListType queryType = query.getId().getQueryListType();
    String listJobExecutionsQuery = "";
    if (queryType == QueryListType.DISTINCT) {
      listJobExecutionsQuery = LIST_DISTINCT_JOB_EXECUTION_QUERY_TEMPLATE;
      if (query.hasTimeRange()) {
        try {
          timeRangeFilter = constructTimeRangeFilter(query.getTimeRange());
          if (timeRangeFilter.isPresent()) {
            listJobExecutionsQuery += " AND " + timeRangeFilter;
          }
        } catch (ParseException pe) {
          // Surface parse failures as SQLException so callers see a single failure type.
          LOGGER.error("Failed to parse the query time range", pe);
          throw new SQLException(pe);
        }
      }
    } else {
      // Non-DISTINCT list queries ignore the time range and list recent executions.
      listJobExecutionsQuery = LIST_RECENT_JOB_EXECUTION_QUERY_TEMPLATE;
    }
    listJobExecutionsQuery += " ORDER BY last_modified_ts DESC";
    try (PreparedStatement queryStatement = connection.prepareStatement(listJobExecutionsQuery)) {
      int limit = query.getLimit();
      if (limit > 0) {
        // Cap the number of job IDs returned; non-positive limit means "no limit".
        queryStatement.setMaxRows(limit);
      }
      if (timeRangeFilter.isPresent()) {
        timeRangeFilter.addParameters(queryStatement, 1);
      }
      try (ResultSet rs = queryStatement.executeQuery()) {
        List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
        while (rs.next()) {
          jobExecutionInfos.add(processQueryById(connection, rs.getString(1), query, Filter.MISSING));
        }
        return jobExecutionInfos;
      }
    }
  }
private JobExecutionInfo resultSetToJobExecutionInfo(ResultSet rs) throws SQLException {
JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
jobExecutionInfo.setJobName(rs.getString("job_name"));
jobExecutionInfo.setJobId(rs.getString("job_id"));
try {
jobExecutionInfo.setStartTime(rs.getTimestamp("start_time").getTime());
} catch (SQLException se) {
jobExecutionInfo.setStartTime(0);
}
try {
jobExecutionInfo.setEndTime(rs.getTimestamp("end_time").getTime());
} catch (SQLException se) {
jobExecutionInfo.setEndTime(0);
}
jobExecutionInfo.setDuration(rs.getLong("duration"));
String state = rs.getString("state");
if (!Strings.isNullOrEmpty(state)) {
jobExecutionInfo.setState(JobStateEnum.valueOf(state));
}
jobExecutionInfo.setLaunchedTasks(rs.getInt("launched_tasks"));
jobExecutionInfo.setCompletedTasks(rs.getInt("completed_tasks"));
String launcherType = rs.getString("launcher_type");
if (!Strings.isNullOrEmpty(launcherType)) {
jobExecutionInfo.setLauncherType(LauncherTypeEnum.valueOf(launcherType));
}
String trackingUrl = rs.getString("tracking_url");
if (!Strings.isNullOrEmpty(trackingUrl)) {
jobExecutionInfo.setTrackingUrl(trackingUrl);
}
return jobExecutionInfo;
}
private static TaskExecutionInfo resultSetToTaskExecutionInfo(ResultSet rs) throws SQLException {
TaskExecutionInfo taskExecutionInfo = new TaskExecutionInfo();
taskExecutionInfo.setTaskId(rs.getString("task_id"));
taskExecutionInfo.setJobId(rs.getString("job_id"));
try {
taskExecutionInfo.setStartTime(rs.getTimestamp("start_time").getTime());
} catch (SQLException se) {
taskExecutionInfo.setStartTime(0);
}
try {
taskExecutionInfo.setEndTime(rs.getTimestamp("end_time").getTime());
} catch (SQLException se) {
taskExecutionInfo.setEndTime(0);
}
taskExecutionInfo.setDuration(rs.getLong("duration"));
String state = rs.getString("state");
if (!Strings.isNullOrEmpty(state)) {
taskExecutionInfo.setState(TaskStateEnum.valueOf(state));
}
String failureException = rs.getString("failure_exception");
if (!Strings.isNullOrEmpty(failureException)) {
taskExecutionInfo.setFailureException(failureException);
}
taskExecutionInfo.setLowWatermark(rs.getLong("low_watermark"));
taskExecutionInfo.setHighWatermark(rs.getLong("high_watermark"));
Table table = new Table();
String namespace = rs.getString("table_namespace");
if (!Strings.isNullOrEmpty(namespace)) {
table.setNamespace(namespace);
}
String name = rs.getString("table_name");
if (!Strings.isNullOrEmpty(name)) {
table.setName(name);
}
String type = rs.getString("table_type");
if (!Strings.isNullOrEmpty(type)) {
table.setType(TableTypeEnum.valueOf(type));
}
taskExecutionInfo.setTable(table);
return taskExecutionInfo;
}
private static Metric resultSetToMetric(ResultSet rs) throws SQLException {
Metric metric = new Metric();
metric.setGroup(rs.getString("metric_group"));
metric.setName(rs.getString("metric_name"));
metric.setType(MetricTypeEnum.valueOf(rs.getString("metric_type")));
metric.setValue(rs.getString("metric_value"));
return metric;
}
private static AbstractMap.SimpleEntry<String, String> resultSetToProperty(ResultSet rs) throws SQLException {
return new AbstractMap.SimpleEntry<>(rs.getString(1), rs.getString(2));
}
  /**
   * Builds a SQL {@link Filter} restricting job executions to the given time range.
   * Produces {@code start_time>?} and/or {@code end_time<?} predicates (strict
   * inequalities), joined with {@code AND}. Returns {@code Filter.MISSING} when the range
   * has no time format or neither bound is set.
   *
   * <p>Note: a fresh {@link SimpleDateFormat} is created per call because the class is not
   * thread-safe and must not be shared.</p>
   *
   * @param timeRange the requested time range, including the format its bounds are encoded in
   * @return the constructed filter, or {@code Filter.MISSING} when nothing applies
   * @throws ParseException if a bound cannot be parsed with the given time format
   */
  private Filter constructTimeRangeFilter(TimeRange timeRange) throws ParseException {
    List<String> values = Lists.newArrayList();
    StringBuilder sb = new StringBuilder();
    if (!timeRange.hasTimeFormat()) {
      LOGGER.warn("Skipping the time range filter as there is no time format in: " + timeRange);
      return Filter.MISSING;
    }
    DateFormat dateFormat = new SimpleDateFormat(timeRange.getTimeFormat());
    boolean hasStartTime = timeRange.hasStartTime();
    if (hasStartTime) {
      sb.append("start_time>?");
      // Bind values are carried as Timestamp strings to match the column type.
      values.add(new Timestamp(dateFormat.parse(timeRange.getStartTime()).getTime()).toString());
    }
    if (timeRange.hasEndTime()) {
      if (hasStartTime) {
        sb.append(" AND ");
      }
      sb.append("end_time<?");
      values.add(new Timestamp(dateFormat.parse(timeRange.getEndTime()).getTime()).toString());
    }
    if (sb.length() > 0) {
      return new Filter(sb.toString(), values);
    }
    return Filter.MISSING;
  }
private Filter constructTableFilter(Table table) {
List<String> values = Lists.newArrayList();
StringBuilder sb = new StringBuilder();
boolean hasNamespace = table.hasNamespace();
if (hasNamespace) {
sb.append("table_namespace=?");
values.add(table.getNamespace());
}
boolean hasName = table.hasName();
if (hasName) {
if (hasNamespace) {
sb.append(" AND ");
}
sb.append("table_name=?");
values.add(table.getName());
}
if (table.hasType()) {
if (hasName) {
sb.append(" AND ");
}
sb.append("table_type=?");
values.add(table.getType().name());
}
if (sb.length() > 0) {
return new Filter(sb.toString(), values);
}
return Filter.MISSING;
}
private static Calendar getCalendarUTCInstance() {
return Calendar.getInstance(TimeZone.getTimeZone("UTC"));
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import javax.sql.DataSource;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.AbstractMap;
import java.util.Arrays;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.linkedin.data.template.GetMode;
import com.linkedin.data.template.StringMap;
import org.apache.gobblin.metastore.JobHistoryStore;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.JobExecutionQuery;
import org.apache.gobblin.rest.JobStateEnum;
import org.apache.gobblin.rest.LauncherTypeEnum;
import org.apache.gobblin.rest.Metric;
import org.apache.gobblin.rest.MetricArray;
import org.apache.gobblin.rest.MetricTypeEnum;
import org.apache.gobblin.rest.QueryListType;
import org.apache.gobblin.rest.Table;
import org.apache.gobblin.rest.TableTypeEnum;
import org.apache.gobblin.rest.TaskExecutionInfo;
import org.apache.gobblin.rest.TaskExecutionInfoArray;
import org.apache.gobblin.rest.TaskStateEnum;
import org.apache.gobblin.rest.TimeRange;
/**
* An implementation of {@link JobHistoryStore} backed by MySQL.
*
* <p>
* The DDLs for the MySQL job history store can be found under metastore/src/main/resources.
* </p>
*
* @author Yinan Li
*/
@SupportedDatabaseVersion(isDefault = false, version = "1.0.1")
public class DatabaseJobHistoryStoreV101 implements VersionedDatabaseJobHistoryStore {
  private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseJobHistoryStoreV101.class);
  // Upsert (INSERT ... ON DUPLICATE KEY UPDATE, MySQL-specific) statements: a single round
  // trip either inserts a new row or refreshes the mutable columns of an existing one.
  private static final String JOB_EXECUTION_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_job_executions (job_name,job_id,start_time,end_time,duration,state,"
          + "launched_tasks,completed_tasks,launcher_type,tracking_url) VALUES(?,?,?,?,?,?,?,?,?,?)"
          + " ON DUPLICATE KEY UPDATE start_time=VALUES(start_time),end_time=VALUES(end_time),"
          + "duration=VALUES(duration),state=VALUES(state),launched_tasks=VALUES(launched_tasks),"
          + "completed_tasks=VALUES(completed_tasks),launcher_type=VALUES(launcher_type),"
          + "tracking_url=VALUES(tracking_url)";
  private static final String TASK_EXECUTION_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_task_executions (task_id,job_id,start_time,end_time,duration,"
          + "state,failure_exception,low_watermark,high_watermark,table_namespace,table_name,table_type) "
          + "VALUES(?,?,?,?,?,?,?,?,?,?,?,?) ON DUPLICATE KEY UPDATE start_time=VALUES(start_time),"
          + "end_time=VALUES(end_time),duration=VALUES(duration),state=VALUES(state),"
          + "failure_exception=VALUES(failure_exception),low_watermark=VALUES(low_watermark),"
          + "high_watermark=VALUES(high_watermark),table_namespace=VALUES(table_namespace),"
          + "table_name=VALUES(table_name),table_type=VALUES(table_type)";
  private static final String JOB_METRIC_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_job_metrics (job_id,metric_group,metric_name,"
          + "metric_type,metric_value) VALUES(?,?,?,?,?) ON DUPLICATE KEY UPDATE "
          + "metric_value=VALUES(metric_value)";
  private static final String TASK_METRIC_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_task_metrics (task_id,metric_group,metric_name,"
          + "metric_type,metric_value) VALUES(?,?,?,?,?) ON DUPLICATE KEY UPDATE "
          + "metric_value=VALUES(metric_value)";
  private static final String JOB_PROPERTY_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_job_properties (job_id,property_key,property_value) VALUES(?,?,?)"
          + " ON DUPLICATE KEY UPDATE property_value=VALUES(property_value)";
  private static final String TASK_PROPERTY_UPSERT_STATEMENT_TEMPLATE =
      "INSERT INTO gobblin_task_properties (task_id,property_key,property_value) VALUES(?,?,?)"
          + " ON DUPLICATE KEY UPDATE property_value=VALUES(property_value)";
  // Query templates. The "%s" placeholders are filled with dynamically constructed
  // fragments (filters or comma-separated job ID lists) before preparation.
  private static final String LIST_DISTINCT_JOB_EXECUTION_QUERY_TEMPLATE =
      "SELECT j.job_id FROM gobblin_job_executions j, "
          + "(SELECT MAX(last_modified_ts) AS most_recent_ts, job_name "
          + "FROM gobblin_job_executions%s GROUP BY job_name) max_results "
          + "WHERE j.job_name = max_results.job_name AND j.last_modified_ts = max_results.most_recent_ts";
  private static final String LIST_RECENT_JOB_EXECUTION_QUERY_TEMPLATE =
      "SELECT job_id FROM gobblin_job_executions%s";
  private static final String JOB_NAME_QUERY_BY_TABLE_STATEMENT_TEMPLATE =
      "SELECT j.job_name FROM gobblin_job_executions j, gobblin_task_executions t "
          + "WHERE j.job_id=t.job_id AND %s%s GROUP BY j.job_name";
  private static final String JOB_ID_QUERY_BY_JOB_NAME_STATEMENT_TEMPLATE =
      "SELECT job_id FROM gobblin_job_executions WHERE job_name=?";
  private static final String JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE =
      "SELECT * FROM gobblin_job_executions WHERE job_id IN (%s)";
  private static final String TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE =
      "SELECT * FROM gobblin_task_executions WHERE job_id IN (%s)";
  private static final String JOB_METRIC_QUERY_STATEMENT_TEMPLATE =
      "SELECT job_id,metric_group,metric_name,metric_type,metric_value FROM gobblin_job_metrics WHERE job_id IN (%s)";
  private static final String TASK_METRIC_QUERY_STATEMENT_TEMPLATE =
      "SELECT job_id,m.task_id,metric_group,metric_name,metric_type,metric_value FROM gobblin_task_metrics m JOIN gobblin_task_executions t ON t.task_id = m.task_id WHERE job_id IN (%s)";
  private static final String JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE =
      "SELECT job_id,property_key,property_value FROM gobblin_job_properties WHERE job_id IN (%s)";
  private static final String TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE =
      "SELECT job_id,p.task_id,property_key,property_value FROM gobblin_task_properties p JOIN gobblin_task_executions t ON t.task_id = p.task_id WHERE job_id IN (%s)";
  // Excludes COMMITTED jobs that launched no tasks from some listings.
  private static final String FILTER_JOBS_WITH_TASKS = "(`state` != 'COMMITTED' OR launched_tasks > 0)";
  // Injected via init(); the store never creates or owns the underlying pool.
  private DataSource dataSource;
  /**
   * Initializes the store with the {@link DataSource} to obtain connections from.
   * Must be called before {@link #put} or {@link #get}.
   */
  @Override
  public void init(DataSource dataSource) {
    this.dataSource = dataSource;
  }
  /**
   * Persists the given job execution information — job record, metrics, and properties,
   * plus any task executions with their metrics and properties — in a single transaction.
   *
   * <p>Auto-commit is disabled so that all upserts commit atomically; on any
   * {@link SQLException} the transaction is rolled back and the failure is rethrown as an
   * {@link IOException}. The connection is always closed in the finally block.</p>
   *
   * @param jobExecutionInfo the job execution information to persist
   * @throws IOException if any database operation fails
   */
  @Override
  public synchronized void put(JobExecutionInfo jobExecutionInfo)
      throws IOException {
    Optional<Connection> connectionOptional = Optional.absent();
    try {
      connectionOptional = Optional.of(getConnection());
      Connection connection = connectionOptional.get();
      connection.setAutoCommit(false);
      // Insert or update job execution information
      upsertJobExecutionInfo(connection, jobExecutionInfo);
      upsertJobMetrics(connection, jobExecutionInfo);
      upsertJobProperties(connection, jobExecutionInfo);
      // Insert or update task execution information
      if (jobExecutionInfo.hasTaskExecutions()) {
        upsertTaskExecutionInfos(connection, jobExecutionInfo.getTaskExecutions());
        upsertTaskMetrics(connection, jobExecutionInfo.getTaskExecutions());
        // Job properties are passed down so task properties that merely duplicate them
        // can be skipped.
        Optional<StringMap> jobProperties = Optional.absent();
        if (jobExecutionInfo.hasJobProperties()) {
          jobProperties = Optional.of(jobExecutionInfo.getJobProperties());
        }
        upsertTaskProperties(connection, jobProperties, jobExecutionInfo.getTaskExecutions());
      }
      connection.commit();
    } catch (SQLException se) {
      LOGGER.error("Failed to put a new job execution information record", se);
      if (connectionOptional.isPresent()) {
        try {
          connectionOptional.get().rollback();
        } catch (SQLException se1) {
          // Rollback failure is logged but the original exception is the one propagated.
          LOGGER.error("Failed to rollback", se1);
        }
      }
      throw new IOException(se);
    } finally {
      if (connectionOptional.isPresent()) {
        try {
          connectionOptional.get().close();
        } catch (SQLException se) {
          LOGGER.error("Failed to close connection", se);
        }
      }
    }
  }
  /**
   * Executes the given query against the job history store, dispatching on the query's
   * ID type (job ID, job name, table, or list query).
   *
   * @param query the query to execute; must carry both an ID and an ID type
   * @return the matching job executions (possibly empty)
   * @throws IOException if the ID type is unsupported or a database operation fails
   */
  @Override
  public synchronized List<JobExecutionInfo> get(JobExecutionQuery query)
      throws IOException {
    Preconditions.checkArgument(query.hasId() && query.hasIdType());
    Optional<Connection> connectionOptional = Optional.absent();
    try {
      connectionOptional = Optional.of(getConnection());
      Connection connection = connectionOptional.get();
      switch (query.getIdType()) {
        case JOB_ID:
          return processQueryByIds(connection, query, Filter.MISSING, Lists.newArrayList(query.getId().getString()));
        case JOB_NAME:
          return processQueryByJobName(connection, query.getId().getString(), query, Filter.MISSING);
        case TABLE:
          return processQueryByTable(connection, query);
        case LIST_TYPE:
          return processListQuery(connection, query);
        default:
          throw new IOException("Unsupported query ID type: " + query.getIdType().name());
      }
    } catch (SQLException se) {
      LOGGER.error("Failed to execute query: " + query, se);
      throw new IOException(se);
    } finally {
      // The connection is always returned to the pool, even on failure.
      if (connectionOptional.isPresent()) {
        try {
          connectionOptional.get().close();
        } catch (SQLException se) {
          LOGGER.error("Failed to close connection", se);
        }
      }
    }
  }
  /**
   * No-op: connections are obtained per-operation from the injected {@link DataSource},
   * whose lifecycle is managed by the owner of this store.
   */
  @Override
  public void close()
      throws IOException {
    // Nothing to do
  }
  /**
   * Obtains a connection from the injected {@link DataSource}; callers are responsible
   * for closing it.
   */
  private Connection getConnection()
      throws SQLException {
    return this.dataSource.getConnection();
  }
protected String getLauncherType(JobExecutionInfo info) {
if (info.hasLauncherType()) {
if (info.getLauncherType() == LauncherTypeEnum.CLUSTER) {
return LauncherTypeEnum.YARN.name();
}
return info.getLauncherType().name();
}
return null;
}
  /**
   * Inserts or updates the job execution record via a single MySQL upsert.
   *
   * <p>Parameters are bound positionally via pre-increment and must follow the column order
   * of {@code JOB_EXECUTION_UPSERT_STATEMENT_TEMPLATE}. Missing optional fields are written
   * as SQL NULL or -1; the launcher type goes through {@link #getLauncherType} for legacy
   * mapping.</p>
   *
   * @param connection an open JDBC connection (transaction management is the caller's concern)
   * @param info the job execution information; must carry a job name and a job ID
   * @throws SQLException if the upsert fails
   */
  private void upsertJobExecutionInfo(Connection connection, JobExecutionInfo info)
      throws SQLException {
    Preconditions.checkArgument(info.hasJobName());
    Preconditions.checkArgument(info.hasJobId());
    try (PreparedStatement upsertStatement = connection.prepareStatement(JOB_EXECUTION_UPSERT_STATEMENT_TEMPLATE)) {
      int index = 0;
      upsertStatement.setString(++index, info.getJobName());
      upsertStatement.setString(++index, info.getJobId());
      // Timestamps are bound with a UTC calendar so stored values are timezone-independent.
      upsertStatement.setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : null,
          getCalendarUTCInstance());
      upsertStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : null,
          getCalendarUTCInstance());
      upsertStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
      upsertStatement.setString(++index, info.hasState() ? info.getState().name() : null);
      upsertStatement.setInt(++index, info.hasLaunchedTasks() ? info.getLaunchedTasks() : -1);
      upsertStatement.setInt(++index, info.hasCompletedTasks() ? info.getCompletedTasks() : -1);
      upsertStatement.setString(++index, getLauncherType(info));
      upsertStatement.setString(++index, info.hasTrackingUrl() ? info.getTrackingUrl() : null);
      upsertStatement.executeUpdate();
    }
  }
  /**
   * Upserts all given task execution records using JDBC batching, flushing and re-preparing
   * the statement after roughly 1000 queued rows to bound memory use.
   *
   * <p>NOTE(review): the {@code batchSize++ > 1000} post-increment flushes at 1002 entries
   * rather than exactly 1000 — harmless, but worth knowing when tuning.</p>
   *
   * @param connection an open JDBC connection (transaction management is the caller's concern)
   * @param taskExecutions the task executions to persist
   * @throws SQLException if preparing or executing a batch fails
   */
  private void upsertTaskExecutionInfos(Connection connection, TaskExecutionInfoArray taskExecutions)
      throws SQLException {
    Optional<PreparedStatement> upsertStatement = Optional.absent();
    int batchSize = 0;
    for (TaskExecutionInfo taskExecution : taskExecutions) {
      // Lazily prepare the statement so no statement is created for an empty array.
      if (!upsertStatement.isPresent()) {
        upsertStatement = Optional.of(connection.prepareStatement(TASK_EXECUTION_UPSERT_STATEMENT_TEMPLATE));
      }
      addTaskExecutionInfoToBatch(upsertStatement.get(), taskExecution);
      if (batchSize++ > 1000) {
        executeBatches(upsertStatement);
        upsertStatement = Optional.absent();
        batchSize = 0;
      }
    }
    // Flush whatever remains queued (no-op when the statement is absent).
    executeBatches(upsertStatement);
  }
  /**
   * Upserts all job properties of the given job execution using JDBC batching, flushing and
   * re-preparing the statement after roughly 1000 queued rows. Does nothing when the job
   * execution carries no properties.
   *
   * @param connection an open JDBC connection (transaction management is the caller's concern)
   * @param jobExecutionInfo the job execution whose properties to persist
   * @throws SQLException if preparing or executing a batch fails
   */
  private void upsertJobProperties(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
    if (jobExecutionInfo.hasJobProperties()) {
      Optional<PreparedStatement> upsertStatement = Optional.absent();
      int batchSize = 0;
      for (Map.Entry<String, String> property : jobExecutionInfo.getJobProperties().entrySet()) {
        // Lazily prepare the statement so no statement is created for an empty map.
        if (!upsertStatement.isPresent()) {
          upsertStatement = Optional.of(connection.prepareStatement(JOB_PROPERTY_UPSERT_STATEMENT_TEMPLATE));
        }
        addPropertyToBatch(upsertStatement.get(), property.getKey(), property.getValue(), jobExecutionInfo.getJobId());
        if (batchSize++ > 1000) {
          executeBatches(upsertStatement);
          upsertStatement = Optional.absent();
          batchSize = 0;
        }
      }
      // Flush whatever remains queued (no-op when the statement is absent).
      executeBatches(upsertStatement);
    }
  }
/**
 * Upserts task-level properties for the given task executions, skipping any property whose
 * key AND value are identical to the job-level property (de-duplication against
 * {@code jobProperties}). Rows are written in batches of ~1000.
 */
private void upsertTaskProperties(Connection connection, Optional<StringMap> jobProperties,
    TaskExecutionInfoArray taskExecutions)
    throws SQLException {
  Optional<PreparedStatement> upsertStatement = Optional.absent();
  int batchSize = 0;
  for (TaskExecutionInfo taskExecution : taskExecutions) {
    if (taskExecution.hasTaskProperties()) {
      for (Map.Entry<String, String> property : taskExecution.getTaskProperties().entrySet()) {
        // Only persist properties that differ from (or are absent in) the job properties
        if (!jobProperties.isPresent() || !jobProperties.get().containsKey(property.getKey()) ||
            !jobProperties.get().get(property.getKey()).equals(property.getValue())) {
          if (!upsertStatement.isPresent()) {
            upsertStatement = Optional.of(connection.prepareStatement(TASK_PROPERTY_UPSERT_STATEMENT_TEMPLATE));
          }
          addPropertyToBatch(upsertStatement.get(), property.getKey(), property.getValue(), taskExecution.getTaskId());
          if (batchSize++ > 1000) {
            // Flush and close the filled batch, then start a new one
            executeBatches(upsertStatement);
            upsertStatement = Optional.absent();
            batchSize = 0;
          }
        }
      }
    }
  }
  // Flush the final partial batch (no-op when absent)
  executeBatches(upsertStatement);
}
/**
 * Binds one (id, key, value) property row to the upsert statement and adds it to the
 * current batch. All three values must be non-null and non-empty.
 */
private void addPropertyToBatch(PreparedStatement upsertStatement, String key, String value, String id)
    throws SQLException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
  Preconditions.checkArgument(!Strings.isNullOrEmpty(key));
  Preconditions.checkArgument(!Strings.isNullOrEmpty(value));
  // Positional parameters: 1 = entity id, 2 = property key, 3 = property value
  upsertStatement.setString(1, id);
  upsertStatement.setString(2, key);
  upsertStatement.setString(3, value);
  upsertStatement.addBatch();
}
/**
 * Upserts the job-level metrics of the given job execution in batches of ~1000 rows.
 * No-op when the execution carries no metrics.
 */
private void upsertJobMetrics(Connection connection, JobExecutionInfo jobExecutionInfo) throws SQLException {
  if (jobExecutionInfo.hasMetrics()) {
    Optional<PreparedStatement> upsertStatement = Optional.absent();
    int batchSize = 0;
    for (Metric metric : jobExecutionInfo.getMetrics()) {
      // Prepare lazily, only once there is at least one metric to write
      if (!upsertStatement.isPresent()) {
        upsertStatement = Optional.of(connection.prepareStatement(JOB_METRIC_UPSERT_STATEMENT_TEMPLATE));
      }
      addMetricToBatch(upsertStatement.get(), metric, jobExecutionInfo.getJobId());
      if (batchSize++ > 1000) {
        // Flush and close the filled batch, then start a new one
        executeBatches(upsertStatement);
        upsertStatement = Optional.absent();
        batchSize = 0;
      }
    }
    // Flush the final partial batch (no-op when absent)
    executeBatches(upsertStatement);
  }
}
/**
 * Upserts the metrics of every task execution in the array, batching ~1000 rows per
 * prepared statement. Tasks without metrics are skipped.
 */
private void upsertTaskMetrics(Connection connection, TaskExecutionInfoArray taskExecutions)
    throws SQLException {
  Optional<PreparedStatement> upsertStatement = Optional.absent();
  int batchSize = 0;
  for (TaskExecutionInfo taskExecution : taskExecutions) {
    if (taskExecution.hasMetrics()) {
      for (Metric metric : taskExecution.getMetrics()) {
        // Prepare lazily, only once there is at least one metric to write
        if (!upsertStatement.isPresent()) {
          upsertStatement = Optional.of(connection.prepareStatement(TASK_METRIC_UPSERT_STATEMENT_TEMPLATE));
        }
        addMetricToBatch(upsertStatement.get(), metric, taskExecution.getTaskId());
        if (batchSize++ > 1000) {
          // Flush and close the filled batch, then start a new one
          executeBatches(upsertStatement);
          upsertStatement = Optional.absent();
          batchSize = 0;
        }
      }
    }
  }
  // Flush the final partial batch (no-op when absent)
  executeBatches(upsertStatement);
}
/**
 * Executes the pending batch on the statement, if one is present, and always closes the
 * statement afterwards. No-op when the statement is absent.
 */
private void executeBatches(Optional<PreparedStatement> upsertStatement) throws SQLException {
  if (!upsertStatement.isPresent()) {
    return;
  }
  PreparedStatement statement = upsertStatement.get();
  try {
    statement.executeBatch();
  } finally {
    // Close even if executeBatch throws, to avoid leaking the statement
    statement.close();
  }
}
/**
 * Binds one metric row (owner id, group, name, type, value) to the upsert statement and
 * adds it to the current batch. The metric must have group, name, type, and value set.
 */
private void addMetricToBatch(PreparedStatement upsertStatement, Metric metric, String id) throws SQLException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(id));
  Preconditions.checkArgument(metric.hasGroup());
  Preconditions.checkArgument(metric.hasName());
  Preconditions.checkArgument(metric.hasType());
  Preconditions.checkArgument(metric.hasValue());
  // Positional parameters: 1 = entity id, 2..5 = metric fields
  upsertStatement.setString(1, id);
  upsertStatement.setString(2, metric.getGroup());
  upsertStatement.setString(3, metric.getName());
  upsertStatement.setString(4, metric.getType().name());
  upsertStatement.setString(5, metric.getValue());
  upsertStatement.addBatch();
}
/**
 * Binds one task-execution row to the upsert statement and adds it to the batch.
 * Missing numeric fields are stored as the sentinel -1; missing strings as NULL.
 * Timestamps are written against a UTC calendar so storage is time-zone independent.
 */
private void addTaskExecutionInfoToBatch(PreparedStatement upsertStatement, TaskExecutionInfo info)
    throws SQLException {
  Preconditions.checkArgument(info.hasTaskId());
  Preconditions.checkArgument(info.hasJobId());
  int index = 0;
  upsertStatement.setString(++index, info.getTaskId());
  upsertStatement.setString(++index, info.getJobId());
  upsertStatement.setTimestamp(++index, info.hasStartTime() ? new Timestamp(info.getStartTime()) : null,
      getCalendarUTCInstance());
  upsertStatement.setTimestamp(++index, info.hasEndTime() ? new Timestamp(info.getEndTime()) : null,
      getCalendarUTCInstance());
  upsertStatement.setLong(++index, info.hasDuration() ? info.getDuration() : -1);
  upsertStatement.setString(++index, info.hasState() ? info.getState().name() : null);
  upsertStatement.setString(++index, info.hasFailureException() ? info.getFailureException() : null);
  upsertStatement.setLong(++index, info.hasLowWatermark() ? info.getLowWatermark() : -1);
  upsertStatement.setLong(++index, info.hasHighWatermark() ? info.getHighWatermark() : -1);
  // Table fields are optional: only set when both the Table record and the field exist
  upsertStatement.setString(++index,
      info.hasTable() && info.getTable().hasNamespace() ? info.getTable().getNamespace() : null);
  upsertStatement.setString(++index, info.hasTable() && info.getTable().hasName() ? info.getTable().getName() : null);
  upsertStatement.setString(++index,
      info.hasTable() && info.getTable().hasType() ? info.getTable().getType().name() : null);
  upsertStatement.addBatch();
}
/**
 * Loads the job executions for the given job IDs, then enriches them with metrics,
 * properties, and tasks as requested by the query. Returns them in ID order.
 */
private List<JobExecutionInfo> processQueryByIds(Connection connection, JobExecutionQuery query,
    Filter tableFilter, List<String> jobIds)
    throws SQLException {
  Map<String, JobExecutionInfo> jobExecutionInfos = getJobExecutionInfos(connection, jobIds);
  addMetricsToJobExecutions(connection, query, jobExecutionInfos);
  addPropertiesToJobExecutions(connection, query, jobExecutionInfos);
  addTasksToJobExecutions(connection, query, tableFilter, jobExecutionInfos);
  return ImmutableList.copyOf(jobExecutionInfos.values());
}
/**
 * Fetches the base job-execution rows for the given job IDs, keyed by job ID.
 * Insertion order of the returned map follows the result-set order, not the input order.
 * Returns an empty map when {@code jobIds} is null or empty.
 */
private Map<String, JobExecutionInfo> getJobExecutionInfos(Connection connection, List<String> jobIds)
    throws SQLException {
  Map<String, JobExecutionInfo> jobExecutionInfos = Maps.newLinkedHashMap();
  if (jobIds != null && jobIds.size() > 0) {
    // Expand the IN (...) predicate to one placeholder per job ID
    String template = String.format(JOB_EXECUTION_QUERY_BY_JOB_ID_STATEMENT_TEMPLATE, getInPredicate(jobIds.size()));
    int index = 1;
    try (PreparedStatement jobExecutionQueryStatement = connection.prepareStatement(template)) {
      for (String jobId : jobIds) {
        jobExecutionQueryStatement.setString(index++, jobId);
      }
      try (ResultSet jobRs = jobExecutionQueryStatement.executeQuery()) {
        while (jobRs.next()) {
          JobExecutionInfo jobExecutionInfo = resultSetToJobExecutionInfo(jobRs);
          jobExecutionInfos.put(jobExecutionInfo.getJobId(), jobExecutionInfo);
        }
      }
    }
  }
  return jobExecutionInfos;
}
/**
 * Attaches job-level metrics to the already-loaded job executions, but only when the
 * query asks for job metrics and there is at least one job execution.
 */
private void addMetricsToJobExecutions(Connection connection, JobExecutionQuery query,
    Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  if (query.isIncludeJobMetrics() && jobExecutionInfos.size() > 0) {
    String template = String.format(JOB_METRIC_QUERY_STATEMENT_TEMPLATE, getInPredicate(jobExecutionInfos.size()));
    int index = 1;
    try (PreparedStatement jobMetricQueryStatement = connection.prepareStatement(template)) {
      for (String jobId : jobExecutionInfos.keySet()) {
        jobMetricQueryStatement.setString(index++, jobId);
      }
      try (ResultSet jobMetricRs = jobMetricQueryStatement.executeQuery()) {
        while (jobMetricRs.next()) {
          String jobId = jobMetricRs.getString("job_id");
          JobExecutionInfo jobExecutionInfo = jobExecutionInfos.get(jobId);
          // Create the metric array lazily on first metric for this job
          MetricArray metricArray = jobExecutionInfo.getMetrics(GetMode.NULL);
          if (metricArray == null) {
            metricArray = new MetricArray();
            jobExecutionInfo.setMetrics(metricArray);
          }
          metricArray.add(resultSetToMetric(jobMetricRs));
        }
      }
    }
  }
}
/**
 * Attaches job-level properties to the already-loaded job executions. When the query
 * restricts the property keys (comma-separated list), the restriction is applied both in
 * the SQL IN predicate and again in memory as a safety check.
 */
private void addPropertiesToJobExecutions(Connection connection, JobExecutionQuery query,
    Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  if (jobExecutionInfos.size() > 0) {
    Set<String> propertyKeys = null;
    if (query.hasJobProperties()) {
      // Split the requested keys, dropping empty entries (e.g. from trailing commas)
      propertyKeys = Sets.newHashSet(Iterables.filter(Arrays.asList(query.getJobProperties().split(",")),
          new Predicate<String>() {
            @Override
            public boolean apply(String input) {
              return !Strings.isNullOrEmpty(input);
            }
          }));
    }
    // propertyKeys == null means "no restriction"; an empty set means "match nothing"
    if (propertyKeys == null || propertyKeys.size() > 0) {
      String template = String.format(JOB_PROPERTY_QUERY_STATEMENT_TEMPLATE,
          getInPredicate(jobExecutionInfos.size()));
      if (propertyKeys != null && propertyKeys.size() > 0) {
        template += String.format(" AND property_key IN (%s)", getInPredicate(propertyKeys.size()));
      }
      int index = 1;
      try (PreparedStatement jobPropertiesQueryStatement = connection.prepareStatement(template)) {
        // Bind job IDs first, then the optional property keys
        for (String jobId : jobExecutionInfos.keySet()) {
          jobPropertiesQueryStatement.setString(index++, jobId);
        }
        if (propertyKeys != null && propertyKeys.size() > 0) {
          for (String propertyKey : propertyKeys) {
            jobPropertiesQueryStatement.setString(index++, propertyKey);
          }
        }
        try (ResultSet jobPropertiesRs = jobPropertiesQueryStatement.executeQuery()) {
          while (jobPropertiesRs.next()) {
            String jobId = jobPropertiesRs.getString("job_id");
            JobExecutionInfo jobExecutionInfo = jobExecutionInfos.get(jobId);
            // Create the property map lazily on first property for this job
            StringMap jobProperties = jobExecutionInfo.getJobProperties(GetMode.NULL);
            if (jobProperties == null) {
              jobProperties = new StringMap(Maps.<String, String>newHashMap());
              jobExecutionInfo.setJobProperties(jobProperties);
            }
            Map.Entry<String, String> property = resultSetToProperty(jobPropertiesRs);
            if (propertyKeys == null || propertyKeys.contains(property.getKey())) {
              jobProperties.put(property.getKey(), property.getValue());
            }
          }
        }
      }
    }
  }
}
/**
 * Loads the task executions (with their metrics and properties) for the given job
 * executions and attaches them to the corresponding {@link JobExecutionInfo} records.
 */
private void addTasksToJobExecutions(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  // Outer key: job ID; inner key: task ID
  Map<String, Map<String, TaskExecutionInfo>> tasksExecutions =
      getTasksForJobExecutions(connection, query, tableFilter, jobExecutionInfos);
  addMetricsToTasks(connection, query, tableFilter, tasksExecutions);
  addPropertiesToTasks(connection, query, tableFilter, tasksExecutions);
  for (Map.Entry<String, Map<String, TaskExecutionInfo>> taskExecution : tasksExecutions.entrySet()) {
    JobExecutionInfo jobExecutionInfo = jobExecutionInfos.get(taskExecution.getKey());
    TaskExecutionInfoArray taskExecutionInfos = new TaskExecutionInfoArray();
    for (TaskExecutionInfo taskExecutionInfo : taskExecution.getValue().values()) {
      taskExecutionInfos.add(taskExecutionInfo);
    }
    jobExecutionInfo.setTaskExecutions(taskExecutionInfos);
  }
}
/**
 * Fetches task executions for the given job executions, optionally restricted by the
 * table filter, and groups them by job ID then task ID. Returns an empty map when the
 * query does not ask for task executions.
 */
private Map<String, Map<String, TaskExecutionInfo>> getTasksForJobExecutions(Connection connection,
    JobExecutionQuery query, Filter tableFilter,
    Map<String, JobExecutionInfo> jobExecutionInfos) throws SQLException {
  Map<String, Map<String, TaskExecutionInfo>> taskExecutionInfos = Maps.newLinkedHashMap();
  if (query.isIncludeTaskExecutions() && jobExecutionInfos.size() > 0) {
    String template = String.format(TASK_EXECUTION_QUERY_STATEMENT_TEMPLATE,
        getInPredicate(jobExecutionInfos.size()));
    if (tableFilter.isPresent()) {
      // Filter.toString() renders the SQL fragment; parameters are bound below
      template += " AND " + tableFilter;
    }
    int index = 1;
    try (PreparedStatement taskExecutionQueryStatement = connection.prepareStatement(template)) {
      for (String jobId : jobExecutionInfos.keySet()) {
        taskExecutionQueryStatement.setString(index++, jobId);
      }
      if (tableFilter.isPresent()) {
        tableFilter.addParameters(taskExecutionQueryStatement, index);
      }
      try (ResultSet taskRs = taskExecutionQueryStatement.executeQuery()) {
        while (taskRs.next()) {
          TaskExecutionInfo taskExecutionInfo = resultSetToTaskExecutionInfo(taskRs);
          if (!taskExecutionInfos.containsKey(taskExecutionInfo.getJobId())) {
            taskExecutionInfos.put(taskExecutionInfo.getJobId(), Maps.<String, TaskExecutionInfo>newLinkedHashMap());
          }
          taskExecutionInfos.get(taskExecutionInfo.getJobId()).put(taskExecutionInfo.getTaskId(), taskExecutionInfo);
        }
      }
    }
  }
  return taskExecutionInfos;
}
/**
 * Attaches task-level metrics to the already-loaded task executions, but only when the
 * query asks for task metrics and there is at least one task execution.
 */
private void addMetricsToTasks(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, Map<String, TaskExecutionInfo>> taskExecutionInfos) throws SQLException {
  if (query.isIncludeTaskMetrics() && taskExecutionInfos.size() > 0) {
    int index = 1;
    String template = String.format(TASK_METRIC_QUERY_STATEMENT_TEMPLATE, getInPredicate(taskExecutionInfos.size()));
    if (tableFilter.isPresent()) {
      // "t." qualifies the filter columns against the task-table alias in the template
      template += " AND t." + tableFilter;
    }
    try (PreparedStatement taskMetricQueryStatement = connection.prepareStatement(template)) {
      for (String jobId : taskExecutionInfos.keySet()) {
        taskMetricQueryStatement.setString(index++, jobId);
      }
      if (tableFilter.isPresent()) {
        tableFilter.addParameters(taskMetricQueryStatement, index);
      }
      try (ResultSet taskMetricRs = taskMetricQueryStatement.executeQuery()) {
        while (taskMetricRs.next()) {
          String jobId = taskMetricRs.getString("job_id");
          String taskId = taskMetricRs.getString("task_id");
          TaskExecutionInfo taskExecutionInfo = taskExecutionInfos.get(jobId).get(taskId);
          // Create the metric array lazily on first metric for this task
          MetricArray metricsArray = taskExecutionInfo.getMetrics(GetMode.NULL);
          if (metricsArray == null) {
            metricsArray = new MetricArray();
            taskExecutionInfo.setMetrics(metricsArray);
          }
          metricsArray.add(resultSetToMetric(taskMetricRs));
        }
      }
    }
  }
}
/**
 * Attaches task-level properties to the already-loaded task executions, optionally
 * restricted to the comma-separated property keys in the query and to tasks matching
 * {@code tableFilter}. Mirrors {@code addPropertiesToJobExecutions} for tasks.
 */
private void addPropertiesToTasks(Connection connection, JobExecutionQuery query, Filter tableFilter,
    Map<String, Map<String, TaskExecutionInfo>> taskExecutionInfos)
    throws SQLException {
  if (taskExecutionInfos.size() > 0) {
    Set<String> propertyKeys = null;
    if (query.hasTaskProperties()) {
      // Split the requested keys, dropping empty entries (e.g. from trailing commas)
      propertyKeys = Sets.newHashSet(Iterables.filter(Arrays.asList(query.getTaskProperties().split(",")),
          new Predicate<String>() {
            @Override
            public boolean apply(String input) {
              return !Strings.isNullOrEmpty(input);
            }
          }));
    }
    // propertyKeys == null means "no restriction"; an empty set means "match nothing"
    if (propertyKeys == null || propertyKeys.size() > 0) {
      String template = String.format(TASK_PROPERTY_QUERY_STATEMENT_TEMPLATE,
          getInPredicate(taskExecutionInfos.size()));
      if (propertyKeys != null && propertyKeys.size() > 0) {
        // Fixed: leading space before AND (was missing, unlike the job-property variant),
        // which could fuse the clause with the preceding predicate text.
        template += String.format(" AND property_key IN (%s)", getInPredicate(propertyKeys.size()));
      }
      if (tableFilter.isPresent()) {
        // "t." qualifies the filter columns against the task-table alias in the template
        template += " AND t." + tableFilter;
      }
      int index = 1;
      try (PreparedStatement taskPropertiesQueryStatement = connection.prepareStatement(template)) {
        // Bind job IDs, then optional property keys, then table-filter values
        for (String jobId : taskExecutionInfos.keySet()) {
          taskPropertiesQueryStatement.setString(index++, jobId);
        }
        if (propertyKeys != null && propertyKeys.size() > 0) {
          for (String propertyKey : propertyKeys) {
            taskPropertiesQueryStatement.setString(index++, propertyKey);
          }
        }
        if (tableFilter.isPresent()) {
          tableFilter.addParameters(taskPropertiesQueryStatement, index);
        }
        try (ResultSet taskPropertiesRs = taskPropertiesQueryStatement.executeQuery()) {
          while (taskPropertiesRs.next()) {
            String jobId = taskPropertiesRs.getString("job_id");
            String taskId = taskPropertiesRs.getString("task_id");
            TaskExecutionInfo taskExecutionInfo = taskExecutionInfos.get(jobId).get(taskId);
            // Create the property map lazily on first property for this task
            StringMap taskProperties = taskExecutionInfo.getTaskProperties(GetMode.NULL);
            if (taskProperties == null) {
              taskProperties = new StringMap();
              taskExecutionInfo.setTaskProperties(taskProperties);
            }
            Map.Entry<String, String> property = resultSetToProperty(taskPropertiesRs);
            if (propertyKeys == null || propertyKeys.contains(property.getKey())) {
              taskProperties.put(property.getKey(), property.getValue());
            }
          }
        }
      }
    }
  }
}
/**
 * Resolves the job IDs for a job name (optionally restricted by the query's time range
 * and limited to jobs with tasks), newest first, then delegates to
 * {@code processQueryByIds} to load the full executions.
 */
private List<JobExecutionInfo> processQueryByJobName(Connection connection, String jobName, JobExecutionQuery query,
    Filter tableFilter)
    throws SQLException {
  Preconditions.checkArgument(!Strings.isNullOrEmpty(jobName));
  // Construct the query for job IDs by a given job name
  Filter filter = Filter.MISSING;
  String jobIdByNameQuery = JOB_ID_QUERY_BY_JOB_NAME_STATEMENT_TEMPLATE;
  if (query.hasTimeRange()) {
    // Add time range filter if applicable
    try {
      filter = constructTimeRangeFilter(query.getTimeRange());
      if (filter.isPresent()) {
        jobIdByNameQuery += " AND " + filter;
      }
    } catch (ParseException pe) {
      // Surface bad time-range input as a SQLException with the parse failure as cause
      LOGGER.error("Failed to parse the query time range", pe);
      throw new SQLException(pe);
    }
  }
  if (!query.isIncludeJobsWithoutTasks()) {
    jobIdByNameQuery += " AND " + FILTER_JOBS_WITH_TASKS;
  }
  // Add ORDER BY: most recently created first
  jobIdByNameQuery += " ORDER BY created_ts DESC";
  // Query job IDs by the given job name
  List<String> jobIds = Lists.newArrayList();
  try (PreparedStatement queryStatement = connection.prepareStatement(jobIdByNameQuery)) {
    int limit = query.getLimit();
    if (limit > 0) {
      queryStatement.setMaxRows(limit);
    }
    // Parameter 1 is the job name; time-range parameters follow from index 2
    queryStatement.setString(1, jobName);
    if (filter.isPresent()) {
      filter.addParameters(queryStatement, 2);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      while (rs.next()) {
        jobIds.add(rs.getString(1));
      }
    }
  }
  return processQueryByIds(connection, query, tableFilter, jobIds);
}
/**
 * Resolves job names that wrote to the queried table and aggregates the executions of
 * each such job (via {@code processQueryByJobName}, carrying the table filter through).
 */
private List<JobExecutionInfo> processQueryByTable(Connection connection, JobExecutionQuery query)
    throws SQLException {
  Preconditions.checkArgument(query.getId().isTable());
  Filter tableFilter = constructTableFilter(query.getId().getTable());
  String jobsWithoutTaskFilter = "";
  if (!query.isIncludeJobsWithoutTasks()) {
    jobsWithoutTaskFilter = " AND " + FILTER_JOBS_WITH_TASKS;
  }
  // Construct the query for job names by table definition
  String jobNameByTableQuery = String.format(JOB_NAME_QUERY_BY_TABLE_STATEMENT_TEMPLATE, tableFilter.getFilter(),
      jobsWithoutTaskFilter);
  List<JobExecutionInfo> jobExecutionInfos = Lists.newArrayList();
  // Query job names by table definition
  try (PreparedStatement queryStatement = connection.prepareStatement(jobNameByTableQuery)) {
    if (tableFilter.isPresent()) {
      tableFilter.addParameters(queryStatement, 1);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      while (rs.next()) {
        // One sub-query per matching job name; results are concatenated
        jobExecutionInfos.addAll(processQueryByJobName(connection, rs.getString(1), query, tableFilter));
      }
    }
    return jobExecutionInfos;
  }
}
/**
 * Handles list-type queries: either the most recent execution per distinct job
 * (DISTINCT, optionally time-range filtered) or simply the most recent executions.
 * Results are ordered by last-modified time, newest first.
 */
private List<JobExecutionInfo> processListQuery(Connection connection, JobExecutionQuery query)
    throws SQLException {
  Preconditions.checkArgument(query.getId().isQueryListType());
  Filter timeRangeFilter = Filter.MISSING;
  QueryListType queryType = query.getId().getQueryListType();
  String listJobExecutionsQuery;
  if (queryType == QueryListType.DISTINCT) {
    listJobExecutionsQuery = LIST_DISTINCT_JOB_EXECUTION_QUERY_TEMPLATE;
    if (query.hasTimeRange()) {
      try {
        timeRangeFilter = constructTimeRangeFilter(query.getTimeRange());
        if (timeRangeFilter.isPresent()) {
          listJobExecutionsQuery += " AND " + timeRangeFilter;
        }
      } catch (ParseException pe) {
        // Surface bad time-range input as a SQLException with the parse failure as cause
        LOGGER.error("Failed to parse the query time range", pe);
        throw new SQLException(pe);
      }
    }
  } else {
    listJobExecutionsQuery = LIST_RECENT_JOB_EXECUTION_QUERY_TEMPLATE;
  }
  String jobsWithoutTaskFilter = "";
  if (!query.isIncludeJobsWithoutTasks()) {
    jobsWithoutTaskFilter = " WHERE " + FILTER_JOBS_WITH_TASKS;
  }
  // The template carries a %s slot for the optional jobs-with-tasks WHERE clause
  listJobExecutionsQuery = String.format(listJobExecutionsQuery, jobsWithoutTaskFilter);
  listJobExecutionsQuery += " ORDER BY last_modified_ts DESC";
  try (PreparedStatement queryStatement = connection.prepareStatement(listJobExecutionsQuery)) {
    int limit = query.getLimit();
    if (limit > 0) {
      queryStatement.setMaxRows(limit);
    }
    if (timeRangeFilter.isPresent()) {
      timeRangeFilter.addParameters(queryStatement, 1);
    }
    try (ResultSet rs = queryStatement.executeQuery()) {
      List<String> jobIds = Lists.newArrayList();
      while (rs.next()) {
        jobIds.add(rs.getString(1));
      }
      return processQueryByIds(connection, query, Filter.MISSING, jobIds);
    }
  }
}
/**
 * Maps one row of the job-execution result set to a {@link JobExecutionInfo}.
 * Timestamp columns that fail to read default to 0 (deliberate best-effort: a bad
 * timestamp should not fail the whole query); empty string columns are left unset.
 */
private JobExecutionInfo resultSetToJobExecutionInfo(ResultSet rs)
    throws SQLException {
  JobExecutionInfo jobExecutionInfo = new JobExecutionInfo();
  jobExecutionInfo.setJobName(rs.getString("job_name"));
  jobExecutionInfo.setJobId(rs.getString("job_id"));
  try {
    Timestamp startTime = rs.getTimestamp("start_time");
    if (startTime != null) {
      jobExecutionInfo.setStartTime(startTime.getTime());
    }
  } catch (SQLException se) {
    // Unreadable timestamp column: fall back to epoch 0 rather than failing the row
    jobExecutionInfo.setStartTime(0);
  }
  try {
    Timestamp endTime = rs.getTimestamp("end_time");
    if (endTime != null) {
      jobExecutionInfo.setEndTime(endTime.getTime());
    }
  } catch (SQLException se) {
    jobExecutionInfo.setEndTime(0);
  }
  jobExecutionInfo.setDuration(rs.getLong("duration"));
  String state = rs.getString("state");
  if (!Strings.isNullOrEmpty(state)) {
    jobExecutionInfo.setState(JobStateEnum.valueOf(state));
  }
  jobExecutionInfo.setLaunchedTasks(rs.getInt("launched_tasks"));
  jobExecutionInfo.setCompletedTasks(rs.getInt("completed_tasks"));
  String launcherType = rs.getString("launcher_type");
  if (!Strings.isNullOrEmpty(launcherType)) {
    jobExecutionInfo.setLauncherType(LauncherTypeEnum.valueOf(launcherType));
  }
  String trackingUrl = rs.getString("tracking_url");
  if (!Strings.isNullOrEmpty(trackingUrl)) {
    jobExecutionInfo.setTrackingUrl(trackingUrl);
  }
  return jobExecutionInfo;
}
/**
 * Maps one row of the task-execution result set to a {@link TaskExecutionInfo}.
 * Timestamp columns that fail to read default to 0 (best-effort, matching
 * {@code resultSetToJobExecutionInfo}); a Table record is always attached, with only
 * the non-empty table_* columns populated.
 */
private TaskExecutionInfo resultSetToTaskExecutionInfo(ResultSet rs)
    throws SQLException {
  TaskExecutionInfo taskExecutionInfo = new TaskExecutionInfo();
  taskExecutionInfo.setTaskId(rs.getString("task_id"));
  taskExecutionInfo.setJobId(rs.getString("job_id"));
  try {
    Timestamp startTime = rs.getTimestamp("start_time");
    if (startTime != null) {
      taskExecutionInfo.setStartTime(startTime.getTime());
    }
  } catch (SQLException se) {
    // Unreadable timestamp column: fall back to epoch 0 rather than failing the row
    taskExecutionInfo.setStartTime(0);
  }
  try {
    Timestamp endTime = rs.getTimestamp("end_time");
    if (endTime != null) {
      taskExecutionInfo.setEndTime(endTime.getTime());
    }
  } catch (SQLException se) {
    taskExecutionInfo.setEndTime(0);
  }
  taskExecutionInfo.setDuration(rs.getLong("duration"));
  String state = rs.getString("state");
  if (!Strings.isNullOrEmpty(state)) {
    taskExecutionInfo.setState(TaskStateEnum.valueOf(state));
  }
  String failureException = rs.getString("failure_exception");
  if (!Strings.isNullOrEmpty(failureException)) {
    taskExecutionInfo.setFailureException(failureException);
  }
  taskExecutionInfo.setLowWatermark(rs.getLong("low_watermark"));
  taskExecutionInfo.setHighWatermark(rs.getLong("high_watermark"));
  Table table = new Table();
  String namespace = rs.getString("table_namespace");
  if (!Strings.isNullOrEmpty(namespace)) {
    table.setNamespace(namespace);
  }
  String name = rs.getString("table_name");
  if (!Strings.isNullOrEmpty(name)) {
    table.setName(name);
  }
  String type = rs.getString("table_type");
  if (!Strings.isNullOrEmpty(type)) {
    table.setType(TableTypeEnum.valueOf(type));
  }
  taskExecutionInfo.setTable(table);
  return taskExecutionInfo;
}
/**
 * Maps one row of a metric result set (metric_group/name/type/value columns) to a
 * {@link Metric} record. The metric_type column must hold a valid {@link MetricTypeEnum}.
 */
private Metric resultSetToMetric(ResultSet rs)
    throws SQLException {
  String group = rs.getString("metric_group");
  String name = rs.getString("metric_name");
  String type = rs.getString("metric_type");
  String value = rs.getString("metric_value");
  Metric result = new Metric();
  result.setGroup(group);
  result.setName(name);
  result.setType(MetricTypeEnum.valueOf(type));
  result.setValue(value);
  return result;
}
/**
 * Maps one row of a property result set to a key/value entry taken from the
 * property_key and property_value columns.
 */
private AbstractMap.SimpleEntry<String, String> resultSetToProperty(ResultSet rs)
    throws SQLException {
  String key = rs.getString("property_key");
  String value = rs.getString("property_value");
  return new AbstractMap.SimpleEntry<>(key, value);
}
/**
 * Builds a SQL filter for the given time range using strict comparisons
 * (start_time &gt; ?, end_time &lt; ?). Returns {@link Filter#MISSING} when the range has
 * no time format or sets neither bound.
 *
 * @throws ParseException if a bound cannot be parsed with the range's time format
 */
private Filter constructTimeRangeFilter(TimeRange timeRange)
    throws ParseException {
  List<String> values = Lists.newArrayList();
  StringBuilder sb = new StringBuilder();
  if (!timeRange.hasTimeFormat()) {
    LOGGER.warn("Skipping the time range filter as there is no time format in: " + timeRange);
    return Filter.MISSING;
  }
  // SimpleDateFormat is not thread-safe, so a fresh instance is created per call
  DateFormat dateFormat = new SimpleDateFormat(timeRange.getTimeFormat());
  boolean hasStartTime = timeRange.hasStartTime();
  if (hasStartTime) {
    sb.append("start_time>?");
    values.add(new Timestamp(dateFormat.parse(timeRange.getStartTime()).getTime()).toString());
  }
  if (timeRange.hasEndTime()) {
    if (hasStartTime) {
      sb.append(" AND ");
    }
    sb.append("end_time<?");
    values.add(new Timestamp(dateFormat.parse(timeRange.getEndTime()).getTime()).toString());
  }
  if (sb.length() > 0) {
    return new Filter(sb.toString(), values);
  }
  return Filter.MISSING;
}
/**
 * Builds a SQL filter matching the given table's namespace, name, and/or type (only the
 * fields that are set). Returns {@link Filter#MISSING} when none of the three is set.
 */
private Filter constructTableFilter(Table table) {
  List<String> values = Lists.newArrayList();
  StringBuilder sb = new StringBuilder();
  boolean hasNamespace = table.hasNamespace();
  if (hasNamespace) {
    sb.append("table_namespace=?");
    values.add(table.getNamespace());
  }
  boolean hasName = table.hasName();
  if (hasName) {
    if (hasNamespace) {
      sb.append(" AND ");
    }
    sb.append("table_name=?");
    values.add(table.getName());
  }
  if (table.hasType()) {
    // Fixed: join with AND whenever ANY earlier predicate was appended. The old code
    // only checked hasName, emitting invalid SQL ("table_namespace=?table_type=?")
    // for a table with a namespace and type but no name.
    if (hasNamespace || hasName) {
      sb.append(" AND ");
    }
    sb.append("table_type=?");
    values.add(table.getType().name());
  }
  if (sb.length() > 0) {
    return new Filter(sb.toString(), values);
  }
  return Filter.MISSING;
}
/** Returns a fresh {@link Calendar} pinned to the UTC time zone. */
private static Calendar getCalendarUTCInstance() {
  TimeZone utc = TimeZone.getTimeZone("UTC");
  return Calendar.getInstance(utc);
}
/**
 * Builds the placeholder list for a SQL IN predicate: exactly {@code count} question
 * marks joined by commas, e.g. "?,?,?" for count == 3 (empty string for count == 0).
 */
private static String getInPredicate(int count) {
  StringBuilder placeholders = new StringBuilder();
  for (int i = 0; i < count; i++) {
    if (i > 0) {
      placeholders.append(',');
    }
    placeholders.append('?');
  }
  return placeholders.toString();
}
}
| 4,346 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/database/Filter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.database;
import com.google.common.base.Preconditions;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import lombok.EqualsAndHashCode;
import lombok.Getter;
/**
 * An immutable SQL WHERE-clause fragment together with the positional string values to
 * bind for its placeholders. {@link #MISSING} is a sentinel meaning "no filter"; use
 * {@link #isPresent()} before rendering or binding.
 */
@EqualsAndHashCode
class Filter {
  // Sentinel for "no filter"; compared by identity in isPresent()
  public static final Filter MISSING = new Filter();
  @Getter
  private final String filter;
  @Getter
  private final List<String> values;
  // Only used to build MISSING; both fields deliberately stay null
  private Filter() {
    this.filter = null;
    this.values = null;
  }
  public Filter(String filter, List<String> values) {
    Preconditions.checkNotNull(filter);
    Preconditions.checkNotNull(values);
    this.filter = filter;
    this.values = values;
  }
  /** Renders the raw SQL fragment; null for {@link #MISSING}. */
  @Override
  public String toString() {
    return filter;
  }
  /**
   * Binds this filter's values to {@code statement} starting at {@code index}.
   *
   * @return the next unused parameter index
   */
  public int addParameters(PreparedStatement statement, int index) throws SQLException {
    if (values != null) {
      for (String value : values) {
        statement.setString(index++, value);
      }
    }
    return index;
  }
  /** True unless this is the {@link #MISSING} sentinel. */
  public boolean isPresent() {
    return this != MISSING;
  }
}
| 4,347 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/util/StateStoreCleaner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.util;
import java.io.Closeable;
import java.io.FileInputStream;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metastore.nameParser.GuidDatasetUrnStateStoreNameParser;
import org.apache.gobblin.util.ExecutorsUtils;
/**
* A utility class for cleaning up old state store files created by {@link org.apache.gobblin.metastore.FsStateStore}
* based on a configured retention.
* @deprecated Please use Gobblin-retention instead: http://gobblin.readthedocs.io/en/latest/data-management/Gobblin-Retention/.
*
* @author Yinan Li
*/
public class StateStoreCleaner implements Closeable {
  private static final Logger LOGGER = LoggerFactory.getLogger(StateStoreCleaner.class);
  // Retention amount and unit: state store files older than this are deleted
  private static final String STATE_STORE_CLEANER_RETENTION_KEY = "state.store.retention";
  private static final String DEFAULT_STATE_STORE_CLEANER_RETENTION = "7";
  private static final String STATE_STORE_CLEANER_RETENTION_TIMEUNIT_KEY = "state.store.retention.timeunit";
  private static final String DEFAULT_STATE_STORE_CLEANER_RETENTION_TIMEUNIT = TimeUnit.DAYS.toString();
  // Size of the thread pool that cleans state store directories in parallel
  private static final String STATE_STORE_CLEANER_EXECUTOR_THREADS_KEY = "state.store.cleaner.executor.threads";
  private static final String DEFAULT_STATE_STORE_CLEANER_EXECUTOR_THREADS = "50";
  private final Path stateStoreRootDir;
  private final long retention;
  private final TimeUnit retentionTimeUnit;
  private final ExecutorService cleanerRunnerExecutor;
  private final FileSystem fs;
  /**
   * Builds a cleaner from the given properties. Requires the state store root directory
   * property; retention, retention unit, thread count, and FS URI fall back to defaults.
   */
  public StateStoreCleaner(Properties properties)
      throws IOException {
    Preconditions.checkArgument(properties.containsKey(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY),
        "Missing configuration property for the state store root directory: "
            + ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY);
    this.stateStoreRootDir = new Path(properties.getProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY));
    this.retention = Long.parseLong(
        properties.getProperty(STATE_STORE_CLEANER_RETENTION_KEY, DEFAULT_STATE_STORE_CLEANER_RETENTION));
    this.retentionTimeUnit = TimeUnit.valueOf(properties
        .getProperty(STATE_STORE_CLEANER_RETENTION_TIMEUNIT_KEY, DEFAULT_STATE_STORE_CLEANER_RETENTION_TIMEUNIT)
        .toUpperCase());
    this.cleanerRunnerExecutor = Executors.newFixedThreadPool(Integer.parseInt(properties
            .getProperty(STATE_STORE_CLEANER_EXECUTOR_THREADS_KEY, DEFAULT_STATE_STORE_CLEANER_EXECUTOR_THREADS)),
        ExecutorsUtils.newThreadFactory(Optional.of(LOGGER), Optional.of("StateStoreCleaner")));
    // Defaults to the local file system when no FS URI is configured
    URI fsUri =
        URI.create(properties.getProperty(ConfigurationKeys.STATE_STORE_FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI));
    this.fs = FileSystem.get(fsUri, new Configuration());
  }
  /**
   * Run the cleaner: submits one {@link CleanerRunner} per state store subdirectory,
   * waits for all of them, then shuts the executor down.
   * @throws ExecutionException if a runner fails or the wait is interrupted
   */
  public void run()
      throws IOException, ExecutionException {
    FileStatus[] stateStoreDirs = this.fs.listStatus(this.stateStoreRootDir);
    if (stateStoreDirs == null || stateStoreDirs.length == 0) {
      LOGGER.warn("The state store root directory does not exist or is empty");
      return;
    }
    List<Future<?>> futures = Lists.newArrayList();
    for (FileStatus stateStoreDir : stateStoreDirs) {
      futures.add(this.cleanerRunnerExecutor
          .submit(new CleanerRunner(this.fs, stateStoreDir.getPath(), this.retention, this.retentionTimeUnit)));
    }
    // Block until every per-directory cleanup has finished
    for (Future<?> future : futures) {
      try {
        future.get();
      } catch (InterruptedException e) {
        throw new ExecutionException("Thread interrupted", e);
      }
    }
    ExecutorsUtils.shutdownExecutorService(cleanerRunnerExecutor, Optional.of(LOGGER), 60, TimeUnit.SECONDS);
  }
  @Override
  public void close()
      throws IOException {
    this.cleanerRunnerExecutor.shutdown();
  }
  /**
   * Accepts state store files: .jst/.tst/.gst files plus versioned V1 dataset-urn
   * name-map files (prefixed by, but not equal to, the V1 map file name).
   */
  private static class StateStoreFileFilter implements PathFilter {
    @Override
    public boolean accept(Path path) {
      String fileName = path.getName();
      String extension = Files.getFileExtension(fileName);
      return isStateMetaFile(fileName) || extension.equalsIgnoreCase("jst") || extension.equalsIgnoreCase("tst") ||
          (extension.equalsIgnoreCase("gst"));
    }
    boolean isStateMetaFile(String fileName) {
      return fileName.startsWith(GuidDatasetUrnStateStoreNameParser.StateStoreNameVersion.V1.getDatasetUrnNameMapFile())
          && !fileName.equals(GuidDatasetUrnStateStoreNameParser.StateStoreNameVersion.V1.getDatasetUrnNameMapFile());
    }
  }
  /**
   * Deletes state store files in one directory whose modification time is older than
   * the configured retention. Errors are logged, never propagated.
   */
  private static class CleanerRunner implements Runnable {
    private final FileSystem fs;
    private final Path stateStoreDir;
    private final long retention;
    private final TimeUnit retentionTimeUnit;
    CleanerRunner(FileSystem fs, Path stateStoreDir, long retention, TimeUnit retentionTimeUnit) {
      this.fs = fs;
      this.stateStoreDir = stateStoreDir;
      this.retention = retention;
      this.retentionTimeUnit = retentionTimeUnit;
    }
    @Override
    public void run() {
      try {
        FileStatus[] stateStoreFiles = this.fs.listStatus(this.stateStoreDir, new StateStoreFileFilter());
        if (stateStoreFiles == null || stateStoreFiles.length == 0) {
          LOGGER.warn("No state store files found in directory: " + this.stateStoreDir);
          return;
        }
        LOGGER.info("Cleaning up state store directory: " + this.stateStoreDir);
        for (FileStatus file : stateStoreFiles) {
          // Non-recursive delete; failure is logged and cleanup continues
          if (shouldCleanUp(file) && !this.fs.delete(file.getPath(), false)) {
            LOGGER.error("Failed to delete state store file: " + file.getPath());
          }
        }
      } catch (IOException ioe) {
        LOGGER.error("Failed to run state store cleaner for directory: " + this.stateStoreDir, ioe);
      }
    }
    // True when the file's modification time plus the retention falls before now
    private boolean shouldCleanUp(FileStatus file) {
      DateTime now = new DateTime();
      DateTime modificationDateTime = new DateTime(file.getModificationTime());
      long retentionInMills = this.retentionTimeUnit.toMillis(this.retention);
      return modificationDateTime.plus(retentionInMills).isBefore(now);
    }
  }
  /**
   * CLI entry point: loads the properties file given as the single argument and runs
   * the cleaner once, closing all resources on exit.
   */
  public static void main(String[] args)
      throws IOException {
    if (args.length != 1) {
      System.err.println("Usage: " + StateStoreCleaner.class.getSimpleName() + " <configuration file>");
      System.exit(1);
    }
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.load(closer.register(new FileInputStream(args[0])));
      closer.register(new StateStoreCleaner(properties)).run();
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }
}
| 4,348 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/util/StateStoreCleanerRunnable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.util;
import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import com.google.common.io.Closer;
import com.typesafe.config.Config;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.util.ConfigUtils;
/**
* A utility class that wraps the {@link StateStoreCleaner} implementation as a {@link Runnable}.
*/
@Slf4j
@Deprecated
public class StateStoreCleanerRunnable implements Runnable {
private Properties properties;
public StateStoreCleanerRunnable(Config config) {
this.properties = ConfigUtils.configToProperties(config);
}
public void run() {
Closer closer = Closer.create();
try {
log.info("Attempting to clean state store..");
closer.register(new StateStoreCleaner(properties)).run();
log.info("State store clean up successful.");
} catch (IOException | ExecutionException e) {
log.error("Exception encountered during execution of {}", StateStoreCleaner.class.getName());
} finally {
try {
closer.close();
} catch (IOException e) {
log.error("Exception when closing the closer", e);
}
}
}
}
| 4,349 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/util/DatabaseJobHistoryStoreSchemaManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.util;
import java.io.Closeable;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.apache.commons.configuration.AbstractConfiguration;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SystemConfiguration;
import org.apache.commons.lang.StringUtils;
import org.flywaydb.core.Flyway;
import org.flywaydb.core.api.FlywayException;
import org.flywaydb.core.api.MigrationInfoService;
import org.flywaydb.core.internal.info.MigrationInfoDumper;
import com.google.common.io.Closer;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.metastore.DatabaseJobHistoryStore;
import org.apache.gobblin.runtime.cli.CliApplication;
/**
 * A utility class for managing the {@link DatabaseJobHistoryStore} schema.
 *
 * <p>Schema versioning is delegated to Flyway: {@code migrate} applies pending migrations,
 * {@code info} prints the migration history. Configuration comes from JVM system properties
 * plus an optional properties file; every key is prefixed with {@code flyway.} before being
 * handed to Flyway.
 *
 * @author Joel Baranick
 */
@Slf4j
@Alias(value = "job-store-schema-manager", description = "Database job history store schema manager")
public class DatabaseJobHistoryStoreSchemaManager implements CliApplication, Closeable {
  // Flyway instance configured from the supplied properties; all schema work goes through it.
  private final Flyway flyway;
  private DatabaseJobHistoryStoreSchemaManager(Properties properties) {
    flyway = Flyway.configure(this.getClass().getClassLoader()).configuration(properties).load();
  }
  /** Entry point for fluent construction: set the data source first, then optionally a version. */
  public static DataSourceBuilder builder() {
    return new Builder();
  }
  /** Builds directly from pre-assembled Flyway properties (used by the CLI path). */
  private static FinalBuilder builder(Properties properties) {
    return new Builder(properties);
  }
  /**
   * Applies all pending schema migrations.
   *
   * @throws FlywayException if a migration fails
   */
  public void migrate() throws FlywayException {
    flyway.migrate();
  }
  /**
   * Prints the migration history/status as an ASCII table to stdout.
   *
   * @throws FlywayException if the migration info cannot be retrieved
   */
  public void info() throws FlywayException {
    MigrationInfoService info = flyway.info();
    System.out.println(MigrationInfoDumper.dumpToAsciiTable(info.all()));
  }
  @Override
  public void close() throws IOException {
    // Nothing to release: Flyway manages its own connections per operation.
  }
  /**
   * CLI entry point. {@code args[0]} is the command ({@code migrate} or {@code info},
   * case-insensitive); {@code args[1]} is an optional properties file layered under
   * JVM system properties. Invalid usage terminates the JVM via {@link #printUsage()}.
   */
  @Override
  public void run(String[] args) throws Exception {
    if (args.length < 1 || args.length > 2) {
      printUsage();
    }
    Closer closer = Closer.create();
    try {
      // System properties take precedence over the optional configuration file.
      CompositeConfiguration config = new CompositeConfiguration();
      config.addConfiguration(new SystemConfiguration());
      if (args.length == 2) {
        config.addConfiguration(new PropertiesConfiguration(args[1]));
      }
      Properties properties = getProperties(config);
      DatabaseJobHistoryStoreSchemaManager schemaManager =
          closer.register(DatabaseJobHistoryStoreSchemaManager.builder(properties).build());
      if (String.CASE_INSENSITIVE_ORDER.compare("migrate", args[0]) == 0) {
        schemaManager.migrate();
      } else if (String.CASE_INSENSITIVE_ORDER.compare("info", args[0]) == 0) {
        schemaManager.info();
      } else {
        printUsage();
      }
    } catch (Throwable t) {
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }
  /** Prints CLI usage and terminates the JVM with a non-zero exit code. */
  private static void printUsage() {
    System.err.println("Usage: migrate|info [configuration file]");
    System.exit(1);
  }
  /**
   * Flattens a commons-configuration {@link Configuration} into {@link Properties},
   * prefixing every key with {@code flyway.} and joining multi-valued entries with the
   * configuration's list delimiter.
   */
  private static Properties getProperties(Configuration config) {
    Properties props = new Properties();
    char delimiter = (config instanceof AbstractConfiguration)
        ? ((AbstractConfiguration) config).getListDelimiter() : ',';
    Iterator keys = config.getKeys();
    while (keys.hasNext())
    {
      String key = (String) keys.next();
      List list = config.getList(key);
      props.setProperty("flyway." + key, StringUtils.join(list.iterator(), delimiter));
    }
    return props;
  }
  /** First builder stage: requires a JDBC data source. */
  public interface DataSourceBuilder {
    VersionBuilder setDataSource(String url, String user, String password);
  }
  /** Optional stage: pin the target schema version (defaults to latest). */
  public interface VersionBuilder extends FinalBuilder {
    FinalBuilder setVersion(String version);
  }
  /** Terminal stage: produces the configured schema manager. */
  public interface FinalBuilder {
    DatabaseJobHistoryStoreSchemaManager build();
  }
  private static class Builder implements DataSourceBuilder, VersionBuilder, FinalBuilder {
    private final Properties properties;
    public Builder() {
      properties = new Properties();
    }
    public Builder(Properties properties) {
      this.properties = properties;
    }
    @Override
    public DatabaseJobHistoryStoreSchemaManager build() {
      return new DatabaseJobHistoryStoreSchemaManager(properties);
    }
    @Override
    public VersionBuilder setDataSource(String url, String user, String password) {
      this.properties.setProperty("flyway.url", url);
      this.properties.setProperty("flyway.user", user);
      this.properties.setProperty("flyway.password", password);
      return this;
    }
    @Override
    public FinalBuilder setVersion(String version) {
      // "latest" means no explicit target: Flyway then migrates to the newest available version.
      if (!"latest".equalsIgnoreCase(version)) {
        this.properties.setProperty("flyway.target", version);
      }
      return this;
    }
  }
}
| 4,350 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/util/MySqlJdbcUrl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.util;
import org.apache.http.client.utils.URIBuilder;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
/**
 * A fluent wrapper around Apache's {@link URIBuilder} for constructing and parsing MySQL
 * JDBC connection URLs of the form {@code jdbc:mysql://host:port/db?user=...&password=...}.
 */
public class MySqlJdbcUrl {
  private static final String PREFIX = "jdbc:";

  private final URIBuilder uriBuilder;

  private MySqlJdbcUrl() {
    this.uriBuilder = new URIBuilder();
    this.uriBuilder.setScheme("mysql");
  }

  private MySqlJdbcUrl(String url) throws MalformedURLException, URISyntaxException {
    if (!url.startsWith(PREFIX)) {
      throw new MalformedURLException();
    }
    // Strip the "jdbc:" prefix; the remainder is a standard URI the builder can parse.
    this.uriBuilder = new URIBuilder(url.substring(PREFIX.length()));
  }

  /** Creates an empty builder with the {@code mysql} scheme preset. */
  public static MySqlJdbcUrl create() {
    return new MySqlJdbcUrl();
  }

  /** Parses an existing JDBC URL; it must start with {@code jdbc:}. */
  public static MySqlJdbcUrl parse(String url) throws MalformedURLException, URISyntaxException {
    return new MySqlJdbcUrl(url);
  }

  public MySqlJdbcUrl setHost(String host) {
    this.uriBuilder.setHost(host);
    return this;
  }

  public MySqlJdbcUrl setPort(int port) {
    this.uriBuilder.setPort(port);
    return this;
  }

  /** Sets the database path; a leading slash is prepended automatically. */
  public MySqlJdbcUrl setPath(String path) {
    this.uriBuilder.setPath("/" + path);
    return this;
  }

  public MySqlJdbcUrl setUser(String user) {
    return setParameter("user", user);
  }

  public MySqlJdbcUrl setPassword(String password) {
    return setParameter("password", password);
  }

  public MySqlJdbcUrl setParameter(String param, String value) {
    this.uriBuilder.setParameter(param, value);
    return this;
  }

  @Override
  public String toString() {
    try {
      return PREFIX + this.uriBuilder.build().toString();
    } catch (URISyntaxException e) {
      // Historical behavior: an unbuildable URL renders as the empty string.
      return "";
    }
  }
}
| 4,351 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/nameParser/SimpleDatasetUrnStateStoreNameParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.nameParser;
import java.io.IOException;
import java.util.Collection;
/**
 * Simple implementation of {@link DatasetUrnStateStoreNameParser}, in which datasetUrn is identical to dataset state store name.
 *
 * <p>Because the mapping is the identity function in both directions, no state needs to be
 * persisted and {@link #persistDatasetUrns(java.util.Collection)} is a no-op.
 */
public class SimpleDatasetUrnStateStoreNameParser implements DatasetUrnStateStoreNameParser {
  /** The state store name is the dataset URN itself. */
  @Override
  public String getStateStoreNameFromDatasetUrn(String datasetUrn)
      throws IOException {
    return datasetUrn;
  }
  /** The dataset URN is the state store name itself. */
  @Override
  public String getDatasetUrnFromStateStoreName(String stateStoreName)
      throws IOException {
    return stateStoreName;
  }
  /** No-op: the identity mapping requires no persisted state. */
  @Override
  public void persistDatasetUrns(Collection<String> datasetUrns)
      throws IOException {
    //do nothing.
  }
}
| 4,352 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/nameParser/GuidDatasetUrnStateStoreNameParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.nameParser;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Collection;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.google.common.collect.Maps;
import com.google.common.io.LineReader;
import org.apache.gobblin.util.guid.Guid;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Implements {@link DatasetUrnStateStoreNameParser} using {@link Guid}.
 *
 * <p>Dataset URNs are shortened to a fixed-length SHA1-based {@link Guid} string, and the
 * guid-to-urn mapping is persisted in a text file of alternating guid/urn lines under the
 * job state store root directory. Stores without a mapping file are treated as V0, where
 * the raw URN doubles as the state store name.
 */
public class GuidDatasetUrnStateStoreNameParser implements DatasetUrnStateStoreNameParser {
  // Suffix of the temporary mapping file written before being renamed into place.
  private static final String TMP_SUFFIX = "_tmp";
  private final FileSystem fs;
  // Path of the V1 mapping file; its existence also marks the store as V1.
  private Path versionIdentifier;
  @AllArgsConstructor
  public enum StateStoreNameVersion {
    /**
     * DatasetUrn is directly used as the state store name.
     * This is the initial status when {@link GuidDatasetUrnStateStoreNameParser} is first enabled.
     * It will be migrated to {@link StateStoreNameVersion#V1}.
     */
    V0(StringUtils.EMPTY),
    /**
     * DatasetUrn is hashed into shorten name.
     */
    V1("datasetUrnNameMapV1");
    @Getter
    private String datasetUrnNameMapFile;
  }
  private final StateStoreNameVersion version;
  // Bidirectional guid <-> datasetUrn mapping; synchronized wrapper allows concurrent callers.
  @VisibleForTesting
  protected final BiMap<String, String> sanitizedNameToDatasetURNMap;
  /**
   * Loads the existing guid/urn mapping file (if present) from {@code jobStatestoreRootDir}
   * and determines the store version accordingly.
   *
   * @param fs the file system hosting the state store
   * @param jobStatestoreRootDir root directory of the job state store
   * @throws IOException if reading the mapping file fails
   */
  public GuidDatasetUrnStateStoreNameParser(FileSystem fs, Path jobStatestoreRootDir)
      throws IOException {
    this.fs = fs;
    this.sanitizedNameToDatasetURNMap = Maps.synchronizedBiMap(HashBiMap.<String, String>create());
    this.versionIdentifier = new Path(jobStatestoreRootDir, StateStoreNameVersion.V1.getDatasetUrnNameMapFile());
    if (this.fs.exists(versionIdentifier)) {
      this.version = StateStoreNameVersion.V1;
      try (InputStream in = this.fs.open(versionIdentifier)) {
        // File format: alternating lines of <shortened name> then <dataset urn>.
        LineReader lineReader = new LineReader(new InputStreamReader(in, Charsets.UTF_8));
        String shortenName = lineReader.readLine();
        while (shortenName != null) {
          String datasetUrn = lineReader.readLine();
          this.sanitizedNameToDatasetURNMap.put(shortenName, datasetUrn);
          shortenName = lineReader.readLine();
        }
      }
    } else {
      this.version = StateStoreNameVersion.V0;
    }
  }
  /**
   * Get datasetUrn for the given state store name.
   * If the state store name can be found in {@link #sanitizedNameToDatasetURNMap}, the original datasetUrn will be returned.
   * Otherwise, the state store name will be returned.
   */
  @Override
  public String getDatasetUrnFromStateStoreName(String stateStoreName)
      throws IOException {
    if (version == StateStoreNameVersion.V0 || !this.sanitizedNameToDatasetURNMap.containsKey(stateStoreName)) {
      return stateStoreName;
    } else {
      return this.sanitizedNameToDatasetURNMap.get(stateStoreName);
    }
  }
  /**
   * Merge the given {@link Collection} of datasetUrns with {@link #sanitizedNameToDatasetURNMap}, and write the results
   * to the {@link Path} of {@link #versionIdentifier}, which is a text file.
   */
  @Override
  public void persistDatasetUrns(Collection<String> datasetUrns)
      throws IOException {
    for (String datasetUrn : datasetUrns) {
      String key = Guid.fromStrings(datasetUrn).toString();
      if (!this.sanitizedNameToDatasetURNMap.containsKey(key)) {
        this.sanitizedNameToDatasetURNMap.put(key, datasetUrn);
      } else if (!this.sanitizedNameToDatasetURNMap.get(key).equals(datasetUrn)) {
        // This should not happen for datasetUrns since Guid is SHA1 based...
        throw new RuntimeException(
            "Found a collision for " + datasetUrn + " with existing: " + this.sanitizedNameToDatasetURNMap.get(key));
      }
    }
    // Write the merged mapping to a temp file first so a failed write never corrupts the live file.
    Path tmpMapFile = new Path(this.versionIdentifier.getParent(), this.versionIdentifier.getName() + TMP_SUFFIX);
    try (FSDataOutputStream fsout = this.fs.create(tmpMapFile)) {
      for (String key : this.sanitizedNameToDatasetURNMap.keySet()) {
        fsout.write(key.getBytes(Charsets.UTF_8));
        fsout.writeByte('\n');
        fsout.write(this.sanitizedNameToDatasetURNMap.get(key).getBytes(Charsets.UTF_8));
        fsout.writeByte('\n');
      }
    }
    // Back up the previous file first, and then rename the new file.
    if (this.fs.exists(this.versionIdentifier) && !this.fs.rename(this.versionIdentifier,
        new Path(this.versionIdentifier.getParent(),
            this.versionIdentifier.getName() + "_" + System.currentTimeMillis()))) {
      throw new IOException(
          "Failed to back up existing datasetUrn to stateStore name mapping file: " + this.versionIdentifier);
    }
    if (!fs.rename(tmpMapFile, this.versionIdentifier)) {
      throw new IOException("Failed to rename from " + tmpMapFile + " to " + this.versionIdentifier);
    }
  }
  /**
   * Returns the shortened (guid) state store name for the given dataset URN, computing and
   * caching the guid on first use. Note the new mapping is only held in memory until
   * {@link #persistDatasetUrns(Collection)} is called.
   */
  @Override
  public String getStateStoreNameFromDatasetUrn(String datasetUrn)
      throws IOException {
    if (!this.sanitizedNameToDatasetURNMap.inverse().containsKey(datasetUrn)) {
      String guid = Guid.fromStrings(datasetUrn).toString();
      this.sanitizedNameToDatasetURNMap.put(guid, datasetUrn);
    }
    return this.sanitizedNameToDatasetURNMap.inverse().get(datasetUrn);
  }
}
| 4,353 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/nameParser/DatasetUrnStateStoreNameParser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.nameParser;
import java.io.IOException;
import java.util.Collection;
/**
 * Parses datasetUrns to dataset state store name. This is useful for systems that have a limit on the length of the
 * state store name.
 */
public interface DatasetUrnStateStoreNameParser {
  /**
   * Get state store name from the given datasetUrn.
   *
   * @param datasetUrn the dataset URN to translate
   * @return the (possibly shortened) state store name for the URN
   * @throws IOException if the translation requires I/O that fails
   */
  String getStateStoreNameFromDatasetUrn(String datasetUrn)
      throws IOException;
  /**
   * Get datasetUrn from the given state store name.
   *
   * @param stateStoreName the state store name to translate back
   * @return the original dataset URN for the state store name
   * @throws IOException if the translation requires I/O that fails
   */
  String getDatasetUrnFromStateStoreName(String stateStoreName)
      throws IOException;
  /**
   * Persist the {@link Collection} of dataset URNs (e.g. so a urn-to-name mapping survives restarts).
   *
   * @param datasetUrns the dataset URNs to persist
   * @throws IOException if persisting fails
   */
  void persistDatasetUrns(Collection<String> datasetUrns)
      throws IOException;
}
| 4,354 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/metadata/StateStoreEntryManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.metadata;
import java.io.IOException;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.StateStore;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Contains metadata about an entry in a {@link StateStore}.
 * Exposes access to the {@link StateStore} that contains the entry.
 * @param <T> type of {@link State} that can be read from this entry.
 */
@Data
@EqualsAndHashCode(exclude={"stateStore"})
public abstract class StateStoreEntryManager<T extends State> {

  private final String storeName;
  private final String tableName;

  /** Timestamp at which the state was written. */
  private final long timestamp;

  /** {@link StateStore} where this entry exists. */
  private final StateStore stateStore;

  /**
   * @return the timestamp at which the state was written.
   * @throws IllegalStateException if the store did not record a usable (positive) timestamp.
   */
  public final long getTimestamp() {
    if (this.timestamp <= 0) {
      // IllegalStateException is the idiomatic type here and, being a RuntimeException
      // subtype, remains compatible with any existing catch blocks; include the bad value.
      throw new IllegalStateException("Timestamp is not reliable: " + this.timestamp);
    }
    return this.timestamp;
  }

  /**
   * @return The {@link State} contained in this entry. This operation should be lazy.
   * @throws IOException
   */
  public abstract T readState() throws IOException;

  /**
   * Delete this entry in the {@link StateStore}.
   * @throws IOException
   */
  public abstract void delete() throws IOException;
}
| 4,355 |
0 | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore | Create_ds/gobblin/gobblin-metastore/src/main/java/org/apache/gobblin/metastore/metadata/DatasetStateStoreEntryManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metastore.metadata;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metastore.DatasetStateStore;
import lombok.EqualsAndHashCode;
import lombok.Getter;
/**
 * A {@link StateStoreEntryManager} in a {@link DatasetStateStore}.
 */
@Getter
@EqualsAndHashCode(exclude={"datasetStateStore"}, callSuper = true)
public abstract class DatasetStateStoreEntryManager<T extends State> extends StateStoreEntryManager<T> {

  /**
   * The sanitized dataset urn. Sanitization usually involves a one-way function on the dataset urn, so the actual
   * urn cannot be determined except by {@link #readState()}.
   */
  private final String sanitizedDatasetUrn;

  /**
   * An identifier for the state. Usually a job id or "current" for the latest state for that dataset.
   */
  private final String stateId;

  private final DatasetStateStore datasetStateStore;

  /**
   * Convenience constructor that extracts the sanitized URN and state id from an
   * already-parsed table name.
   */
  public DatasetStateStoreEntryManager(String storeName, String tableName, long timestamp,
      DatasetStateStore.TableNameParser tableNameParser, DatasetStateStore<T> datasetStateStore) {
    this(storeName, tableName, timestamp, tableNameParser.getSanitizedDatasetUrn(), tableNameParser.getStateId(), datasetStateStore);
  }

  public DatasetStateStoreEntryManager(String storeName, String tableName, long timestamp, String sanitizedDatasetUrn,
      String stateId, DatasetStateStore<T> datasetStateStore) {
    super(storeName, tableName, timestamp, datasetStateStore);
    this.sanitizedDatasetUrn = sanitizedDatasetUrn;
    this.stateId = stateId;
    this.datasetStateStore = datasetStateStore;
  }

  /** Overridden to expose the store with its more specific {@link DatasetStateStore} type. */
  @Override
  public DatasetStateStore getStateStore() {
    return this.datasetStateStore;
  }
}
| 4,356 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin/metrics/GobblinMetricsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.Properties;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.util.ConfigUtils;
@Test
public class GobblinMetricsTest {

  /**
   * Verifies the {@link GobblinMetrics} instance is removed from {@link GobblinMetricsRegistry}
   * once it stops metrics reporting.
   */
  public void testStopReportingMetrics()
      throws MultiReporterException {
    String id = getClass().getSimpleName() + "-" + System.currentTimeMillis();
    GobblinMetrics metrics = GobblinMetrics.get(id);

    Properties props = new Properties();
    props.put(ConfigurationKeys.FAILURE_REPORTING_FILE_ENABLED_KEY, "false");
    metrics.startMetricReporting(props);
    Assert.assertEquals(GobblinMetricsRegistry.getInstance().get(id).get(), metrics);

    metrics.stopMetricsReporting();
    Assert.assertFalse(GobblinMetricsRegistry.getInstance().get(id).isPresent());
  }

  /**
   * Enabling the file reporter without a metrics log directory must surface a
   * {@link MultiReporterException}.
   */
  public void testMetricFileReporterThrowsException() {
    String id = getClass().getSimpleName() + "-" + System.currentTimeMillis();
    GobblinMetrics metrics = GobblinMetrics.get(id);

    //Enable file reporter without specifying metrics.log.dir.
    Config config = ConfigFactory.empty()
        .withValue(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY, ConfigValueFactory.fromAnyRef(true));

    boolean caught = false;
    try {
      metrics.startMetricReporting(ConfigUtils.configToProperties(config));
    } catch (MultiReporterException e) {
      caught = true;
    }
    if (!caught) {
      Assert.fail("Metric reporting unexpectedly succeeded.");
    }
  }

  /**
   * With both log directories configured, file reporting must start without error.
   */
  public void testMetricFileReporterSuccessful() {
    String id = getClass().getSimpleName() + "-" + System.currentTimeMillis();
    GobblinMetrics metrics = GobblinMetrics.get(id);

    Config config = ConfigFactory.empty()
        .withValue(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY, ConfigValueFactory.fromAnyRef(true))
        .withValue(ConfigurationKeys.METRICS_LOG_DIR_KEY, ConfigValueFactory.fromAnyRef("/tmp"))
        .withValue(ConfigurationKeys.FAILURE_LOG_DIR_KEY, ConfigValueFactory.fromAnyRef("/tmp"));

    try {
      metrics.startMetricReporting(ConfigUtils.configToProperties(config));
    } catch (MultiReporterException e) {
      Assert.fail("Unexpected exception " + e.getMessage());
    }
  }
}
| 4,357 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin/metrics/reporter/ScheduledReporterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.reporter;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.typesafe.config.Config;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.context.ReportableContext;
import org.apache.gobblin.metrics.context.filter.ContextFilterFactory;
import org.apache.gobblin.metrics.test.ContextStoreReporter;
import org.apache.gobblin.util.ConfigUtils;
/**
* Test for {@link org.apache.gobblin.metrics.reporter.ScheduledReporter}
*/
public class ScheduledReporterTest {
  /** Verifies that period strings such as "1h2m3s" parse into the expected number of seconds. */
  @Test
  public void testPeriodParser() {
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("1s"), 1);
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("2m"), 120);
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("3h"), 3 * 3600);
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("1m2s"), 62);
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("1h1s"), 3601);
    Assert.assertEquals(ScheduledReporter.parsePeriodToSeconds("1h2m3s"), 3600 + 120 + 3);
    // An absurdly large period must be rejected with a RuntimeException.
    try {
      ScheduledReporter.parsePeriodToSeconds("1000000h");
      Assert.fail();
    } catch (RuntimeException re) {
      // fail unless exception is thrown
    }
  }
  /**
   * End-to-end check of scheduled reporting: context discovery through the configured
   * {@link PrefixContextFilter}, periodic report scheduling, reporting of GC'ed contexts,
   * and clean shutdown.
   * NOTE(review): relies on wall-clock sleeps and System.gc(), so it is inherently
   * timing-sensitive; the bounds asserted below are deliberately loose.
   */
  @Test
  public void testScheduledReporter() throws Exception {
    long reportingIntervalMillis = 1000;
    String context1Name = ScheduledReporterTest.class.getSimpleName() + "_1";
    String context2Name = ScheduledReporterTest.class.getSimpleName() + "_2";
    // Does not carry the class-name prefix, so the filter must exclude it.
    String context3Name = "SomeOtherName";
    // Create a context name (to check that initialized reporter gets existing contexts correctly)
    MetricContext context1 = MetricContext.builder(context1Name).build();
    // Set up config for reporter
    Properties props = new Properties();
    ScheduledReporter.setReportingInterval(props, reportingIntervalMillis, TimeUnit.MILLISECONDS);
    Config config = ConfigUtils.propertiesToConfig(props);
    config = PrefixContextFilter.setPrefixString(config, ScheduledReporterTest.class.getSimpleName());
    config = ContextFilterFactory.setContextFilterClass(config, PrefixContextFilter.class);
    // Create reporter
    ContextStoreReporter reporter = new ContextStoreReporter("testContext", config);
    // Check that reporter correctly found created context
    Set<String> contextNames = getContextNames(reporter);
    Assert.assertEquals(contextNames.size(), 1);
    Assert.assertTrue(contextNames.contains(context1Name));
    // Create two more contexts
    MetricContext context2 = context1.childBuilder(context2Name).build();
    context1.childBuilder(context3Name).build();
    // Check that reporter correctly found new reporter, but skipped the one that does not satisfy filter
    contextNames = getContextNames(reporter);
    Assert.assertEquals(contextNames.size(), 2);
    Assert.assertTrue(contextNames.contains(context1Name));
    Assert.assertTrue(contextNames.contains(context2Name));
    // Check that nothing has been reported
    Assert.assertEquals(reporter.getReportedContexts().size(), 0);
    // Start reporter
    reporter.start();
    // Wait for up to 10 reporting intervals for 3 reports to run
    // (each report emits 2 contexts, so 3 reports == 6 reported entries).
    long maxWaitMillis = 10 * reportingIntervalMillis;
    long totalWait = 0;
    while(reporter.getReportedContexts().size() < 6 && maxWaitMillis > 0) {
      long wait = 100;
      Thread.sleep(wait);
      maxWaitMillis -= wait;
      totalWait += wait;
    }
    // stop reporter
    reporter.stop();
    // Check wait makes sense given reporting interval (e.g. if wait = 100 millis, then 2 reports in 100 millis,
    // something is wrong with schedule).
    Assert.assertTrue(totalWait > reportingIntervalMillis);
    Assert.assertTrue(reporter.getReportedContexts().size() >= 6);
    // Check that it didn't report excessively
    Assert.assertTrue(reporter.getReportedContexts().size() <= 10);
    // Check that first report indeed reported the correct contexts
    Set<String> firstReport = Sets.newHashSet(reporter.getReportedContexts().get(0).getName(),
        reporter.getReportedContexts().get(1).getName());
    Assert.assertEquals(firstReport, Sets.newHashSet(context1Name, context2Name));
    // Check that second report indeed reported the correct contexts
    Set<String> secondReport = Sets.newHashSet(reporter.getReportedContexts().get(2).getName(),
        reporter.getReportedContexts().get(3).getName());
    Assert.assertEquals(secondReport, Sets.newHashSet(context1Name, context2Name));
    int totalReports = reporter.getReportedContexts().size();
    // Wait for reporting interval to make sure reporting has actually stopped
    Thread.sleep(2 * reportingIntervalMillis);
    Assert.assertEquals(reporter.getReportedContexts().size(), totalReports);
    reporter.getReportedContexts().clear();
    // Dereference context 2 to ensure that it gets reported
    context2 = null;
    // Wait for context to be GCed
    maxWaitMillis = 2000;
    System.gc();
    while(reporter.getReportedContexts().size() < 1 && maxWaitMillis > 0) {
      System.gc();
      long wait = 100;
      Thread.sleep(wait);
      maxWaitMillis -= wait;
    }
    // Check that GCed context was reported
    Assert.assertEquals(reporter.getReportedContexts().size(), 1);
    Assert.assertEquals(reporter.getReportedContexts().get(0).getName(), context2Name);
    // Test close method
    reporter.close();
  }
  /** Extracts the names of all contexts the reporter currently plans to report. */
  private Set<String> getContextNames(ContextStoreReporter reporter) {
    return Sets.newHashSet(Iterables.transform(reporter.getContextsToReport(),
        new Function<ReportableContext, String>() {
          @Nullable @Override public String apply(ReportableContext input) {
            return input.getName();
          }
        }));
  }
}
| 4,358 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/test/java/org/apache/gobblin/metrics/reporter/PrefixContextFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.reporter;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.metrics.InnerMetricContext;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.RootMetricContext;
import org.apache.gobblin.metrics.context.filter.ContextFilter;
/**
* {@link ContextFilter} that accepts {@link MetricContext} based on whether the name has the correct prefix.
*/
public class PrefixContextFilter implements ContextFilter {
  // Config key holding the required name prefix. NOTE(review): the key literally reads
  // "text." — possibly a typo for "test." — but every read and write goes through this
  // constant, so usage is self-consistent.
  public static final String PREFIX_FILTER = "text.context.filter.prefix.string";

  private final String prefix;

  public PrefixContextFilter(Config config) {
    this.prefix = config.getString(PREFIX_FILTER);
  }

  /** Returns a copy of {@code config} with the prefix-filter key set to {@code prefix}. */
  public static Config setPrefixString(Config config, String prefix) {
    return config.withValue(PREFIX_FILTER, ConfigValueFactory.fromAnyRef(prefix));
  }

  /** Walks the context tree from the root and collects every context whose name matches. */
  @Override public Set<MetricContext> getMatchingContexts() {
    Set<MetricContext> matching = Sets.newHashSet();
    collectMatches(RootMetricContext.get(), matching);
    return ImmutableSet.copyOf(matching);
  }

  @Override public boolean matches(MetricContext metricContext) {
    return metricContext.getName().startsWith(this.prefix);
  }

  @Override public boolean shouldReplaceByParent(InnerMetricContext removedMetricContext) {
    return false;
  }

  /** Depth-first traversal adding {@code context} and all matching descendants to {@code sink}. */
  private void collectMatches(MetricContext context, Set<MetricContext> sink) {
    if (matches(context)) {
      sink.add(context);
    }
    for (MetricContext child : context.getChildContextsAsMap().values()) {
      collectMatches(child, sink);
    }
  }
}
| 4,359 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache/gobblin/MetricsHelper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.util.HashMap;
import java.util.Map;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.Tag;
/** Utility methods for snapshotting metric counts and tags from a {@link MetricContext}. */
public class MetricsHelper {

  /**
   * Returns the current counts of all meters and timers registered on {@code context},
   * keyed by metric name.
   */
  public static Map<String, Long> dumpMetrics(MetricContext context) {
    Map<String, Long> counts = new HashMap<>();
    for (Map.Entry<String, Meter> meterEntry : context.getMeters().entrySet()) {
      counts.put(meterEntry.getKey(), meterEntry.getValue().getCount());
    }
    for (Map.Entry<String, Timer> timerEntry : context.getTimers().entrySet()) {
      counts.put(timerEntry.getKey(), timerEntry.getValue().getCount());
    }
    return counts;
  }

  /** Returns the tags of {@code context} as a key-to-string-value map. */
  public static Map<String, String> dumpTags(MetricContext context) {
    Map<String, String> tagValues = new HashMap<>();
    for (Tag<?> tag : context.getTags()) {
      tagValues.put(tag.getKey(), tag.getValue().toString());
    }
    return tagValues;
  }
}
| 4,360 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache/gobblin/metrics/GobblinMetricsRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ThreadLocalRandom;
import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Lists;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
/**
* Registry that stores instances of {@link GobblinMetrics} identified by an arbitrary string id. The static method
* {@link #getInstance()} provides a static instance of this this class that should be considered the global registry of
* metrics.
*
* <p>
* An application could also instantiate one or more registries to, for example, separate instances of
* {@link GobblinMetrics} into different scopes.
* </p>
*/
public class GobblinMetricsRegistry {
  private static final GobblinMetricsRegistry GLOBAL_INSTANCE = new GobblinMetricsRegistry();
  // Cache from metrics ID to the GobblinMetrics instance registered under that ID.
  private final Cache<String, GobblinMetrics> metricsCache = CacheBuilder.newBuilder().build();

  private GobblinMetricsRegistry() {
    // Singleton-style global registry: obtain via getInstance().
  }

  /**
   * Associate a {@link GobblinMetrics} instance with a given ID if the ID is
   * not already associated with a {@link GobblinMetrics} instance.
   *
   * @param id the given {@link GobblinMetrics} ID
   * @param gobblinMetrics the {@link GobblinMetrics} instance to be associated with the given ID
   * @return the previous {@link GobblinMetrics} instance associated with the ID or {@code null}
   *         if there's no previous {@link GobblinMetrics} instance associated with the ID
   */
  public GobblinMetrics putIfAbsent(String id, GobblinMetrics gobblinMetrics) {
    return this.metricsCache.asMap().putIfAbsent(id, gobblinMetrics);
  }

  /**
   * Get the {@link GobblinMetrics} instance associated with a given ID.
   *
   * @param id the given {@link GobblinMetrics} ID
   * @return the {@link GobblinMetrics} instance associated with the ID, wrapped in an {@link Optional} or
   *         {@link Optional#absent()} if no {@link GobblinMetrics} instance for the given ID is found
   */
  public Optional<GobblinMetrics> get(String id) {
    return Optional.fromNullable(this.metricsCache.getIfPresent(id));
  }

  /**
   * Get the {@link GobblinMetrics} instance associated with a given ID. If the ID is not found this method
   * returns the {@link GobblinMetrics} returned by the given {@link Callable}, and creates a mapping between
   * the specified ID and the {@link GobblinMetrics} instance returned by the {@link Callable}.
   *
   * @param id the given {@link GobblinMetrics} ID
   * @param valueLoader a {@link Callable} that returns a {@link GobblinMetrics}, only invoked
   *        if the given id is not found
   * @return a {@link GobblinMetrics} instance associated with the id
   */
  public GobblinMetrics getOrCreate(String id, Callable<? extends GobblinMetrics> valueLoader) {
    try {
      return this.metricsCache.get(id, valueLoader);
    } catch (ExecutionException ee) {
      // Rethrow the loader failure unchecked; callers are not expected to recover here.
      throw Throwables.propagate(ee);
    }
  }

  /**
   * Remove the {@link GobblinMetrics} instance with a given ID.
   *
   * @param id the given {@link GobblinMetrics} ID
   * @return removed {@link GobblinMetrics} instance or {@code null} if no
   *         {@link GobblinMetrics} instance for the given ID is found
   */
  public GobblinMetrics remove(String id) {
    return this.metricsCache.asMap().remove(id);
  }

  /**
   * Get the global instance of {@link GobblinMetricsRegistry}.
   *
   * @return the global {@link GobblinMetricsRegistry} instance
   */
  public static GobblinMetricsRegistry getInstance() {
    return GLOBAL_INSTANCE;
  }

  /**
   * Retrieve the {@link GobblinMetrics} whose cache keys match a given regex.
   */
  @VisibleForTesting
  public Collection<GobblinMetrics> getMetricsByPattern(String regex) {
    // Compile the pattern once instead of re-compiling per entry via String.matches().
    Pattern pattern = Pattern.compile(regex);
    Map<String, GobblinMetrics> entries = this.metricsCache.asMap();
    List<GobblinMetrics> rst = new ArrayList<>();
    for (Map.Entry<String, GobblinMetrics> entry : entries.entrySet()) {
      if (pattern.matcher(entry.getKey()).matches()) {
        rst.add(entry.getValue());
      }
    }
    return rst;
  }

  /**
   * <p>
   * Creates {@link org.apache.gobblin.metrics.MetricContext}. Tries to read the name of the parent context
   * from key "metrics.context.name" at state, and tries to get the parent context by name from
   * the {@link org.apache.gobblin.metrics.MetricContext} registry (the parent context must be registered).
   * </p>
   *
   * <p>
   * Automatically adds two tags to the inner context:
   * <ul>
   *   <li> component: attempts to determine which component type within gobblin-api generated this instance. </li>
   *   <li> class: the specific class of the object that generated this instance of Instrumented </li>
   * </ul>
   * </p>
   */
  public MetricContext getMetricContext(State state, Class<?> klazz, List<Tag<?>> tags) {
    // ThreadLocalRandom avoids allocating and seeding a fresh Random on every call.
    int randomId = ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE);
    List<Tag<?>> generatedTags = Lists.newArrayList();
    if (!klazz.isAnonymousClass()) {
      generatedTags.add(new Tag<>("class", klazz.getCanonicalName()));
    }
    Optional<GobblinMetrics> gobblinMetrics = state.contains(ConfigurationKeys.METRIC_CONTEXT_NAME_KEY)
        ? GobblinMetricsRegistry.getInstance().get(state.getProp(ConfigurationKeys.METRIC_CONTEXT_NAME_KEY))
        : Optional.<GobblinMetrics> absent();
    MetricContext.Builder builder = gobblinMetrics.isPresent()
        ? gobblinMetrics.get().getMetricContext().childBuilder(klazz.getCanonicalName() + "." + randomId)
        : MetricContext.builder(klazz.getCanonicalName() + "." + randomId);
    return builder.addTags(generatedTags).addTags(tags).build();
  }
}
| 4,361 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics/src/main/java/org/apache/gobblin/metrics/GobblinMetrics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.jmx.JmxReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.Closer;
import com.typesafe.config.Config;
import lombok.Getter;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metrics.graphite.GraphiteConnectionType;
import org.apache.gobblin.metrics.graphite.GraphiteEventReporter;
import org.apache.gobblin.metrics.graphite.GraphiteReporter;
import org.apache.gobblin.metrics.influxdb.InfluxDBConnectionType;
import org.apache.gobblin.metrics.influxdb.InfluxDBEventReporter;
import org.apache.gobblin.metrics.influxdb.InfluxDBReporter;
import org.apache.gobblin.metrics.reporter.FileFailureEventReporter;
import org.apache.gobblin.metrics.reporter.OutputStreamEventReporter;
import org.apache.gobblin.metrics.reporter.OutputStreamReporter;
import org.apache.gobblin.metrics.reporter.ScheduledReporter;
import org.apache.gobblin.password.PasswordManager;
import org.apache.gobblin.util.PropertiesUtils;
/**
* A class that represents a set of metrics associated with a given name.
*
* @author Yinan Li
*/
public class GobblinMetrics {
  // Prefix used when constructing metrics IDs for Gobblin components.
  public static final String METRICS_ID_PREFIX = "gobblin.metrics.";
  // State/property key under which serialized custom Tags are stored (see addCustomTagToState).
  public static final String METRICS_STATE_CUSTOM_TAGS = "metrics.state.custom.tags";
  @Getter
  protected static final GobblinMetricsRegistry GOBBLIN_METRICS_REGISTRY = GobblinMetricsRegistry.getInstance();
  /**
   * Enumeration of metric types.
   */
  public enum MetricType {
    COUNTER, METER, GAUGE
  }
  private static final Logger LOGGER = LoggerFactory.getLogger(GobblinMetrics.class);
/**
* Check whether metrics collection and reporting are enabled or not.
*
* @param properties Configuration properties
* @return whether metrics collection and reporting are enabled
*/
public static boolean isEnabled(Properties properties) {
return PropertiesUtils
.getPropAsBoolean(properties, ConfigurationKeys.METRICS_ENABLED_KEY, ConfigurationKeys.DEFAULT_METRICS_ENABLED);
}
/**
* Check whether metrics collection and reporting are enabled or not.
*
* @param state a {@link State} object containing configuration properties
* @return whether metrics collection and reporting are enabled
*/
public static boolean isEnabled(State state) {
return Boolean
.valueOf(state.getProp(ConfigurationKeys.METRICS_ENABLED_KEY, ConfigurationKeys.DEFAULT_METRICS_ENABLED));
}
/**
* Check whether metrics collection and reporting are enabled or not.
*
* @param cfg a {@link State} object containing configuration properties
* @return whether metrics collection and reporting are enabled
*/
public static boolean isEnabled(Config cfg) {
return cfg.hasPath(ConfigurationKeys.METRICS_ENABLED_KEY) ? cfg.getBoolean(ConfigurationKeys.METRICS_ENABLED_KEY)
: Boolean.parseBoolean(ConfigurationKeys.DEFAULT_METRICS_ENABLED);
}
/**
* Get a {@link GobblinMetrics} instance with the given ID.
*
* @param id the given {@link GobblinMetrics} ID
* @return a {@link GobblinMetrics} instance
*/
public static GobblinMetrics get(String id) {
return get(id, null);
}
/**
* Get a {@link GobblinMetrics} instance with the given ID and parent {@link MetricContext}.
*
* @param id the given {@link GobblinMetrics} ID
* @param parentContext the given parent {@link MetricContext}
* @return a {@link GobblinMetrics} instance
*/
public static GobblinMetrics get(String id, MetricContext parentContext) {
return get(id, parentContext, Lists.<Tag<?>>newArrayList());
}
/**
* Get a {@link GobblinMetrics} instance with the given ID, parent {@link MetricContext},
* and list of {@link Tag}s.
*
* @param id the given {@link GobblinMetrics} ID
* @param parentContext the given parent {@link MetricContext}
* @param tags the given list of {@link Tag}s
* @return a {@link GobblinMetrics} instance
*/
  public static GobblinMetrics get(final String id, final MetricContext parentContext, final List<Tag<?>> tags) {
    // Atomically returns the existing instance for this id, or constructs, caches and
    // returns a new one; the Callable only runs when the id is not already registered.
    return GOBBLIN_METRICS_REGISTRY.getOrCreate(id, new Callable<GobblinMetrics>() {
      @Override
      public GobblinMetrics call()
          throws Exception {
        return new GobblinMetrics(id, parentContext, tags);
      }
    });
  }
/**
* Remove the {@link GobblinMetrics} instance associated with the given ID.
*
* @param id the given {@link GobblinMetrics} ID
*/
  public static void remove(String id) {
    GOBBLIN_METRICS_REGISTRY.remove(id); // Drop the registry's cached instance for this id.
  }
/**
* Add a {@link List} of {@link Tag}s to a {@link org.apache.gobblin.configuration.State} with key {@link #METRICS_STATE_CUSTOM_TAGS}.
*
* <p>
* {@link org.apache.gobblin.metrics.Tag}s under this key can later be parsed using the method {@link #getCustomTagsFromState}.
* </p>
*
* @param state {@link org.apache.gobblin.configuration.State} state to add the tag to.
* @param tags list of {@link Tag}s to add.
*/
public static void addCustomTagToState(State state, List<? extends Tag<?>> tags) {
for (Tag<?> tag : tags) {
state.appendToListProp(METRICS_STATE_CUSTOM_TAGS, tag.toString());
}
}
/**
* Add a {@link Tag} to a {@link org.apache.gobblin.configuration.State} with key {@link #METRICS_STATE_CUSTOM_TAGS}.
*
* <p>
* {@link org.apache.gobblin.metrics.Tag}s under this key can later be parsed using the method {@link #getCustomTagsFromState}.
* </p>
*
* @param state {@link org.apache.gobblin.configuration.State} state to add the tag to.
* @param tag {@link Tag} to add.
*/
  public static void addCustomTagToState(State state, Tag<?> tag) {
    // Tags are stored serialized (tag.toString()) in a list-valued property.
    state.appendToListProp(METRICS_STATE_CUSTOM_TAGS, tag.toString());
  }
/**
* Add {@link List} of {@link Tag}s to a {@link Properties} with key {@link #METRICS_STATE_CUSTOM_TAGS}.
* <p>
* Also see {@link #addCustomTagToState(State, Tag)} , {@link #addCustomTagToProperties(Properties, Tag)}
* </p>
*
* <p>
* The {@link Properties} passed can be used to build a {@link State}.
* {@link org.apache.gobblin.metrics.Tag}s under this key can later be parsed using the method {@link #getCustomTagsFromState}.
* </p>
*
* @param properties {@link Properties} to add the tag to.
* @param tags list of {@link Tag}s to add.
*/
public static void addCustomTagsToProperties(Properties properties, List<Tag<?>> tags) {
for (Tag<?> tag : tags) {
addCustomTagToProperties(properties, tag);
}
}
/**
* Add a {@link Tag} to a {@link Properties} with key {@link #METRICS_STATE_CUSTOM_TAGS}.
* Also see {@link #addCustomTagToState(State, Tag)}
*
* <p>
* The {@link Properties} passed can be used to build a {@link State}.
* {@link org.apache.gobblin.metrics.Tag}s under this key can later be parsed using the method {@link #getCustomTagsFromState}.
* </p>
*
* @param properties {@link Properties} to add the tag to.
* @param tag {@link Tag} to add.
*/
public static void addCustomTagToProperties(Properties properties, Tag<?> tag) {
// Build a state wrapper to add custom tag to property
State state = new State(properties);
addCustomTagToState(state, tag);
}
/**
* Parse custom {@link org.apache.gobblin.metrics.Tag}s from property {@link #METRICS_STATE_CUSTOM_TAGS}
* in the input {@link org.apache.gobblin.configuration.State}.
* @param state {@link org.apache.gobblin.configuration.State} possibly containing custom tags.
* @return List of {@link org.apache.gobblin.metrics.Tag} parsed from input.
*/
public static List<Tag<?>> getCustomTagsFromState(State state) {
List<Tag<?>> tags = Lists.newArrayList();
for (String tagKeyValue : state.getPropAsList(METRICS_STATE_CUSTOM_TAGS, "")) {
Tag<?> tag = Tag.fromString(tagKeyValue);
if (tag != null) {
tags.add(tag);
}
}
return tags;
}
  // Identifier under which this instance is registered in GobblinMetricsRegistry.
  protected final String id;
  // Metric context backing all meters/counters/histograms/timers of this instance.
  protected final MetricContext metricContext;
  // Closer for closing the metric output stream
  protected final Closer codahaleReportersCloser = Closer.create();
  // JMX metric reporter
  private Optional<JmxReporter> jmxReporter = Optional.absent();
  // Custom metric reporters instantiated through reflection
  private final List<com.codahale.metrics.ScheduledReporter> codahaleScheduledReporters = Lists.newArrayList();
  // A flag telling whether metric reporting has started or not
  private volatile boolean metricsReportingStarted = false;
protected GobblinMetrics(String id, MetricContext parentContext, List<Tag<?>> tags) {
this.id = id;
this.metricContext = parentContext == null ? new MetricContext.Builder(id).addTags(tags).build()
: parentContext.childBuilder(id).addTags(tags).build();
}
/**
* Get the wrapped {@link com.codahale.metrics.MetricRegistry} instance.
*
* @return wrapped {@link com.codahale.metrics.MetricRegistry} instance
*/
  public MetricContext getMetricContext() {
    return this.metricContext; // Assigned once in the constructor.
  }
/**
* Get the ID of this {@link GobblinMetrics}.
*
* @return ID of this {@link GobblinMetrics}
*/
  public String getId() {
    return this.id; // Immutable identifier assigned at construction.
  }
/**
* Get the name of this {@link GobblinMetrics}.
*
* <p>
* This method is currently equivalent to {@link #getId()}.
* </p>
*
* @return name of this {@link GobblinMetrics}
*/
  public String getName() {
    return this.id; // Name and ID are currently the same value.
  }
/**
* Get a {@link Meter} with the given name prefix and suffixes.
*
* @param prefix the given name prefix
* @param suffixes the given name suffixes
* @return a {@link Meter} with the given name prefix and suffixes
*/
public Meter getMeter(String prefix, String... suffixes) {
return this.metricContext.meter(MetricRegistry.name(prefix, suffixes));
}
/**
* Get a {@link Counter} with the given name prefix and suffixes.
*
* @param prefix the given name prefix
* @param suffixes the given name suffixes
* @return a {@link Counter} with the given name prefix and suffixes
*/
public Counter getCounter(String prefix, String... suffixes) {
return this.metricContext.counter(MetricRegistry.name(prefix, suffixes));
}
/**
* Get a {@link Histogram} with the given name prefix and suffixes.
*
* @param prefix the given name prefix
* @param suffixes the given name suffixes
* @return a {@link Histogram} with the given name prefix and suffixes
*/
public Histogram getHistogram(String prefix, String... suffixes) {
return this.metricContext.histogram(MetricRegistry.name(prefix, suffixes));
}
/**
* Get a {@link Timer} with the given name prefix and suffixes.
*
* @param prefix the given name prefix
* @param suffixes the given name suffixes
* @return a {@link Timer} with the given name prefix and suffixes
*/
public Timer getTimer(String prefix, String... suffixes) {
return this.metricContext.timer(MetricRegistry.name(prefix, suffixes));
}
/**
* Starts metric reporting and appends the given metrics file suffix to the current value of
* {@link ConfigurationKeys#METRICS_FILE_SUFFIX}.
*/
public void startMetricReportingWithFileSuffix(State state, String metricsFileSuffix)
throws MultiReporterException {
Properties metricsReportingProps = new Properties();
metricsReportingProps.putAll(state.getProperties());
String oldMetricsFileSuffix =
state.getProp(ConfigurationKeys.METRICS_FILE_SUFFIX, ConfigurationKeys.DEFAULT_METRICS_FILE_SUFFIX);
if (Strings.isNullOrEmpty(oldMetricsFileSuffix)) {
oldMetricsFileSuffix = metricsFileSuffix;
} else {
oldMetricsFileSuffix += "." + metricsFileSuffix;
}
metricsReportingProps.setProperty(ConfigurationKeys.METRICS_FILE_SUFFIX, oldMetricsFileSuffix);
startMetricReporting(metricsReportingProps);
}
/**
* Start metric reporting.
*
* @param configuration configuration properties
*/
public void startMetricReporting(Configuration configuration)
throws MultiReporterException {
Properties props = new Properties();
for (Map.Entry<String, String> entry : configuration) {
props.put(entry.getKey(), entry.getValue());
}
startMetricReporting(props);
}
/**
* Start metric reporting.
*
* @param properties configuration properties
*/
  public void startMetricReporting(Properties properties) throws MultiReporterException {
    // Idempotent: a second call while reporting is active is a no-op.
    if (this.metricsReportingStarted) {
      LOGGER.warn("Metric reporting has already started");
      return;
    }
    TimeUnit reportTimeUnit = TimeUnit.MILLISECONDS;
    long reportInterval = Long.parseLong(properties
        .getProperty(ConfigurationKeys.METRICS_REPORT_INTERVAL_KEY, ConfigurationKeys.DEFAULT_METRICS_REPORT_INTERVAL));
    ScheduledReporter.setReportingInterval(properties, reportInterval, reportTimeUnit);
    long startTime = System.currentTimeMillis();
    // Per-sink construction failures are collected rather than thrown immediately, so the
    // remaining sinks still get built and started; they are surfaced at the end.
    List<MetricReporterException> reporterExceptions = Lists.newArrayList();
    try {
      for (ReporterSinkType sinkType: ReporterSinkType.values()) {
        if (sinkType.equals(ReporterSinkType.CUSTOM)) {
          // Custom reporters append their own failures into reporterExceptions.
          buildCustomMetricReporters(properties, reporterExceptions);
        } else {
          try {
            buildReporter(properties, sinkType);
          } catch (MultiReporterException e) {
            reporterExceptions.addAll(e.getExceptions());
          }
        }
      }
      // Start reporters that implement org.apache.gobblin.metrics.report.ScheduledReporter
      RootMetricContext.get().startReporting();
      // Start reporters that implement com.codahale.metrics.ScheduledReporter
      for (com.codahale.metrics.ScheduledReporter scheduledReporter : this.codahaleScheduledReporters) {
        scheduledReporter.start(reportInterval, reportTimeUnit);
      }
    } catch (Exception e) {
      LOGGER.error("Metrics reporting cannot be started due to {}", ExceptionUtils.getFullStackTrace(e));
      throw e;
    }
    this.metricsReportingStarted = true;
    LOGGER.info("Metrics reporting has been started in {} ms: GobblinMetrics {}",
        System.currentTimeMillis() - startTime, this.toString());
    // Reporting is considered started even if some sinks failed; notify the caller anyway.
    if (!reporterExceptions.isEmpty()) {
      throw new MultiReporterException("Could not create one or more reporters", reporterExceptions);
    }
  }
  // Dispatches to the builder for each built-in sink type. CUSTOM is intentionally not
  // handled here: the caller (startMetricReporting) routes it to buildCustomMetricReporters,
  // so it falls through to the default branch if passed in.
  private void buildReporter(Properties properties, ReporterSinkType sinkType) throws MultiReporterException {
    switch (sinkType) {
      case JMX:
        buildJmxMetricReporter(properties);
        break;
      case FILE:
        buildFileMetricReporter(properties);
        break;
      case KAFKA:
        buildKafkaMetricReporter(properties);
        break;
      case GRAPHITE:
        buildGraphiteMetricReporter(properties);
        break;
      case INFLUXDB:
        buildInfluxDBMetricReporter(properties);
        break;
      case FILE_FAILURE:
        buildFileFailureEventReporter(properties);
        break;
      default:
        // Unrecognized sink types are logged and ignored rather than failing reporting setup.
        LOGGER.error("Unknown reporter sink type: {}", sinkType.name());
        break;
    }
  }
/**
* Stop metric reporting.
*/
  public void stopMetricsReporting() {
    LOGGER.info("Metrics reporting will be stopped: GobblinMetrics {}", this.toString());
    // No-op unless reporting was previously started.
    if (!this.metricsReportingStarted) {
      LOGGER.warn("Metric reporting has not started yet");
      return;
    }
    // Stop the JMX reporter
    if (this.jmxReporter.isPresent()) {
      this.jmxReporter.get().stop();
    }
    // Trigger and stop reporters that implement org.apache.gobblin.metrics.report.ScheduledReporter
    RootMetricContext.get().stopReporting();
    // Trigger and stop reporters that implement com.codahale.metrics.ScheduledReporter
    for (com.codahale.metrics.ScheduledReporter scheduledReporter : this.codahaleScheduledReporters) {
      scheduledReporter.report();
    }
    try {
      this.codahaleReportersCloser.close();
    } catch (IOException ioe) {
      // IOExceptions are logged and swallowed so shutdown still completes below.
      LOGGER.error("Failed to close metric output stream for job " + this.id, ioe);
    } catch (Exception e) {
      // NOTE(review): a non-IOException is rethrown BEFORE metricsReportingStarted is reset
      // and before the registry entry is removed, leaving this instance marked as started —
      // confirm whether that is intentional.
      LOGGER.error("Failed to close metric output stream for job {} due to {}", this.id, ExceptionUtils.getFullStackTrace(e));
      throw e;
    }
    this.metricsReportingStarted = false;
    // Remove from the cache registry
    GobblinMetrics.remove(id);
    LOGGER.info("Metrics reporting stopped successfully");
  }
private void buildFileMetricReporter(Properties properties)
throws MultiReporterException {
if (!Boolean.valueOf(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_FILE_ENABLED_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_FILE_ENABLED))) {
return;
}
LOGGER.info("Reporting metrics to log files");
if (!properties.containsKey(ConfigurationKeys.METRICS_LOG_DIR_KEY)) {
MetricReporterException e = new MetricReporterException(
"Not reporting metrics to log files because " + ConfigurationKeys.METRICS_LOG_DIR_KEY + " is undefined", ReporterType.METRIC, ReporterSinkType.FILE);
throw new MultiReporterException("Failed to create file metric reporter", Lists.newArrayList(e));
}
try {
String fsUri = properties.getProperty(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = FileSystem.get(URI.create(fsUri), new Configuration());
// Each job gets its own metric log subdirectory
Path metricsLogDir = new Path(properties.getProperty(ConfigurationKeys.METRICS_LOG_DIR_KEY), this.getName());
if (!fs.exists(metricsLogDir) && !fs.mkdirs(metricsLogDir)) {
throw new MetricReporterException("Failed to create metric log directory for metrics " + this.getName(), ReporterType.METRIC, ReporterSinkType.FILE);
}
// Add a suffix to file name if specified in properties.
String metricsFileSuffix =
properties.getProperty(ConfigurationKeys.METRICS_FILE_SUFFIX, ConfigurationKeys.DEFAULT_METRICS_FILE_SUFFIX);
if (!Strings.isNullOrEmpty(metricsFileSuffix) && !metricsFileSuffix.startsWith(".")) {
metricsFileSuffix = "." + metricsFileSuffix;
}
// Each job run gets its own metric log file
Path metricLogFile =
new Path(metricsLogDir, this.id + metricsFileSuffix + ".metrics.log");
boolean append = false;
// Append to the metric file if it already exists
if (fs.exists(metricLogFile)) {
LOGGER.info(String.format("Metric log file %s already exists, appending to it", metricLogFile));
append = true;
}
OutputStream output = append ? fs.append(metricLogFile) : fs.create(metricLogFile, true);
// Add metrics reporter
OutputStreamReporter.Factory.newBuilder().outputTo(output).build(properties);
// Set up events reporter at the same time!!
this.codahaleScheduledReporters.add(this.codahaleReportersCloser
.register(OutputStreamEventReporter.forContext(RootMetricContext.get()).outputTo(output).build()));
LOGGER.info("Will start reporting metrics to directory " + metricsLogDir);
} catch (IOException ioe) {
MetricReporterException e = new MetricReporterException("Failed to build file metric reporter for job " + this.id, ioe, ReporterType.METRIC, ReporterSinkType.FILE);
throw new MultiReporterException("Failed to create file metric reporter", Lists.newArrayList(e));
}
}
private void buildFileFailureEventReporter(Properties properties)
throws MultiReporterException {
if ((!Boolean.valueOf(properties.getProperty(ConfigurationKeys.FAILURE_REPORTING_FILE_ENABLED_KEY,
ConfigurationKeys.DEFAULT_FAILURE_REPORTING_FILE_ENABLED)) || !properties.containsKey(ConfigurationKeys.FAILURE_LOG_DIR_KEY))) {
return;
}
LOGGER.info("Reporting failure to log files");
try {
String fsUri = properties.getProperty(ConfigurationKeys.FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI);
FileSystem fs = FileSystem.get(URI.create(fsUri), new Configuration());
// Each job gets its own log subdirectory
Path failureLogDir = new Path(properties.getProperty(ConfigurationKeys.FAILURE_LOG_DIR_KEY), this.getName());
if (!fs.exists(failureLogDir) && !fs.mkdirs(failureLogDir)) {
throw new MetricReporterException("Failed to create failure log directory for metrics " + this.getName(), ReporterType.EVENT, ReporterSinkType.FILE);
}
// Add a suffix to file name if specified in properties.
String metricsFileSuffix =
properties.getProperty(ConfigurationKeys.METRICS_FILE_SUFFIX, ConfigurationKeys.DEFAULT_METRICS_FILE_SUFFIX);
if (!Strings.isNullOrEmpty(metricsFileSuffix) && !metricsFileSuffix.startsWith(".")) {
metricsFileSuffix = "." + metricsFileSuffix;
}
// Each job run gets its own failure log file
Path failureLogFile =
new Path(failureLogDir, this.id + metricsFileSuffix + ".failure.log");
this.codahaleScheduledReporters.add(this.codahaleReportersCloser
.register(new FileFailureEventReporter(RootMetricContext.get(), fs, failureLogFile)));
LOGGER.info("Will start reporting failure to directory " + failureLogDir);
} catch (IOException ioe) {
MetricReporterException e = new MetricReporterException("Failed to build file failure event reporter for job " + this.id, ioe, ReporterType.EVENT, ReporterSinkType.FILE);
throw new MultiReporterException("Failed to create failure file event reporter", Lists.newArrayList(e));
}
}
private void buildJmxMetricReporter(Properties properties) {
if (!Boolean.valueOf(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_JMX_ENABLED_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_JMX_ENABLED))) {
return;
}
LOGGER.info("Reporting metrics to JMX");
this.jmxReporter = Optional.of(codahaleReportersCloser.register(JmxReporter.forRegistry(RootMetricContext.get()).
convertRatesTo(TimeUnit.SECONDS).convertDurationsTo(TimeUnit.MILLISECONDS).build()));
}
private void buildKafkaMetricReporter(Properties properties)
throws MultiReporterException {
List<MetricReporterException> reporterExceptions = Lists.newArrayList();
if (!Boolean.parseBoolean(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_KAFKA_ENABLED_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_KAFKA_ENABLED))) {
return;
}
if (Boolean.parseBoolean(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_KAFKA_METRICS_ENABLED_KEY,
Boolean.toString(true)))) {
try {
buildScheduledReporter(properties, ConfigurationKeys.DEFAULT_METRICS_REPORTING_KAFKA_REPORTER_CLASS);
} catch (MetricReporterException e) {
reporterExceptions.add(e);
}
}
if (Boolean.parseBoolean(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_KAFKA_EVENTS_ENABLED_KEY,
Boolean.toString(true)))) {
try {
buildScheduledReporter(properties, ConfigurationKeys.DEFAULT_EVENTS_REPORTING_KAFKA_REPORTER_CLASS);
} catch (MetricReporterException e) {
reporterExceptions.add(e);
}
}
if (!reporterExceptions.isEmpty()) {
throw new MultiReporterException("Failed to start one or more Kafka reporters", reporterExceptions);
}
}
private void buildGraphiteMetricReporter(Properties properties)
throws MultiReporterException {
List<MetricReporterException> reporterExceptionList = Lists.newArrayList();
boolean metricsEnabled = PropertiesUtils
.getPropAsBoolean(properties, ConfigurationKeys.METRICS_REPORTING_GRAPHITE_METRICS_ENABLED_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_METRICS_ENABLED);
if (metricsEnabled) {
LOGGER.info("Reporting metrics to Graphite");
}
boolean eventsEnabled = PropertiesUtils
.getPropAsBoolean(properties, ConfigurationKeys.METRICS_REPORTING_GRAPHITE_EVENTS_ENABLED_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_EVENTS_ENABLED);
if (eventsEnabled) {
LOGGER.info("Reporting events to Graphite");
}
if (!metricsEnabled && !eventsEnabled) {
return;
}
try {
Preconditions.checkArgument(properties.containsKey(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_HOSTNAME),
"Graphite hostname is missing.");
} catch (IllegalArgumentException exception) {
reporterExceptionList.add(new MetricReporterException("Missing Graphite configuration(s).", exception, ReporterType.METRIC_EVENT, ReporterSinkType.GRAPHITE));
throw new MultiReporterException("Failed to start one or more Graphite reporters", reporterExceptionList);
}
String hostname = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_HOSTNAME);
int port = Integer.parseInt(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_PORT,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_PORT));
GraphiteConnectionType connectionType;
String type = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_SENDING_TYPE,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_SENDING_TYPE).toUpperCase();
String prefix = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_PREFIX,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_PREFIX);
try {
connectionType = GraphiteConnectionType.valueOf(type);
} catch (IllegalArgumentException exception) {
LOGGER
.warn("Graphite Reporter connection type " + type + " not recognized. Will use TCP for sending.", exception);
connectionType = GraphiteConnectionType.TCP;
}
if (metricsEnabled) {
try {
GraphiteReporter.Factory.newBuilder().withConnectionType(connectionType)
.withConnection(hostname, port).withMetricContextName(
this.metricContext.getName()) //contains the current job id
.withMetricsPrefix(prefix)
.build(properties);
} catch (IOException e) {
reporterExceptionList.add(new MetricReporterException("Failed to create Graphite metrics reporter.", e, ReporterType.METRIC, ReporterSinkType.GRAPHITE));
}
}
if (eventsEnabled) {
boolean emitValueAsKey = PropertiesUtils
.getPropAsBoolean(properties, ConfigurationKeys.METRICS_REPORTING_GRAPHITE_EVENTS_VALUE_AS_KEY,
ConfigurationKeys.DEFAULT_METRICS_REPORTING_GRAPHITE_EVENTS_VALUE_AS_KEY);
String eventsPortProp = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_EVENTS_PORT);
int eventsPort = (eventsPortProp == null) ? (metricsEnabled ? port
: Integer.parseInt(ConfigurationKeys.METRICS_REPORTING_GRAPHITE_PORT)) : Integer.parseInt(eventsPortProp);
try {
GraphiteEventReporter eventReporter =
GraphiteEventReporter.Factory.forContext(RootMetricContext.get())
.withConnectionType(connectionType)
.withConnection(hostname, eventsPort)
.withPrefix(prefix)
.withEmitValueAsKey(emitValueAsKey)
.build();
this.codahaleScheduledReporters.add(this.codahaleReportersCloser.register(eventReporter));
}
catch (IOException e) {
reporterExceptionList.add(new MetricReporterException("Failed to create Graphite event reporter.", e, ReporterType.EVENT, ReporterSinkType.GRAPHITE));
}
}
if (!reporterExceptionList.isEmpty()) {
throw new MultiReporterException("Failed to create one or more Graphite Reporters", reporterExceptionList);
}
}
  /**
   * Builds the InfluxDB metric and event reporters, as enabled by configuration.
   *
   * <p>Requires {@code ConfigurationKeys.METRICS_REPORTING_INFLUXDB_DATABASE} when either reporter
   * is enabled. An unrecognized sending type falls back to TCP. Failures to create individual
   * reporters are collected and surfaced together in a single {@link MultiReporterException}.
   *
   * @param properties configuration to read InfluxDB connection and reporter settings from
   * @throws MultiReporterException if the database name is missing or any reporter fails to build
   */
  private void buildInfluxDBMetricReporter(Properties properties)
      throws MultiReporterException {
    List<MetricReporterException> reporterExceptionList = Lists.newArrayList();
    boolean metricsEnabled = PropertiesUtils
        .getPropAsBoolean(properties, ConfigurationKeys.METRICS_REPORTING_INFLUXDB_METRICS_ENABLED_KEY,
            ConfigurationKeys.DEFAULT_METRICS_REPORTING_INFLUXDB_METRICS_ENABLED);
    if (metricsEnabled) {
      LOGGER.info("Reporting metrics to InfluxDB");
    }
    boolean eventsEnabled = PropertiesUtils
        .getPropAsBoolean(properties, ConfigurationKeys.METRICS_REPORTING_INFLUXDB_EVENTS_ENABLED_KEY,
            ConfigurationKeys.DEFAULT_METRICS_REPORTING_INFLUXDB_EVENTS_ENABLED);
    if (eventsEnabled) {
      LOGGER.info("Reporting events to InfluxDB");
    }
    if (!metricsEnabled && !eventsEnabled) {
      return;
    }
    // The database name is mandatory; the IllegalArgumentException raised by checkArgument is
    // preserved as the cause of the reporter exception surfaced to the caller.
    try {
      Preconditions.checkArgument(properties.containsKey(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_DATABASE),
          "InfluxDB database name is missing.");
    } catch (IllegalArgumentException exception) {
      reporterExceptionList.add(new MetricReporterException("Missing InfluxDB configuration(s)", exception, ReporterType.METRIC_EVENT, ReporterSinkType.INFLUXDB));
      throw new MultiReporterException("Failed to start one or more InfluxDB reporters", reporterExceptionList);
    }
    String url = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_URL);
    String username = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_USER);
    // The configured password may be stored encrypted; PasswordManager resolves the usable value.
    String password = PasswordManager.getInstance(properties)
        .readPassword(properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_PASSWORD));
    String database = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_DATABASE);
    InfluxDBConnectionType connectionType;
    String type = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_SENDING_TYPE,
        ConfigurationKeys.DEFAULT_METRICS_REPORTING_INFLUXDB_SENDING_TYPE).toUpperCase();
    // Unrecognized sending types degrade to TCP rather than failing the reporter.
    try {
      connectionType = InfluxDBConnectionType.valueOf(type);
    } catch (IllegalArgumentException exception) {
      LOGGER
          .warn("InfluxDB Reporter connection type " + type + " not recognized. Will use TCP for sending.", exception);
      connectionType = InfluxDBConnectionType.TCP;
    }
    if (metricsEnabled) {
      try {
        InfluxDBReporter.Factory.newBuilder().withConnectionType(connectionType)
            .withConnection(url, username, password, database).withMetricContextName(
            this.metricContext.getName()) // contains the current job id
            .build(properties);
      } catch (IOException e) {
        reporterExceptionList.add(new MetricReporterException("Failed to create InfluxDB metrics reporter.", e, ReporterType.METRIC, ReporterSinkType.INFLUXDB));
      }
    }
    if (eventsEnabled) {
      // Events reuse the metrics database unless a dedicated events database is configured; with
      // metrics disabled and no explicit events database, null is passed to the builder.
      String eventsDbProp = properties.getProperty(ConfigurationKeys.METRICS_REPORTING_INFLUXDB_EVENTS_DATABASE);
      String eventsDatabase = (eventsDbProp == null) ? (metricsEnabled ? database : null) : eventsDbProp;
      try {
        InfluxDBEventReporter eventReporter =
            InfluxDBEventReporter.Factory.forContext(RootMetricContext.get())
                .withConnectionType(connectionType)
                .withConnection(url, username, password, eventsDatabase)
                .build();
        this.codahaleScheduledReporters.add(this.codahaleReportersCloser.register(eventReporter));
      }
      catch (IOException e) {
        reporterExceptionList.add(new MetricReporterException("Failed to create InfluxDB event reporter.", e, ReporterType.EVENT, ReporterSinkType.INFLUXDB));
      }
    }
    if (!reporterExceptionList.isEmpty()) {
      throw new MultiReporterException("Failed to create one or more InfluxDB reporters", reporterExceptionList);
    }
  }
/**
* Build scheduled metrics reporters by reflection from the property
* {@link org.apache.gobblin.configuration.ConfigurationKeys#METRICS_CUSTOM_BUILDERS}. This allows users to specify custom
* reporters for Gobblin runtime without having to modify the code.
*/
private void buildCustomMetricReporters(Properties properties, List<MetricReporterException> reporterExceptions) {
String reporterClasses = properties.getProperty(ConfigurationKeys.METRICS_CUSTOM_BUILDERS);
if (Strings.isNullOrEmpty(reporterClasses)) {
return;
}
for (String reporterClass : Splitter.on(",").split(reporterClasses)) {
try {
buildScheduledReporter(properties, reporterClass);
} catch (MetricReporterException e) {
reporterExceptions.add(e);
}
}
}
  /**
   * Instantiates a single reporter factory class by reflection and wires up its reporter.
   *
   * <p>The class must implement either {@link CustomCodahaleReporterFactory} (the produced reporter
   * is registered with the closer and added to the scheduled-reporter list) or
   * {@link CustomReporterFactory} (the factory manages the reporter's lifecycle itself).
   *
   * @param properties configuration handed to the reporter factory
   * @param reporterClass fully qualified name of the factory class to instantiate
   * @throws MetricReporterException if the class cannot be found, has no parameterless constructor,
   *         implements neither factory interface, or fails during construction
   */
  private void buildScheduledReporter(Properties properties, String reporterClass)
      throws MetricReporterException {
    try {
      Class<?> clazz = Class.forName(reporterClass);
      if (CustomCodahaleReporterFactory.class.isAssignableFrom(clazz)) {
        CustomCodahaleReporterFactory customCodahaleReporterFactory =
            ((CustomCodahaleReporterFactory) clazz.getConstructor().newInstance());
        com.codahale.metrics.ScheduledReporter scheduledReporter =
            customCodahaleReporterFactory.newScheduledReporter(RootMetricContext.get(), properties);
        // A factory may decline to produce a reporter; skip it with a warning instead of failing.
        if (scheduledReporter == null) {
          LOGGER.warn("Factory {} returns a null scheduledReporter", clazz.getSimpleName());
          return;
        }
        this.codahaleReportersCloser.register(scheduledReporter);
        LOGGER.info("Will start reporting metrics using " + reporterClass);
        this.codahaleScheduledReporters.add(scheduledReporter);
      } else if (CustomReporterFactory.class.isAssignableFrom(clazz)) {
        CustomReporterFactory customReporterFactory = ((CustomReporterFactory) clazz.getConstructor().newInstance());
        customReporterFactory.newScheduledReporter(properties);
        LOGGER.info("Will start reporting metrics using " + reporterClass);
      } else {
        throw new MetricReporterException("Class " + reporterClass +
            " specified by key " + ConfigurationKeys.METRICS_CUSTOM_BUILDERS + " must implement: "
            + CustomCodahaleReporterFactory.class + " or " + CustomReporterFactory.class, ReporterType.CUSTOM, ReporterSinkType.CUSTOM);
      }
    } catch (ClassNotFoundException exception) {
      throw new MetricReporterException(String
          .format("Failed to create metric reporter: requested CustomReporterFactory %s not found.", reporterClass),
          exception, ReporterType.CUSTOM, ReporterSinkType.CUSTOM);
    } catch (NoSuchMethodException exception) {
      throw new MetricReporterException(String.format("Failed to create metric reporter: requested CustomReporterFactory %s "
          + "does not have parameterless constructor.", reporterClass), exception, ReporterType.CUSTOM, ReporterSinkType.CUSTOM);
    } catch (MetricReporterException exception) {
      // Rethrown unchanged so the broad Exception handler below does not wrap it a second time.
      throw exception;
    } catch (Exception exception) {
      throw new MetricReporterException("Could not create metric reporter from builder " + reporterClass + ".", exception, ReporterType.CUSTOM, ReporterSinkType.CUSTOM);
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
/**
 * Metric name constants used by Gobblin Service (GaaS).
 *
 * <p>This is a non-instantiable constants holder. The {@code GobblinService.} prefix can be used
 * with {@code MetricNameRegexFilter} to distinguish service metrics from other Gobblin metrics in
 * any {@code MetricReporter}.
 */
public class ServiceMetricNames {
  // These prefixes can be used to distinguish metrics reported by GobblinService from other metrics reported by Gobblin
  // This can be used in conjunction with MetricNameRegexFilter to filter out metrics in any MetricReporter
  public static final String GOBBLIN_SERVICE_PREFIX = "GobblinService";
  public static final String GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER = GOBBLIN_SERVICE_PREFIX + ".";
  public static final String GOBBLIN_JOB_METRICS_PREFIX = "JobMetrics";

  // Flow Compilation Meters and Timer
  public static final String FLOW_COMPILATION_SUCCESSFUL_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowCompilation.successful";
  public static final String FLOW_COMPILATION_FAILED_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowCompilation.failed";
  public static final String FLOW_COMPILATION_TIMER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowCompilation.time";
  public static final String DATA_AUTHORIZATION_TIMER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowCompilation.dataAuthorization.time";

  // Flow Orchestration Meters and Timer
  public static final String FLOW_ORCHESTRATION_SUCCESSFUL_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowOrchestration.successful";
  public static final String FLOW_ORCHESTRATION_FAILED_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowOrchestration.failed";
  public static final String FLOW_ORCHESTRATION_TIMER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowOrchestration.time";
  public static final String FLOW_ORCHESTRATION_DELAY = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowOrchestration.delay";

  // Flow Trigger Handler
  public static final String FLOW_TRIGGER_HANDLER_PREFIX = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "flowTriggerHandler.";
  public static final String GOBBLIN_FLOW_TRIGGER_HANDLER_NUM_FLOWS_SUBMITTED = FLOW_TRIGGER_HANDLER_PREFIX + "numFlowsSubmitted";
  public static final String FLOW_TRIGGER_HANDLER_LEASE_OBTAINED_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "leaseObtained";
  public static final String FLOW_TRIGGER_HANDLER_LEASED_TO_ANOTHER_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "leasedToAnother";
  public static final String FLOW_TRIGGER_HANDLER_NO_LONGER_LEASING_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "noLongerLeasing";
  public static final String FLOW_TRIGGER_HANDLER_JOB_DOES_NOT_EXIST_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "jobDoesNotExistInScheduler";
  public static final String FLOW_TRIGGER_HANDLER_FAILED_TO_SET_REMINDER_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "failedToSetReminderCount";
  public static final String FLOW_TRIGGER_HANDLER_LEASES_OBTAINED_DUE_TO_REMINDER_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "leasesObtainedDueToReminderCount";
  public static final String FLOW_TRIGGER_HANDLER_FAILED_TO_RECORD_LEASE_SUCCESS_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "failedToRecordLeaseSuccessCount";
  public static final String FLOW_TRIGGER_HANDLER_RECORDED_LEASE_SUCCESS_COUNT = FLOW_TRIGGER_HANDLER_PREFIX + "recordedLeaseSuccessCount";

  // DagManager Related Metrics
  public static final String DAG_MANAGER_PREFIX = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "dagManager";
  public static final String
      DAG_MANAGER_FAILED_LAUNCH_EVENTS_ON_STARTUP_COUNT = DAG_MANAGER_PREFIX + ".failedLaunchEventsOnStartupCount";
  public static final String FLOW_FAILED_FORWARD_TO_DAG_MANAGER_COUNT = DAG_MANAGER_PREFIX + ".flowFailedForwardToDagManagerCount";

  //Job status poll timer
  public static final String JOB_STATUS_POLLED_TIMER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "jobStatusPoll.time";

  public static final String CREATE_FLOW_METER = "CreateFlow";
  public static final String DELETE_FLOW_METER = "DeleteFlow";
  public static final String RUN_IMMEDIATELY_FLOW_METER = "RunImmediatelyFlow";
  public static final String SUCCESSFUL_FLOW_METER = "SuccessfulFlows";
  public static final String START_SLA_EXCEEDED_FLOWS_METER = "StartSLAExceededFlows";
  public static final String SLA_EXCEEDED_FLOWS_METER = "SlaExceededFlows";
  public static final String FAILED_FLOW_METER = "FailedFlows";
  public static final String SCHEDULED_FLOW_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "ScheduledFlows";
  public static final String NON_SCHEDULED_FLOW_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "NonScheduledFlows";
  public static final String SKIPPED_FLOWS = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "SkippedFlows";
  public static final String RUNNING_FLOWS_COUNTER = "RunningFlows";
  public static final String SERVICE_USERS = "ServiceUsers";
  public static final String COMPILED = "Compiled";
  public static final String RUNNING_STATUS = "RunningStatus";

  public static final String JOBS_SENT_TO_SPEC_EXECUTOR = "JobsSentToSpecExecutor";

  public static final String HELIX_LEADER_STATE = "HelixLeaderState";

  public static final String FLOWGRAPH_UPDATE_FAILED_METER = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "FlowgraphUpdateFailed";

  public static final String DAG_COUNT_MYSQL_DAG_STATE_COUNT = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "MysqlDagStateStore" + ".totalDagCount";
  public static final String DAG_COUNT_FS_DAG_STATE_COUNT = GOBBLIN_SERVICE_PREFIX_WITH_DELIMITER + "FsDagStateStore" + ".totalDagCount";

  // Constants-only holder; prevent instantiation (Effective Java Item 4).
  private ServiceMetricNames() {
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.IOException;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.Timer;
import static org.apache.gobblin.metrics.test.TestConstants.*;
import org.apache.gobblin.metrics.reporter.ContextAwareScheduledReporter;
/**
* Unit tests for {@link MetricContext}.
*
* <p>
* This test class also tests classes {@link ContextAwareCounter}, {@link ContextAwareMeter},
* {@link ContextAwareHistogram}, {@link ContextAwareTimer}, {@link ContextAwareGauge},
* {@link org.apache.gobblin.metrics.reporter.ContextAwareScheduledReporter}, and {@link TagBasedMetricFilter}.
* </p>
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.metrics"})
public class MetricContextTest {
  // Name for the child context created from the primary context under test.
  private static final String CHILD_CONTEXT_NAME = "TestChildContext";
  // Tag key/value prefixes distinguishing the parent (job) context from the child (task) context.
  private static final String JOB_ID_KEY = "job.id";
  private static final String JOB_ID_PREFIX = "TestJob-";
  private static final String TASK_ID_KEY = "task.id";
  private static final String TASK_ID_PREFIX = "TestTask-";
  // Metric-group tagging constants.
  private static final String METRIC_GROUP_KEY = "metric.group";
  private static final String INPUT_RECORDS_GROUP = "INPUT_RECORDS";
  // Name under which the test scheduled reporter is registered.
  private static final String TEST_REPORTER_NAME = TestContextAwareScheduledReporter.class.getName();
  // Parent context (built in setUp) and child context (built in testChildContext).
  private MetricContext context;
  private MetricContext childContext;
  // Builds the parent context with a job-id tag and verifies its name, parent, and auto-added tags.
  @BeforeClass
  public void setUp() {
    // Use a unique (UUID-suffixed) name so this context does not clash with others by name.
    String contextName = CONTEXT_NAME + "_" + UUID.randomUUID().toString();
    this.context = MetricContext.builder(contextName)
        .addTag(new Tag<String>(JOB_ID_KEY, JOB_ID_PREFIX + 0))
        .build();
    Assert.assertEquals(this.context.getName(), contextName);
    // A context built without an explicit parent hangs off the root metric context.
    Assert.assertTrue(this.context.getParent().isPresent());
    Assert.assertEquals(this.context.getParent().get(), RootMetricContext.get());
    Assert.assertEquals(this.context.getTags().size(), 3); // uuid and name tag gets added automatically
    Assert.assertEquals(this.context.getTags().get(0).getKey(), JOB_ID_KEY);
    Assert.assertEquals(this.context.getTags().get(0).getValue(), JOB_ID_PREFIX + 0);
    // Second tag should be uuid
    Assert.assertTrue(this.context.getTags().get(1).getValue().toString()
        .matches("[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}"))
;
  }
  // Builds a child context and verifies it inherits the parent's tags plus its own task-id tag.
  @Test
  public void testChildContext() {
    this.childContext = this.context.childBuilder(CHILD_CONTEXT_NAME)
        .addTag(new Tag<String>(TASK_ID_KEY, TASK_ID_PREFIX + 0))
        .build();
    Assert.assertEquals(this.childContext.getName(), CHILD_CONTEXT_NAME);
    Assert.assertTrue(this.childContext.getParent().isPresent());
    Assert.assertEquals(this.childContext.getParent().get(), this.context);
    // Inherited job-id, context-id, and context-name tags come first; the child's own tag is last.
    Assert.assertEquals(this.childContext.getTags().size(), 4);
    Assert.assertEquals(this.childContext.getTags().get(0).getKey(), JOB_ID_KEY);
    Assert.assertEquals(this.childContext.getTags().get(0).getValue(), JOB_ID_PREFIX + 0);
    Assert.assertEquals(this.childContext.getTags().get(1).getKey(), MetricContext.METRIC_CONTEXT_ID_TAG_NAME);
    Assert.assertEquals(this.childContext.getTags().get(2).getKey(), MetricContext.METRIC_CONTEXT_NAME_TAG_NAME);
    Assert.assertEquals(this.childContext.getTags().get(3).getKey(), TASK_ID_KEY);
    Assert.assertEquals(this.childContext.getTags().get(3).getValue(), TASK_ID_PREFIX + 0);
  }
  // Verifies counter registration, inc/dec arithmetic, and that updates on a child-context counter
  // also flow into the same-named counter of the parent context.
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareCounter() {
    ContextAwareCounter jobRecordsProcessed = this.context.contextAwareCounter(RECORDS_PROCESSED);
    Assert.assertEquals(this.context.getCounters().get(jobRecordsProcessed.getName()),
        jobRecordsProcessed.getInnerMetric());
    Assert.assertEquals(jobRecordsProcessed.getContext(), this.context);
    Assert.assertEquals(jobRecordsProcessed.getName(), RECORDS_PROCESSED);
    // Parent counter alone: +1 +5 -1 -3 = 2.
    jobRecordsProcessed.inc();
    Assert.assertEquals(jobRecordsProcessed.getCount(), 1l);
    jobRecordsProcessed.inc(5);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 6l);
    jobRecordsProcessed.dec();
    Assert.assertEquals(jobRecordsProcessed.getCount(), 5l);
    jobRecordsProcessed.dec(3);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 2l);
    ContextAwareCounter taskRecordsProcessed = this.childContext.contextAwareCounter(RECORDS_PROCESSED);
    Assert.assertEquals(this.childContext.getCounters()
        .get(taskRecordsProcessed.getName()),
        taskRecordsProcessed.getInnerMetric());
    Assert.assertEquals(taskRecordsProcessed.getContext(), this.childContext);
    Assert.assertEquals(taskRecordsProcessed.getName(), RECORDS_PROCESSED);
    // Each child update is mirrored in the parent: parent goes 2 -> 3 -> 6 -> 2 below.
    taskRecordsProcessed.inc();
    Assert.assertEquals(taskRecordsProcessed.getCount(), 1l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 3l);
    taskRecordsProcessed.inc(3);
    Assert.assertEquals(taskRecordsProcessed.getCount(), 4l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 6l);
    taskRecordsProcessed.dec(4);
    Assert.assertEquals(taskRecordsProcessed.getCount(), 0l);
    Assert.assertEquals(jobRecordsProcessed.getCount(), 2l);
  }
  // Verifies meter registration and marking, and that marks on a child-context meter also flow
  // into the same-named meter of the parent context.
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareMeter() {
    ContextAwareMeter jobRecordsProcessRate = this.context.contextAwareMeter(RECORD_PROCESS_RATE);
    Assert.assertEquals(this.context.getMeters()
        .get(jobRecordsProcessRate.getName()),
        jobRecordsProcessRate.getInnerMetric());
    Assert.assertEquals(jobRecordsProcessRate.getContext(), this.context);
    Assert.assertEquals(jobRecordsProcessRate.getName(), RECORD_PROCESS_RATE);
    // Parent meter alone: 1 + 3 = 4 marks.
    jobRecordsProcessRate.mark();
    jobRecordsProcessRate.mark(3);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 4l);
    ContextAwareMeter taskRecordsProcessRate = this.childContext.contextAwareMeter(RECORD_PROCESS_RATE);
    Assert.assertEquals(this.childContext.getMeters()
        .get(taskRecordsProcessRate.getName()),
        taskRecordsProcessRate.getInnerMetric());
    Assert.assertEquals(taskRecordsProcessRate.getContext(), this.childContext);
    Assert.assertEquals(taskRecordsProcessRate.getName(), RECORD_PROCESS_RATE);
    // Child marks are mirrored in the parent: parent goes 4 -> 6 -> 11 below.
    taskRecordsProcessRate.mark(2);
    Assert.assertEquals(taskRecordsProcessRate.getCount(), 2l);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 6l);
    taskRecordsProcessRate.mark(5);
    Assert.assertEquals(taskRecordsProcessRate.getCount(), 7l);
    Assert.assertEquals(jobRecordsProcessRate.getCount(), 11l);
  }
  // Verifies histogram registration and updates, and that child-context samples are also folded
  // into the same-named histogram of the parent context.
  @Test(dependsOnMethods = "testChildContext")
  public void testContextAwareHistogram() {
    ContextAwareHistogram jobRecordSizeDist = this.context.contextAwareHistogram(RECORD_SIZE_DISTRIBUTION);
    Assert.assertEquals(
        this.context.getHistograms().get(
            jobRecordSizeDist.getName()),
        jobRecordSizeDist.getInnerMetric());
    Assert.assertEquals(jobRecordSizeDist.getContext(), this.context);
    Assert.assertEquals(jobRecordSizeDist.getName(), RECORD_SIZE_DISTRIBUTION);
    // Parent samples: {2, 4, 7}.
    jobRecordSizeDist.update(2);
    jobRecordSizeDist.update(4);
    jobRecordSizeDist.update(7);
    Assert.assertEquals(jobRecordSizeDist.getCount(), 3l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMin(), 2l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMax(), 7l);
    ContextAwareHistogram taskRecordSizeDist = this.childContext.contextAwareHistogram(RECORD_SIZE_DISTRIBUTION);
    Assert.assertEquals(this.childContext.getHistograms().get(taskRecordSizeDist.getName()),
        taskRecordSizeDist.getInnerMetric());
    Assert.assertEquals(taskRecordSizeDist.getContext(), this.childContext);
    Assert.assertEquals(taskRecordSizeDist.getName(), RECORD_SIZE_DISTRIBUTION);
    // Child samples: {3, 14, 11}.
    taskRecordSizeDist.update(3);
    taskRecordSizeDist.update(14);
    taskRecordSizeDist.update(11);
    Assert.assertEquals(taskRecordSizeDist.getCount(), 3l);
    Assert.assertEquals(taskRecordSizeDist.getSnapshot().getMin(), 3l);
    Assert.assertEquals(taskRecordSizeDist.getSnapshot().getMax(), 14l);
    // Parent now reflects all six samples: min 2 (own) and max 14 (from the child).
    Assert.assertEquals(jobRecordSizeDist.getCount(), 6l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMin(), 2l);
    Assert.assertEquals(jobRecordSizeDist.getSnapshot().getMax(), 14l);
  }
@Test
public void testContextAwareTimer() {
  // Timer registered on the context under TOTAL_DURATION; it must be reachable
  // from the context's timer registry and point back to its owning context.
  ContextAwareTimer jobTotalDuration = this.context.contextAwareTimer(TOTAL_DURATION);
  Assert.assertEquals(this.context.getTimers().get(jobTotalDuration.getName()), jobTotalDuration.getInnerMetric());
  Assert.assertEquals(jobTotalDuration.getContext(), this.context);
  Assert.assertEquals(jobTotalDuration.getName(), TOTAL_DURATION);
  jobTotalDuration.update(50, TimeUnit.SECONDS);
  jobTotalDuration.update(100, TimeUnit.SECONDS);
  jobTotalDuration.update(150, TimeUnit.SECONDS);
  // Use the 'L' suffix for long literals (lowercase 'l' is confusable with '1').
  // Timer snapshot values are reported in nanoseconds.
  Assert.assertEquals(jobTotalDuration.getCount(), 3L);
  Assert.assertEquals(jobTotalDuration.getSnapshot().getMin(), TimeUnit.SECONDS.toNanos(50L));
  Assert.assertEquals(jobTotalDuration.getSnapshot().getMax(), TimeUnit.SECONDS.toNanos(150L));
  // A timing context that is started and immediately stopped must report a
  // non-negative elapsed duration.
  Assert.assertTrue(jobTotalDuration.time().stop() >= 0L);
}
@Test
public void testTaggableGauge() {
  // Gauge with a fixed value, explicitly registered on the context.
  ContextAwareGauge<Long> queueSize = this.context.newContextAwareGauge(
      QUEUE_SIZE,
      new Gauge<Long>() {
        @Override
        public Long getValue() {
          // 'L' suffix instead of lowercase 'l', which is easily misread as '1'.
          return 1000L;
        }
      });
  this.context.register(QUEUE_SIZE, queueSize);
  Assert.assertEquals(queueSize.getValue().longValue(), 1000L);
  // The registered inner metric must be retrievable under the gauge's name.
  Assert.assertEquals(
      this.context.getGauges().get(queueSize.getName()),
      queueSize.getInnerMetric());
  Assert.assertEquals(queueSize.getContext(), this.context);
  Assert.assertEquals(queueSize.getName(), QUEUE_SIZE);
}
@Test(dependsOnMethods = {
    "testContextAwareCounter",
    "testContextAwareMeter",
    "testContextAwareHistogram",
    "testContextAwareTimer",
    "testTaggableGauge"
})
public void testGetMetrics() {
  // Verifies the aggregate metric views of the parent and child contexts after
  // the five dependent tests registered one metric each. The sixth name (and the
  // second timer) is presumably the context's built-in notifications timer -- it
  // is excluded by name in testGetMetricsWithFilter, where timers drop to 1.
  SortedSet<String> names = this.context.getNames();
  Assert.assertEquals(names.size(), 6);
  Assert.assertTrue(names.contains(RECORDS_PROCESSED));
  Assert.assertTrue(names.contains(RECORD_PROCESS_RATE));
  Assert.assertTrue(
      names.contains(RECORD_SIZE_DISTRIBUTION));
  Assert.assertTrue(names.contains(TOTAL_DURATION));
  Assert.assertTrue(names.contains(QUEUE_SIZE));
  // The child context only holds the three metrics created through it, plus one
  // extra name (4 total) not registered by these tests.
  SortedSet<String> childNames = this.childContext.getNames();
  Assert.assertEquals(childNames.size(), 4);
  Assert.assertTrue(
      childNames.contains(RECORDS_PROCESSED));
  Assert.assertTrue(
      childNames.contains(RECORD_PROCESS_RATE));
  Assert.assertTrue(
      childNames.contains(RECORD_SIZE_DISTRIBUTION));
  // The flat metric map mirrors the name set.
  Map<String, Metric> metrics = this.context.getMetrics();
  Assert.assertEquals(metrics.size(), 6);
  Assert.assertTrue(
      metrics.containsKey(RECORDS_PROCESSED));
  Assert.assertTrue(
      metrics.containsKey(RECORD_PROCESS_RATE));
  Assert.assertTrue(
      metrics.containsKey(RECORD_SIZE_DISTRIBUTION));
  Assert.assertTrue(metrics.containsKey(TOTAL_DURATION));
  Assert.assertTrue(metrics.containsKey(QUEUE_SIZE));
  // Per-type views: exactly one of each user-registered metric type ...
  Map<String, Counter> counters = this.context.getCounters();
  Assert.assertEquals(counters.size(), 1);
  Assert.assertTrue(
      counters.containsKey(RECORDS_PROCESSED));
  Map<String, Meter> meters = this.context.getMeters();
  Assert.assertEquals(meters.size(), 1);
  Assert.assertTrue(
      meters.containsKey(RECORD_PROCESS_RATE));
  Map<String, Histogram> histograms = this.context.getHistograms();
  Assert.assertEquals(histograms.size(), 1);
  Assert.assertTrue(
      histograms.containsKey(RECORD_SIZE_DISTRIBUTION));
  // ... except timers, which include the extra built-in timer (2 total).
  Map<String, Timer> timers = this.context.getTimers();
  Assert.assertEquals(timers.size(), 2);
  Assert.assertTrue(timers.containsKey(TOTAL_DURATION));
  Map<String, Gauge> gauges = this.context.getGauges();
  Assert.assertEquals(gauges.size(), 1);
  Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));
}
@Test(dependsOnMethods = "testGetMetrics")
@SuppressWarnings("unchecked")
public void testGetMetricsWithFilter() {
  // Exclude the context's built-in notifications timer; with it filtered out,
  // each per-type view contains exactly the one metric registered by the tests.
  MetricFilter excludeNotificationsTimer = new MetricFilter() {
    @Override public boolean matches(String name, Metric metric) {
      return !MetricContext.GOBBLIN_METRICS_NOTIFICATIONS_TIMER_NAME.equals(name);
    }
  };

  Map<String, Counter> filteredCounters = this.context.getCounters(excludeNotificationsTimer);
  Assert.assertEquals(filteredCounters.size(), 1);
  Assert.assertTrue(filteredCounters.containsKey(RECORDS_PROCESSED));

  Map<String, Meter> filteredMeters = this.context.getMeters(excludeNotificationsTimer);
  Assert.assertEquals(filteredMeters.size(), 1);
  Assert.assertTrue(filteredMeters.containsKey(RECORD_PROCESS_RATE));

  Map<String, Histogram> filteredHistograms = this.context.getHistograms(excludeNotificationsTimer);
  Assert.assertEquals(filteredHistograms.size(), 1);
  Assert.assertTrue(filteredHistograms.containsKey(RECORD_SIZE_DISTRIBUTION));

  // The filter removes the notifications timer, leaving only TOTAL_DURATION.
  Map<String, Timer> filteredTimers = this.context.getTimers(excludeNotificationsTimer);
  Assert.assertEquals(filteredTimers.size(), 1);
  Assert.assertTrue(filteredTimers.containsKey(TOTAL_DURATION));

  Map<String, Gauge> filteredGauges = this.context.getGauges(excludeNotificationsTimer);
  Assert.assertEquals(filteredGauges.size(), 1);
  Assert.assertTrue(filteredGauges.containsKey(QUEUE_SIZE));
}
@Test(dependsOnMethods = {
    "testGetMetricsWithFilter"
})
public void testRemoveMetrics() {
  // Removing each metric from the child context by name must succeed and leave
  // the corresponding per-type view empty.
  Assert.assertTrue(this.childContext.remove(RECORDS_PROCESSED));
  Assert.assertTrue(this.childContext.getCounters().isEmpty());

  Assert.assertTrue(this.childContext.remove(RECORD_PROCESS_RATE));
  Assert.assertTrue(this.childContext.getMeters().isEmpty());

  Assert.assertTrue(this.childContext.remove(RECORD_SIZE_DISTRIBUTION));
  Assert.assertTrue(this.childContext.getHistograms().isEmpty());

  // One name remains after removing all three test-registered metrics.
  Assert.assertEquals(this.childContext.getNames().size(), 1);
}
@AfterClass
public void tearDown() throws IOException {
  // Release the root test context, if it was ever created.
  MetricContext contextToClose = this.context;
  if (contextToClose != null) {
    contextToClose.close();
  }
}
/**
 * A scheduled reporter that, instead of emitting metrics, asserts that the
 * gauges/counters/histograms/meters/timers handed to it by the context match
 * exactly what the test methods of the enclosing test class registered.
 */
private static class TestContextAwareScheduledReporter extends ContextAwareScheduledReporter {

  protected TestContextAwareScheduledReporter(MetricContext context, String name, MetricFilter filter,
      TimeUnit rateUnit, TimeUnit durationUnit) {
    super(context, name, filter, rateUnit, durationUnit);
  }

  @Override
  protected void reportInContext(MetricContext context,
                                 SortedMap<String, Gauge> gauges,
                                 SortedMap<String, Counter> counters,
                                 SortedMap<String, Histogram> histograms,
                                 SortedMap<String, Meter> meters,
                                 SortedMap<String, Timer> timers) {
    // Only the named test context is expected to be reported here.
    Assert.assertEquals(context.getName(), CONTEXT_NAME);
    Assert.assertEquals(gauges.size(), 1);
    Assert.assertTrue(gauges.containsKey(QUEUE_SIZE));
    Assert.assertEquals(counters.size(), 1);
    Assert.assertTrue(counters.containsKey(RECORDS_PROCESSED));
    Assert.assertEquals(histograms.size(), 1);
    Assert.assertTrue(
        histograms.containsKey(RECORD_SIZE_DISTRIBUTION));
    Assert.assertEquals(meters.size(), 1);
    Assert.assertTrue(meters.containsKey(RECORD_PROCESS_RATE));
    // Two timers: TOTAL_DURATION plus a second timer not registered by the
    // tests (presumably the context's built-in notifications timer -- see
    // testGetMetricsWithFilter, which excludes it by name).
    Assert.assertEquals(timers.size(), 2);
    Assert.assertTrue(timers.containsKey(TOTAL_DURATION));
  }

  /** Builder that produces the asserting test reporter above. */
  private static class TestContextAwareScheduledReporterBuilder extends Builder {

    public TestContextAwareScheduledReporterBuilder(String name) {
      super(name);
    }

    @Override
    public ContextAwareScheduledReporter build(MetricContext context) {
      return new MetricContextTest.TestContextAwareScheduledReporter(
          context, this.name, this.filter, this.rateUnit, this.durationUnit);
    }
  }
}
}
| 4,364 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/TagTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Unit tests for {@link Tag}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.metrics"})
public class TagTest {

  private static final String JOB_ID_KEY = "job.id";
  private static final String JOB_ID = "TestJob-0";
  private static final String PROJECT_VERSION_KEY = "project.version";
  private static final int PROJECT_VERSION = 1;

  /**
   * A tag must expose exactly the key and value it was constructed with,
   * for both String- and Integer-valued tags.
   */
  @Test
  public void testTags() {
    Tag<String> jobIdTag = new Tag<>(JOB_ID_KEY, JOB_ID);
    Assert.assertEquals(jobIdTag.getKey(), JOB_ID_KEY);
    Assert.assertEquals(jobIdTag.getValue(), JOB_ID);

    Tag<Integer> projectVersionTag = new Tag<>(PROJECT_VERSION_KEY, PROJECT_VERSION);
    Assert.assertEquals(projectVersionTag.getKey(), PROJECT_VERSION_KEY);
    Assert.assertEquals(projectVersionTag.getValue().intValue(), PROJECT_VERSION);
  }
}
| 4,365 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/OutputStreamReporterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.apache.gobblin.metrics.reporter.OutputStreamReporter;
@Test(groups = { "gobblin.metrics" })
public class OutputStreamReporterTest {

  // Captures the reporter's text output for inspection; reset before each test.
  private ByteArrayOutputStream stream = new ByteArrayOutputStream();

  /**
   * Reports a counter, meter and histogram and verifies that every expected
   * sub-metric line appears in the reporter's text output.
   */
  @Test
  public void testReporter() throws IOException {
    MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName()).build();
    Counter counter = metricContext.counter("com.linkedin.example.counter");
    Meter meter = metricContext.meter("com.linkedin.example.meter");
    Histogram histogram = metricContext.histogram("com.linkedin.example.histogram");
    OutputStreamReporter reporter =
        OutputStreamReporter.Factory.newBuilder().outputTo(this.stream).build(new Properties());

    counter.inc();
    meter.mark(2);
    histogram.update(1);
    histogram.update(1);
    histogram.update(2);

    reporter.report();
    String[] lines = this.stream.toString().split("\n");

    // Expected sub-metric lines, grouped under each top-level metric name.
    Map<String, Set<String>> expected = new HashMap<>();
    Set<String> counterSubMetrics = new HashSet<>();
    counterSubMetrics.add("count");
    expected.put("com.linkedin.example.counter", counterSubMetrics);
    Set<String> histogramSubMetrics = new HashSet<>();
    histogramSubMetrics.add("count");
    histogramSubMetrics.add("min");
    histogramSubMetrics.add("max");
    histogramSubMetrics.add("mean");
    histogramSubMetrics.add("stddev");
    histogramSubMetrics.add("median");
    histogramSubMetrics.add("75% <");
    histogramSubMetrics.add("95% <");
    expected.put("com.linkedin.example.histogram", histogramSubMetrics);
    Set<String> meterSubmetrics = new HashSet<>();
    meterSubmetrics.add("count");
    meterSubmetrics.add("mean rate");
    meterSubmetrics.add("1-minute rate");
    meterSubmetrics.add("5-minute rate");
    meterSubmetrics.add("15-minute rate");
    expected.put("com.linkedin.example.meter", meterSubmetrics);

    expectMetrics(expected, lines);

    reporter.close();
  }

  /**
   * Verifies that tags passed to the reporter builder appear in the output,
   * and that the counter is still reported alongside them.
   */
  @Test
  public void testTags() throws IOException {
    MetricContext metricContext = MetricContext.builder(this.getClass().getCanonicalName()).build();
    Counter counter = metricContext.counter("com.linkedin.example.counter");
    Map<String, String> tags = new HashMap<>();
    tags.put("testKey", "testValue");
    tags.put("key2", "value2");

    OutputStreamReporter reporter =
        OutputStreamReporter.Factory.newBuilder().withTags(tags).outputTo(this.stream).build(new Properties());

    counter.inc();
    reporter.report();

    Assert.assertTrue(this.stream.toString().contains("key2=value2"));
    Assert.assertTrue(this.stream.toString().contains("testKey=testValue"));

    String[] lines = this.stream.toString().split("\n");
    // BUG FIX: populate the expected sub-metrics BEFORE calling expectMetrics().
    // Previously the map was filled only after the call, so the counter's output
    // was never actually verified (the population was dead code).
    Map<String, Set<String>> expected = new HashMap<>();
    Set<String> counterSubMetrics = new HashSet<>();
    counterSubMetrics.add("count");
    expected.put("com.linkedin.example.counter", counterSubMetrics);
    expectMetrics(expected, lines);

    reporter.close();
  }

  /**
   * Verifies that tags inherited from the metric context itself appear in the
   * output, and that the counter is still reported.
   */
  @Test
  public void testTagsFromContext() throws IOException {
    Tag<?> tag1 = new Tag<>("tag1", "value1");
    MetricContext context = MetricContext.builder("context").addTag(tag1).build();
    Counter counter = context.counter("com.linkedin.example.counter");

    OutputStreamReporter reporter =
        OutputStreamReporter.Factory.newBuilder().outputTo(this.stream).build(new Properties());

    counter.inc();
    reporter.report();

    Assert.assertTrue(this.stream.toString().contains("tag1=value1"));

    String[] lines = this.stream.toString().split("\n");
    // BUG FIX: as in testTags(), fill the expectation map before verifying;
    // the original added the counter sub-metrics after expectMetrics() returned.
    Map<String, Set<String>> expected = new HashMap<>();
    Set<String> counterSubMetrics = new HashSet<>();
    counterSubMetrics.add("count");
    expected.put("com.linkedin.example.counter", counterSubMetrics);
    expectMetrics(expected, lines);

    reporter.close();
  }

  /** Start each test with an empty capture buffer. */
  @BeforeMethod
  public void before() {
    this.stream.reset();
  }

  /**
   * Scans the reporter's output lines and checks that every top-level metric in
   * {@code metrics} appears, each followed by all of its expected sub-metric
   * lines (of the form "name = value"). Consumes entries from {@code metrics}
   * as they are matched; anything left over triggers an assertion failure.
   */
  private void expectMetrics(Map<String, Set<String>> metrics, String[] lines) {
    Set<String> activeSet = new HashSet<>();
    String activeTopLevel = "";
    for (String line : lines) {
      System.out.println(line);
      if (line.contains("com.linkedin.example")) {
        // A new top-level metric starts: the previous one must be fully matched.
        Assert.assertTrue(activeSet.isEmpty(), String.format("%s does not contain all expected submetrics. Missing: %s",
            activeTopLevel, Arrays.toString(activeSet.toArray())));
        activeTopLevel = line.trim();
        if (metrics.containsKey(activeTopLevel)) {
          activeSet = metrics.get(activeTopLevel);
          metrics.remove(activeTopLevel);
        } else {
          activeSet = new HashSet<>();
        }
      } else if (line.contains("=")) {
        String submetric = line.split("=")[0].trim();
        activeSet.remove(submetric);
      }
    }
    Assert.assertTrue(activeSet.isEmpty(), String.format("%s does not contain all expected submetrics. Missing: %s",
        activeTopLevel, Arrays.toString(activeSet.toArray())));
    Assert.assertTrue(metrics.isEmpty(),
        String.format("Output does not contain all expected top level metrics. Missing: %s",
            Arrays.toString(metrics.keySet().toArray())));
  }
}
| 4,366 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/ContextAwareMetricFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.IOException;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.apache.gobblin.metrics.test.TestConstants.*;
/**
* Unit tests for {@link ContextAwareMetricFactory}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.metrics"})
public class ContextAwareMetricFactoryTest {

  // NOTE: the unused JOB_ID_KEY / JOB_ID constants present in the original
  // class were dead code and have been removed.

  // Shared context and one metric of each type, created once for all tests.
  private MetricContext context;
  private ContextAwareCounter counter;
  private ContextAwareMeter meter;
  private ContextAwareHistogram histogram;
  private ContextAwareTimer timer;

  @BeforeClass
  public void setUp() {
    this.context = MetricContext.builder(CONTEXT_NAME).build();
    this.counter = ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_COUNTER_FACTORY.newMetric(
        this.context, RECORDS_PROCESSED);
    this.meter = ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_METER_FACTORY.newMetric(
        this.context, RECORD_PROCESS_RATE);
    this.histogram = ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_HISTOGRAM_FACTORY.newMetric(
        this.context, RECORD_SIZE_DISTRIBUTION);
    this.timer = ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_TIMER_FACTORY.newMetric(
        this.context, TOTAL_DURATION);
  }

  /** Counter factory recognizes only counters and wires context/name correctly. */
  @Test
  public void testContextAwareCounterFactory() {
    Assert.assertTrue(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_COUNTER_FACTORY.isInstance(this.counter));
    Assert.assertFalse(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_COUNTER_FACTORY.isInstance(this.meter));
    Assert.assertEquals(this.counter.getContext(), this.context);
    Assert.assertEquals(this.counter.getName(), RECORDS_PROCESSED);
  }

  /** Meter factory recognizes only meters and wires context/name correctly. */
  @Test
  public void testContextAwareMeterFactory() {
    Assert.assertTrue(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_METER_FACTORY.isInstance(this.meter));
    Assert.assertFalse(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_METER_FACTORY.isInstance(this.histogram));
    Assert.assertEquals(this.meter.getContext(), this.context);
    Assert.assertEquals(this.meter.getName(), RECORD_PROCESS_RATE);
  }

  /** Histogram factory recognizes only histograms and wires context/name correctly. */
  @Test
  public void testContextAwareHistogramFactory() {
    Assert.assertTrue(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_HISTOGRAM_FACTORY.isInstance(this.histogram));
    Assert.assertFalse(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_HISTOGRAM_FACTORY.isInstance(this.timer));
    Assert.assertEquals(this.histogram.getContext(), this.context);
    Assert.assertEquals(this.histogram.getName(), RECORD_SIZE_DISTRIBUTION);
  }

  /** Timer factory recognizes only timers and wires context/name correctly. */
  @Test
  public void testContextAwareTimerFactory() {
    Assert.assertTrue(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_TIMER_FACTORY.isInstance(this.timer));
    Assert.assertFalse(ContextAwareMetricFactory.DEFAULT_CONTEXT_AWARE_TIMER_FACTORY.isInstance(this.counter));
    Assert.assertEquals(this.timer.getContext(), this.context);
    Assert.assertEquals(this.timer.getName(), TOTAL_DURATION);
  }

  @AfterClass
  public void tearDown() throws IOException {
    this.context.close();
  }
}
| 4,367 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/RootMetricContextTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import java.lang.ref.WeakReference;
import java.util.UUID;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.base.Predicate;
import com.typesafe.config.ConfigFactory;
import lombok.AllArgsConstructor;
import org.apache.gobblin.metrics.callback.NotificationStore;
import org.apache.gobblin.metrics.notification.MetricContextCleanupNotification;
import org.apache.gobblin.metrics.notification.NewMetricContextNotification;
import org.apache.gobblin.metrics.notification.Notification;
import org.apache.gobblin.metrics.test.ContextStoreReporter;
/**
* Tests for {@link org.apache.gobblin.metrics.RootMetricContext}
*/
public class RootMetricContextTest {

  // Soft-valued cache used purely to create heap pressure so that System.gc()
  // in triggerGarbageCollection() actually collects the weakly-referenced
  // objects under test.
  private static final Cache<String, int[]> CACHE = CacheBuilder.newBuilder().softValues().build();

  @BeforeMethod
  public void setUp() throws Exception {
    // Start each test from a clean slate: force a GC pass and drop any
    // notification targets left behind by other tests.
    System.gc();
    RootMetricContext.get().clearNotificationTargets();
  }

  @AfterMethod
  public void tearDown() throws Exception {
    CACHE.invalidateAll();
    System.gc();
    RootMetricContext.get().clearNotificationTargets();
  }

  /** The root metric context is a singleton with a fixed well-known name. */
  @Test
  public void testGet() throws Exception {
    Assert.assertNotNull(RootMetricContext.get());
    Assert.assertEquals(RootMetricContext.get(), RootMetricContext.get());
    Assert.assertEquals(RootMetricContext.get().getName(), RootMetricContext.ROOT_METRIC_CONTEXT);
  }

  /** A reporter created after reporting has started should itself be started. */
  @Test
  public void testReporterCanBeAddedToStartedContext() throws Exception {
    RootMetricContext.get().startReporting();
    ContextStoreReporter reporter = new ContextStoreReporter("testReporter", ConfigFactory.empty());
    Assert.assertTrue(reporter.isStarted());
    RootMetricContext.get().stopReporting();
  }

  /**
   * End-to-end lifecycle test: a metric context must stay alive while any of
   * its metrics is strongly referenced, be garbage-collected once all strong
   * references (context and metrics) are gone, emit a cleanup notification,
   * be reported one final time, and only then release its inner context.
   *
   * NOTE: statement order and the explicit null-ing of locals are load-bearing
   * here -- they control exactly which objects are strongly reachable at each
   * GC checkpoint.
   */
  @Test
  public void testMetricContextLifecycle() throws Exception {
    String name = UUID.randomUUID().toString();
    // Only capture notifications for the context created by this test.
    NotificationStore store = new NotificationStore(new ContextNamePredicate(name));
    RootMetricContext.get().addNotificationTarget(store);
    // Create a new metric context
    MetricContext metricContext = MetricContext.builder(name).build();
    WeakReference<MetricContext> contextWeakReference = new WeakReference<MetricContext>(metricContext);
    InnerMetricContext innerMetricContext = metricContext.getInnerMetricContext();
    WeakReference<InnerMetricContext> innerMetricContextWeakReference =
        new WeakReference<InnerMetricContext>(innerMetricContext);
    innerMetricContext = null;

    // Check that existence of a reporter does not prevent GC
    ContextStoreReporter reporter = new ContextStoreReporter("testReporter", ConfigFactory.empty());

    // Check that metric context is a child of root metric context
    Assert.assertTrue(RootMetricContext.get().getChildContextsAsMap().containsKey(name));
    Assert.assertEquals(RootMetricContext.get().getChildContextsAsMap().get(name), metricContext);

    // Check that notification on new metric context was generated
    Assert.assertEquals(store.getNotificationList().size(), 1);
    Assert.assertEquals(store.getNotificationList().get(0).getClass(), NewMetricContextNotification.class);
    Assert.assertEquals(((NewMetricContextNotification) store.getNotificationList().get(0)).getMetricContext(),
        metricContext);
    store.getNotificationList().clear();

    // Create a counter
    ContextAwareCounter counter1 = metricContext.contextAwareCounter("textCounter1");
    // If losing reference of counter, should not be GCed while context is present
    WeakReference<ContextAwareCounter> counterWeakReference1 = new WeakReference<ContextAwareCounter>(counter1);
    counter1 = null;
    ensureNotGarbageCollected(counterWeakReference1);

    // Create some more metrics
    ContextAwareCounter counter2 = metricContext.contextAwareCounter("testCounter");
    WeakReference<ContextAwareCounter> counterWeakReference2 = new WeakReference<ContextAwareCounter>(counter2);
    ContextAwareMeter meter = metricContext.contextAwareMeter("testMeter");
    WeakReference<ContextAwareMeter> meterWeakReference = new WeakReference<ContextAwareMeter>(meter);
    meter.mark();
    ContextAwareHistogram histogram = metricContext.contextAwareHistogram("testHistogram");
    WeakReference<ContextAwareHistogram> histogramWeakReference = new WeakReference<ContextAwareHistogram>(histogram);
    ContextAwareTimer timer = metricContext.contextAwareTimer("testTimer");
    WeakReference<ContextAwareTimer> timerWeakReference = new WeakReference<ContextAwareTimer>(timer);

    // If losing reference to context, should not be GCed while reference to metric is present
    metricContext = null;
    ensureNotGarbageCollected(contextWeakReference);
    ensureNotGarbageCollected(counterWeakReference2);
    ensureNotGarbageCollected(meterWeakReference);
    ensureNotGarbageCollected(timerWeakReference);
    ensureNotGarbageCollected(histogramWeakReference);

    // After losing reference to context and all metrics, context and all metrics should be GCed
    store.getNotificationList().clear();
    reporter.getReportedContexts().clear();
    counter2 = null;
    meter = null;
    histogram = null;
    timer = null;
    ensureGarbageCollected(contextWeakReference);
    ensureGarbageCollected(counterWeakReference1);
    ensureGarbageCollected(counterWeakReference2);
    ensureGarbageCollected(meterWeakReference);
    ensureGarbageCollected(timerWeakReference);
    ensureGarbageCollected(histogramWeakReference);

    // Inner metric context should not be GCed
    ensureNotGarbageCollected(innerMetricContextWeakReference);

    // Notification on removal of metric context should be available
    // (cleanup appears to happen asynchronously, hence the bounded polling)
    int maxWait = 10;
    while(store.getNotificationList().isEmpty() && maxWait > 0) {
      Thread.sleep(1000);
      maxWait--;
    }
    Assert.assertEquals(store.getNotificationList().size(), 1);
    Assert.assertEquals(store.getNotificationList().get(0).getClass(), MetricContextCleanupNotification.class);
    Assert.assertEquals(((MetricContextCleanupNotification) store.getNotificationList().get(0)).getMetricContext(),
        innerMetricContextWeakReference.get());

    // Reporter should have attempted to report metric context
    Assert.assertEquals(reporter.getReportedContexts().size(), 1);
    Assert.assertEquals(reporter.getReportedContexts().get(0), innerMetricContextWeakReference.get());

    // Metrics in deleted metric context should still be readable
    Assert.assertEquals(innerMetricContextWeakReference.get().getCounters().size(), 2);
    Assert.assertEquals(innerMetricContextWeakReference.get().getMeters().size(), 1);
    Assert.assertEquals(innerMetricContextWeakReference.get().getTimers().size(), 2);
    Assert.assertEquals(innerMetricContextWeakReference.get().getHistograms().size(), 1);
    Assert.assertEquals(innerMetricContextWeakReference.get().getMeters().get("testMeter").getCount(), 1);

    // After clearing notification, inner metric context should be GCed
    store.getNotificationList().clear();
    reporter.getReportedContexts().clear();
    ensureGarbageCollected(innerMetricContextWeakReference);

    RootMetricContext.get().removeReporter(reporter);
  }

  // Best-effort GC trigger: System.gc() is only a hint, so also allocate into
  // the soft-valued cache to create memory pressure until the referent is
  // collected or the attempt budget runs out.
  private void triggerGarbageCollection(WeakReference<?> weakReference) {
    System.gc();
    // System.gc() might not clean up the object being checked, so allocate some memory and call System.gc() again
    for (int i = 0; weakReference.get() != null && i <= 10000; i++) {
      CACHE.put(Integer.toString(i), new int[10000]);
      if (i % 1000 == 0) {
        System.gc();
      }
    }
  }

  // Asserts the referent has been collected after forcing GC.
  private void ensureGarbageCollected(WeakReference<?> weakReference) {
    triggerGarbageCollection(weakReference);
    Assert.assertNull(weakReference.get());
  }

  // Asserts the referent has survived a forced GC (i.e. is strongly reachable).
  private void ensureNotGarbageCollected(WeakReference<?> weakReference) {
    triggerGarbageCollection(weakReference);
    Assert.assertNotNull(weakReference.get());
  }

  /**
   * Matches creation/cleanup notifications belonging to a single metric
   * context, identified by name.
   */
  @AllArgsConstructor
  private class ContextNamePredicate implements Predicate<Notification> {
    private final String name;

    @Override
    public boolean apply(Notification input) {
      if (input instanceof NewMetricContextNotification &&
          ((NewMetricContextNotification) input).getInnerMetricContext().getName().equals(this.name)) {
        return true;
      }
      if (input instanceof MetricContextCleanupNotification &&
          ((MetricContextCleanupNotification) input).getMetricContext().getName().equals(this.name)) {
        return true;
      }
      return false;
    }
  }
}
| 4,368 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/TaggedTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.codahale.metrics.MetricRegistry;
/**
* Unit tests for {@link Tagged}.
*
* @author Yinan Li
*/
@Test(groups = {"gobblin.metrics"})
public class TaggedTest {

  private static final String JOB_ID_KEY = "job.id";
  private static final String JOB_ID = "TestJob-0";
  private static final String PROJECT_VERSION_KEY = "project.version";
  private static final int PROJECT_VERSION = 1;

  private Tagged tagged;

  @BeforeClass
  public void setUp() {
    this.tagged = new Tagged();
  }

  /** Registers one String-valued and one Integer-valued tag. */
  @Test
  public void testAddTags() {
    this.tagged.addTag(new Tag<>(JOB_ID_KEY, JOB_ID));
    this.tagged.addTag(new Tag<>(PROJECT_VERSION_KEY, PROJECT_VERSION));
  }

  /** Tags must come back in insertion order with their keys and values intact. */
  @Test(dependsOnMethods = "testAddTags")
  public void testGetTags() {
    List<Tag<?>> tags = this.tagged.getTags();
    Assert.assertEquals(tags.size(), 2);

    Tag<?> firstTag = tags.get(0);
    Assert.assertEquals(firstTag.getKey(), JOB_ID_KEY);
    Assert.assertEquals(firstTag.getValue(), JOB_ID);

    Tag<?> secondTag = tags.get(1);
    Assert.assertEquals(secondTag.getKey(), PROJECT_VERSION_KEY);
    Assert.assertEquals(secondTag.getValue(), PROJECT_VERSION);
  }

  /**
   * The metric-name prefix joins tag values only (includeTagKeys=false) or the
   * full key:value renderings (includeTagKeys=true).
   */
  @Test(dependsOnMethods = "testAddTags")
  public void testMetricNamePrefix() {
    Assert.assertEquals(
        this.tagged.metricNamePrefix(false), MetricRegistry.name(JOB_ID, Integer.toString(PROJECT_VERSION)));

    List<Tag<?>> tags = this.tagged.getTags();
    Assert.assertEquals(
        this.tagged.metricNamePrefix(true),
        MetricRegistry.name(tags.get(0).toString(), tags.get(1).toString()));
  }
}
| 4,369 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/broker/MetricContextFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.broker;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.broker.BrokerConfigurationKeyGenerator;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.SimpleScopeType;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.Tag;
public class MetricContextFactoryTest {
@Test
public void test() throws Exception {
  MetricContextFactory<SimpleScopeType> factory = new MetricContextFactory<>();

  // Tag "tag1" is configured scope-free, "tag2" at GLOBAL scope and "tag3" at
  // LOCAL scope.
  Config config = ConfigFactory.parseMap(ImmutableMap.of(
      BrokerConfigurationKeyGenerator.generateKey(factory, null, null, MetricContextFactory.TAG_KEY + ".tag1"), "value1",
      BrokerConfigurationKeyGenerator.generateKey(factory, null, SimpleScopeType.GLOBAL, MetricContextFactory.TAG_KEY + ".tag2"), "value2",
      BrokerConfigurationKeyGenerator.generateKey(factory, null, SimpleScopeType.LOCAL, MetricContextFactory.TAG_KEY + ".tag3"), "value3"
  ));

  SharedResourcesBroker<SimpleScopeType> rootBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
      SimpleScopeType.GLOBAL.defaultScopeInstance());
  SharedResourcesBroker<SimpleScopeType> localBroker =
      rootBroker.newSubscopedBuilder(SimpleScopeType.LOCAL.defaultScopeInstance()).build();

  // The LOCAL-scope context carries all three tags.
  MetricContext localContext = localBroker.getSharedResource(factory, new MetricContextKey());
  Map<String, String> localTagMap = (Map<String, String>) Tag.toMap(Tag.tagValuesToString(localContext.getTags()));
  Assert.assertEquals(localTagMap.get("tag1"), "value1");
  Assert.assertEquals(localTagMap.get("tag2"), "value2");
  Assert.assertEquals(localTagMap.get("tag3"), "value3");

  // The GLOBAL-scope context is the local context's parent and must not carry
  // the LOCAL-only tag.
  MetricContext globalContext = rootBroker.getSharedResource(factory, new MetricContextKey());
  Assert.assertEquals(localContext.getParent().get(), globalContext);
  Map<String, String> globalTagMap = (Map<String, String>) Tag.toMap(Tag.tagValuesToString(globalContext.getTags()));
  Assert.assertEquals(globalTagMap.get("tag1"), "value1");
  Assert.assertEquals(globalTagMap.get("tag2"), "value2");
  Assert.assertFalse(globalTagMap.containsKey("tag3"));
}
@Test
public void testSubTaggedMetricContext() throws Exception {
MetricContextFactory<SimpleScopeType> factory = new MetricContextFactory<>();
Config config = ConfigFactory.parseMap(ImmutableMap.of(
BrokerConfigurationKeyGenerator.generateKey(factory, null, null, MetricContextFactory.TAG_KEY + ".tag1"), "value1"
));
SharedResourcesBroker<SimpleScopeType> rootBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(config,
SimpleScopeType.GLOBAL.defaultScopeInstance());
MetricContext metricContext = rootBroker.getSharedResource(factory,
new SubTaggedMetricContextKey("myMetricContext", ImmutableMap.of("tag2", "value2")));
Map<String, String> tagMap = (Map<String, String>) Tag.toMap(Tag.tagValuesToString(metricContext.getTags()));
Assert.assertEquals(metricContext.getName(), "myMetricContext");
Assert.assertEquals(tagMap.get("tag1"), "value1");
Assert.assertEquals(tagMap.get("tag2"), "value2");
MetricContext metricContext2 = rootBroker.getSharedResource(factory,
new SubTaggedMetricContextKey("myMetricContext", ImmutableMap.of("tag2", "value2")));
Assert.assertEquals(metricContext, metricContext2);
MetricContext metricContext3 = rootBroker.getSharedResource(factory,
new SubTaggedMetricContextKey("myMetricContext", ImmutableMap.of("tag3", "value3")));
Assert.assertNotEquals(metricContext, metricContext3);
MetricContext parent = rootBroker.getSharedResource(factory, new MetricContextKey());
tagMap = (Map<String, String>) Tag.toMap(Tag.tagValuesToString(parent.getTags()));
Assert.assertEquals(metricContext.getParent().get(), parent);
Assert.assertEquals(tagMap.get("tag1"), "value1");
Assert.assertFalse(tagMap.containsKey("tag2"));
}
}
| 4,370 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/reporter/FileFailureEventReporterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.reporter;
import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.Test;
import com.google.common.collect.Maps;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.event.FailureEventBuilder;
import static org.mockito.Mockito.*;
/**
 * Tests {@link FileFailureEventReporter}: a plain tracking event is ignored, while a
 * failure event lazily opens the failure log and writes to it.
 */
public class FileFailureEventReporterTest {
  @Test
  public void testReport()
      throws IOException {
    MetricContext testContext = MetricContext.builder(getClass().getCanonicalName()).build();

    FileSystem fs = mock(FileSystem.class);
    Path failureLogPath = mock(Path.class);
    FSDataOutputStream outputStream = mock(FSDataOutputStream.class);
    FileFailureEventReporter reporter = new FileFailureEventReporter(testContext, fs, failureLogPath);
    when(fs.exists(any())).thenReturn(true);
    when(fs.append(any())).thenReturn(outputStream);
    // Stub the stream write up front: Mockito stubbing must be configured BEFORE the
    // invocation it is meant to affect. (Previously this doAnswer(...) came after
    // reporter.report(), where it had no effect on the write being verified.)
    doAnswer(invocation -> null).when(outputStream).write(any(byte[].class), anyInt(), anyInt());

    final String eventName = "testEvent";
    final String eventNamespace = "testNamespace";
    GobblinTrackingEvent event =
        new GobblinTrackingEvent(0L, eventNamespace, eventName, Maps.newHashMap());

    // A normal (non-failure) event must not touch the failure log.
    testContext.submitEvent(event);
    verify(fs, never()).append(failureLogPath);
    verify(outputStream, never()).write(anyByte());

    // A failure event triggers report(): the log output is set up (one append) ...
    FailureEventBuilder failureEvent = new FailureEventBuilder(eventName, eventNamespace);
    failureEvent.submit(testContext);
    reporter.report();
    verify(fs, times(1)).append(failureLogPath);
    // ... and the serialized event is written to the stream exactly once.
    verify(outputStream, times(1)).write(any(byte[].class), anyInt(), anyInt());
  }
}
| 4,371 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/callback/NotificationStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.callback;
import lombok.Getter;
import javax.annotation.Nullable;
import java.util.List;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import org.apache.gobblin.metrics.notification.Notification;
/**
 * Test helper that records every {@link Notification} accepted by a caller-supplied filter.
 * Stored notifications are exposed through the lombok-generated getter.
 */
@Getter
public class NotificationStore implements Function<Notification, Void> {

  private final Predicate<Notification> predicate;
  private final List<Notification> notificationList = Lists.newArrayList();

  public NotificationStore(Predicate<Notification> filter) {
    this.predicate = filter;
  }

  @Nullable
  @Override
  public Void apply(Notification input) {
    // Guard clause: skip notifications the filter rejects.
    if (!this.predicate.apply(input)) {
      return null;
    }
    this.notificationList.add(input);
    return null;
  }
}
| 4,372 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/performance/MetricsUpdater.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.performance;
import lombok.Builder;
import lombok.Getter;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import com.google.common.collect.Lists;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.RootMetricContext;
/**
 * A {@link Runnable} that updates a set of metrics for every call to {@link #run}.
 *
 * <p>
 * This class will automatically generate a metric context tree and a set of metrics at the lowest level context.
 * An instance is created through a builder with the following optional parameters:
 * * depth: number of levels in generated {@link MetricContext} tree. Default: 0
 * * context: base {@link MetricContext} used as the root of the generated tree. Default: {@link RootMetricContext}.
 * * counters: number of counters to generate. Named counter0, counter1, ...
 * * meters: number of meters to generate. Named meter0, meter1, ...
 * * histograms: number of histograms to generate. Named histogram0, histogram1, ...
 * * timers: number of timers to generate. Named timer0, timer1, ...
 * </p>
 */
public class MetricsUpdater implements Runnable {

  // Requested depth of the generated context tree (stored for reference; the
  // constructor consumes its own copy of the parameter while building the tree).
  private final int depth;
  @Getter
  private final MetricContext context;
  private final List<Counter> counters;
  private final List<Meter> meters;
  private final List<Histogram> histograms;
  private final List<Timer> timers;
  // Source of values for histogram/timer updates; not seeded, so runs differ.
  private final Random random;

  @Builder
  private MetricsUpdater(int depth, int counters, int meters, int histograms,
      int timers, MetricContext baseContext) {
    this.depth = depth;
    this.random = new Random();

    // Walk down from the base context, creating one child context per level.
    // NOTE: the loop deliberately decrements the `depth` parameter, which shadows
    // this.depth (assigned above with the original value).
    MetricContext tmpContext = baseContext == null ? RootMetricContext.get() : baseContext;
    while(depth > 0) {
      tmpContext = tmpContext.childBuilder(UUID.randomUUID().toString()).build();
      depth--;
    }
    this.context = tmpContext;

    // All metrics are registered on the deepest (leaf) context, with indexed names.
    this.counters = Lists.newArrayList();
    for(int i = 0; i < counters; i++) {
      this.counters.add(this.context.counter("gobblin.performance.test.counter" + i));
    }
    this.meters = Lists.newArrayList();
    for(int i = 0; i < meters; i++) {
      this.meters.add(this.context.meter("gobblin.performance.test.meter" + i));
    }
    this.histograms = Lists.newArrayList();
    for(int i = 0; i < histograms; i++) {
      this.histograms.add(this.context.histogram("gobblin.performance.test.histogram" + i));
    }
    this.timers = Lists.newArrayList();
    for(int i = 0; i < timers; i++) {
      this.timers.add(this.context.timer("gobblin.performance.test.timer" + i));
    }
  }

  /** Updates every generated metric exactly once. */
  @Override public void run() {
    updateCounters();
    updateMeters();
    updateHistograms();
    updateTimers();
  }

  // Increments each counter by 1.
  private void updateCounters() {
    for(Counter counter : this.counters) {
      counter.inc();
    }
  }

  // Marks one occurrence on each meter.
  private void updateMeters() {
    for(Meter meter : this.meters) {
      meter.mark();
    }
  }

  // Records a random duration in [0, 1000) seconds on each timer.
  private void updateTimers() {
    for(Timer timer : this.timers) {
      timer.update(this.random.nextInt(1000), TimeUnit.SECONDS);
    }
  }

  // Records a random value in [0, 1000) on each histogram.
  private void updateHistograms() {
    for(Histogram histogram : this.histograms) {
      histogram.update(this.random.nextInt(1000));
    }
  }
}
| 4,373 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/performance/PerformanceUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.performance;
import lombok.Builder;
import lombok.Singular;
import lombok.extern.slf4j.Slf4j;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.gobblin.util.ExecutorsUtils;
/**
 * Methods to run performance tests on Gobblin Metrics.
 */
@Slf4j
public class PerformanceUtils {

  /** Default total number of metric updates when the caller passes {@code queries == 0}. */
  private static final long DEFAULT_QUERIES = 50000000L;

  /**
   * Runs a set of performance tests. The method will take the cardinal product of the values of each input parameter,
   * and run a performance test for each combination of paramters. At the end, it will print out the results.
   *
   * <p>
   * All parameters (except for queries) are a set of integers, meaning that separate tests will be run for all
   * the values provided. The number of queries will be identical for all tests.
   * </p>
   *
   * @param threads Number of threads to spawn. Each thread will have an {@link Incrementer} and update metrics.
   * @param depth Depth of the {@link org.apache.gobblin.metrics.MetricContext} tree.
   * @param forkAtDepth If multiple threads, each thread has its own {@link org.apache.gobblin.metrics.MetricContext}. This
   *                    parameter sets the first level in the tree where the per-thread MetricContexts branch off.
   * @param counters Number of counters to generate per thread.
   * @param meters Number of meters to generate per thread.
   * @param histograms Number of histograms to generate per thread.
   * @param timers Number of timers to generate per thread.
   * @param queries Number of increments to do, divided among all threads.
   * @param name Label printed above the result table.
   * @throws Exception
   */
  @Builder(buildMethodName = "run", builderMethodName = "multiTest")
  public static void _multiTest(@Singular("threads") Set<Integer> threads,
      @Singular("depth") Set<Integer> depth,
      @Singular("forkAtDepth") Set<Integer> forkAtDepth,
      @Singular("counters") Set<Integer> counters,
      @Singular("meters") Set<Integer> meters,
      @Singular("histograms") Set<Integer> histograms,
      @Singular("timers") Set<Integer> timers,
      long queries, String name) throws Exception {

    // Default every unspecified axis to a single value so the cartesian product is non-empty.
    if(threads.isEmpty()) {
      threads = Sets.newHashSet(1);
    }
    if(forkAtDepth.isEmpty()) {
      forkAtDepth = Sets.newHashSet(0);
    }
    if(depth.isEmpty()) {
      depth = Sets.newHashSet(0);
    }
    if(counters.isEmpty()) {
      counters = Sets.newHashSet(0);
    }
    if(meters.isEmpty()) {
      meters = Sets.newHashSet(0);
    }
    if(histograms.isEmpty()) {
      histograms = Sets.newHashSet(0);
    }
    if(timers.isEmpty()) {
      timers = Sets.newHashSet(0);
    }
    if(queries == 0) {
      queries = DEFAULT_QUERIES;
    }
    if(Strings.isNullOrEmpty(name)) {
      name = "Test";
    }

    Set<List<Integer>> parameters = Sets.cartesianProduct(threads, depth, forkAtDepth, counters, meters,
        histograms, timers);

    // Lexicographic ordering of the parameter lists, so results print in a stable order.
    Comparator<List<Integer>> comparator = new Comparator<List<Integer>>() {
      @Override public int compare(List<Integer> o1, List<Integer> o2) {
        Iterator<Integer> it1 = o1.iterator();
        Iterator<Integer> it2 = o2.iterator();
        while(it1.hasNext() && it2.hasNext()) {
          int compare = Integer.compare(it1.next(), it2.next());
          if(compare != 0) {
            return compare;
          }
        }
        // Shorter list sorts first when one is a prefix of the other.
        if(it1.hasNext()) {
          return 1;
        } else if(it2.hasNext()) {
          return -1;
        } else {
          return 0;
        }
      }
    };

    TreeMap<List<Integer>, Double> results = Maps.newTreeMap(comparator);
    for(List<Integer> p : parameters) {
      Preconditions.checkArgument(p.size() == 7, "Parameter list should be of size 7.");
      results.put(p, singleTest().threads(p.get(0)).depth(p.get(1)).forkAtDepth(p.get(2)).counters(p.get(3)).
          meters(p.get(4)).histograms(p.get(5)).timers(p.get(6)).queries(queries).run());
    }

    // Print a tab-separated result table, one row per parameter combination.
    System.out.println("===========================");
    System.out.println(name);
    System.out.println("===========================");
    System.out.println("Threads\tDepth\tForkAtDepth\tCounters\tMeters\tHistograms\tTimers\tQPS");
    for(Map.Entry<List<Integer>, Double> result : results.entrySet()) {
      List<Integer> p = result.getKey();
      System.out.println(String
          .format("%d\t%d\t%d\t%d\t%d\t%d\t%d\t%f", p.get(0), p.get(1), p.get(2), p.get(3), p.get(4), p.get(5),
              p.get(6), result.getValue()));
    }
  }

  /**
   * Runs a single performance test. Creates a {@link org.apache.gobblin.metrics.MetricContext} tree, spawns a number of threads,
   * uses and {@link Incrementer} to update the metrics repeatedly, then determines the achieved QPS in number
   * of iterations of {@link Incrementer} per second.
   *
   * @param threads Number of threads to spawn. Each thread will have an {@link Incrementer} and update metrics.
   * @param depth Depth of the {@link org.apache.gobblin.metrics.MetricContext} tree.
   * @param forkAtDepth If multiple threads, each thread has its own {@link org.apache.gobblin.metrics.MetricContext}. This
   *                    parameter sets the first level in the tree where the per-thread MetricContexts branch off.
   * @param counters Number of counters to generate per thread.
   * @param meters Number of meters to generate per thread.
   * @param histograms Number of histograms to generate per thread.
   * @param timers Number of timers to generate per thread.
   * @param queries Number of increments to do, divided among all threads.
   * @return total QPS achieved (e.g. total increments per second in the {@link Incrementer}s)
   * @throws Exception
   */
  @Builder(buildMethodName = "run", builderMethodName = "singleTest")
  public static double _singleTest(int threads, int depth, int forkAtDepth, int counters, int meters, int histograms,
      int timers, long queries) throws Exception {
    // Encourage a GC now so collection work from earlier tests does not skew this measurement.
    System.gc();

    ExecutorService executorService = ExecutorsUtils.loggingDecorator(Executors.newFixedThreadPool(threads));

    if(queries == 0) {
      queries = DEFAULT_QUERIES;
    }
    long queriesPerThread = queries / threads;
    // Integer division may drop a remainder; actualQueries is the count that really runs.
    long actualQueries = queriesPerThread * threads;

    // Shared context tree down to forkAtDepth; each thread forks its own subtree below it.
    MetricsUpdater commonUpdater = MetricsUpdater.builder().depth(forkAtDepth).build();

    List<Incrementer> incrementerList = Lists.newArrayList();
    while(incrementerList.size() < threads) {
      final MetricsUpdater metricsUpdater = MetricsUpdater.builder().baseContext(commonUpdater.getContext()).
          depth(depth - forkAtDepth).counters(counters).meters(meters).histograms(histograms).timers(timers).build();
      incrementerList.add(new Incrementer(queriesPerThread, new Runnable() {
        @Override public void run() {
          metricsUpdater.run();
        }
      }));
    }

    List<Future<Long>> incrementerFutures = Lists.newArrayList();
    for(Incrementer incrementer : incrementerList) {
      incrementerFutures.add(executorService.submit(incrementer));
    }

    // Each future returns that thread's elapsed wall time in milliseconds.
    long totalTime = 0;
    for(Future<Long> future : incrementerFutures) {
      totalTime += future.get();
    }
    double averageTime = (double) totalTime / threads;
    double qps = 1000 * (double)actualQueries / averageTime;
    log.info(String.format("Average qps: %f.", qps));
    executorService.shutdown();
    return qps;
  }
}
| 4,374 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/performance/MetricsPerformanceTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.performance;
import org.testng.annotations.Test;
import com.google.common.collect.Sets;
/**
 * Class for running Gobblin metrics performance tests.
 *
 * <p>Each method sweeps one dimension (metric type, thread count, or fork level) through
 * {@link PerformanceUtils#multiTest()} and prints a QPS table. Long literals use an
 * uppercase {@code L} suffix (lowercase {@code l} is easily misread as {@code 1}).</p>
 */
@Test(groups = { "performance" })
public class MetricsPerformanceTest {

  /** Sweeps context-tree depth and counter count with a single thread. */
  public void counterPerformance() throws Exception {
    PerformanceUtils.multiTest().name("CounterPerformance").threads(1).depth(Sets.newHashSet(0, 1, 2, 3)).
        forkAtDepth(0).counters(Sets.newHashSet(0, 1, 2, 3)).run();
  }

  /** Sweeps depth and meter count; meters are slower, so fewer queries are used. */
  public void meterPerformance() throws Exception {
    PerformanceUtils.multiTest().name("MeterPerformance").threads(1).depth(Sets.newHashSet(0, 1, 2, 3)).
        forkAtDepth(0).meters(Sets.newHashSet(0, 1, 2, 3)).queries(20000000L).run();
  }

  /** Sweeps depth and histogram count. */
  public void histogramPerformance() throws Exception {
    PerformanceUtils.multiTest().name("HistogramPerformance").threads(1).depth(Sets.newHashSet(0, 1, 2, 3)).
        forkAtDepth(0).histograms(Sets.newHashSet(0, 1, 2, 3)).queries(10000000L).run();
  }

  /** Sweeps depth and timer count. */
  public void timerPerformance() throws Exception {
    PerformanceUtils.multiTest().name("TimerPerformance").threads(1).depth(Sets.newHashSet(0, 1, 2, 3)).
        forkAtDepth(0).timers(Sets.newHashSet(0, 1, 2, 3)).queries(10000000L).run();
  }

  /** Sweeps thread count at two fork levels to measure contention on shared contexts. */
  public void parallelizationTest() throws Exception {
    PerformanceUtils.multiTest().name("ParallelizationTest").threads(Sets.newHashSet(1, 2, 3, 4, 5, 6, 7, 8)).
        forkAtDepth(Sets.newHashSet(0, 3)).depth(4).counters(1).run();
  }

  /** Sweeps the fork level with a fixed thread count and tree depth. */
  public void forkLevelPerformance() throws Exception {
    PerformanceUtils.multiTest().name("ForkLevelPerformance").threads(8).depth(4).counters(1).
        forkAtDepth(Sets.newHashSet(0, 1, 2, 3)).run();
  }
}
| 4,375 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/performance/Incrementer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.performance;
import lombok.extern.slf4j.Slf4j;
import java.util.concurrent.Callable;
/**
 * {@link Callable} that keeps incrementing a count up to a maximum count. At each iteration, it
 * calls a custom callback. {@link #call()} returns the elapsed wall time in milliseconds.
 */
@Slf4j
public class Incrementer implements Callable<Long> {

  private final long maxCount;
  private final Runnable runnable;
  // Progress is logged roughly ten times over the run.
  private final long logInterval;

  /**
   * @param maxCount number of iterations of increment to run in call() method.
   * @param runnable the {@link Runnable#run} method will be called for each iteration.
   */
  public Incrementer(long maxCount, Runnable runnable) {
    this.maxCount = maxCount;
    this.runnable = runnable;
    // Guard against maxCount < 10: an interval of 0 would trigger the progress
    // log on every single iteration.
    this.logInterval = Math.max(1, maxCount / 10);
  }

  /**
   * Runs the callback {@code maxCount} times.
   *
   * @return elapsed time in milliseconds.
   */
  @Override public Long call() throws Exception {
    long count = 0;
    long startTime = System.currentTimeMillis();
    long nextLog = this.logInterval;
    while(count < this.maxCount) {
      if(count >= nextLog) {
        log.info(String.format("Incremented %d of %d times.", count, this.maxCount));
        nextLog += this.logInterval;
      }
      onIteration();
      count++;
    }
    long endTime = System.currentTimeMillis();
    return endTime - startTime;
  }

  /** Hook invoked once per iteration; delegates to the configured callback. */
  protected void onIteration() {
    this.runnable.run();
  }
}
| 4,376 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric/filter/MetricTypeFilterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.metric.filter;
import static org.mockito.Mockito.mock;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Meter;
import com.codahale.metrics.Timer;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link MetricTypeFilter}: a null type list accepts every metric, a single
 * type accepts only that kind, and listing all types accepts everything again.
 */
@Test
public class MetricTypeFilterTest {

  @Test
  public void matchesTest() {
    Counter counter = mock(Counter.class);
    Gauge gauge = mock(Gauge.class);
    Histogram histogram = mock(Histogram.class);
    Meter meter = mock(Meter.class);
    Timer timer = mock(Timer.class);

    // A null type list means "accept everything".
    assertMatches(new MetricTypeFilter(null), counter, gauge, histogram, meter, timer,
        true, true, true, true, true);

    // A single type only matches metrics of that exact kind.
    assertMatches(new MetricTypeFilter("COUNTER"), counter, gauge, histogram, meter, timer,
        true, false, false, false, false);

    // Listing every type accepts everything again.
    assertMatches(new MetricTypeFilter("COUNTER,GAUGE,HISTOGRAM,METER,TIMER"), counter, gauge, histogram, meter, timer,
        true, true, true, true, true);
  }

  /** Asserts the filter's verdict for each of the five metric kinds, in a fixed order. */
  private static void assertMatches(MetricTypeFilter filter, Counter counter, Gauge gauge,
      Histogram histogram, Meter meter, Timer timer, boolean expectCounter, boolean expectGauge,
      boolean expectHistogram, boolean expectMeter, boolean expectTimer) {
    Assert.assertEquals(filter.matches("", counter), expectCounter);
    Assert.assertEquals(filter.matches("", gauge), expectGauge);
    Assert.assertEquals(filter.matches("", histogram), expectHistogram);
    Assert.assertEquals(filter.matches("", meter), expectMeter);
    Assert.assertEquals(filter.matches("", timer), expectTimer);
  }
}
| 4,377 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric/filter/MetricNameRegexFilterTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.metric.filter;
import static org.mockito.Mockito.mock;
import com.codahale.metrics.Metric;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link MetricNameRegexFilter}: a match-all pattern accepts every name, while a
 * literal pattern accepts only the exact matching name.
 */
@Test
public class MetricNameRegexFilterTest {

  @Test
  public void matchesTest() {
    // ".*" matches every metric name.
    MetricNameRegexFilter matchAll = new MetricNameRegexFilter(".*");
    for (String metricName : new String[] { "test1", "test2", "test3" }) {
      Assert.assertTrue(matchAll.matches(metricName, mock(Metric.class)));
    }

    // A literal pattern matches only the identical name.
    MetricNameRegexFilter matchTest1 = new MetricNameRegexFilter("test1");
    Assert.assertTrue(matchTest1.matches("test1", mock(Metric.class)));
    Assert.assertFalse(matchTest1.matches("test2", mock(Metric.class)));
    Assert.assertFalse(matchTest1.matches("test3", mock(Metric.class)));
  }
}
| 4,378 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/test/java/org/apache/gobblin/metrics/metric/filter/MetricFiltersTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.metric.filter;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricFilter;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests for {@link MetricFilters}: the conjunction of two filters matches only when both
 * operands match (full two-by-two truth table).
 */
@Test
public class MetricFiltersTest {

  @Test
  public void andTest() {
    MetricFilter alwaysTrue = mock(MetricFilter.class);
    when(alwaysTrue.matches(any(String.class), any(Metric.class))).thenReturn(true);

    MetricFilter alwaysFalse = mock(MetricFilter.class);
    when(alwaysFalse.matches(any(String.class), any(Metric.class))).thenReturn(false);

    // Exercise all four operand combinations; only true AND true matches.
    MetricFilter[] operands = { alwaysTrue, alwaysFalse };
    for (MetricFilter left : operands) {
      for (MetricFilter right : operands) {
        boolean expected = left == alwaysTrue && right == alwaysTrue;
        Assert.assertEquals(MetricFilters.and(left, right).matches("", mock(Metric.class)), expected);
      }
    }
  }
}
| 4,379 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.event;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import javax.annotation.Nullable;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.notification.EventNotification;
import org.apache.gobblin.metrics.notification.Notification;
public class CountEventBuilderTest {

  /**
   * Submits a {@link CountEventBuilder} through a {@link MetricContext} and verifies that the
   * delivered {@link GobblinTrackingEvent} carries the expected name, namespace and count metadata.
   */
  @Test
  public void test() {
    String name = "TestName";
    int count = 10;
    MetricContext context = new MetricContext.Builder("name").build();
    CountEventBuilder countEventBuilder = new CountEventBuilder(name, count);
    // The notification target is invoked when the event is submitted below, so the
    // assertions inside it run as part of this test.
    context.addNotificationTarget(new com.google.common.base.Function<Notification, Void>() {
      @Nullable
      @Override
      public Void apply(@Nullable Notification input) {
        if (input instanceof EventNotification) {
          GobblinTrackingEvent event = ((EventNotification) input).getEvent();
          Map<String, String> metadata = event.getMetadata();
          Assert.assertTrue(metadata.containsKey(GobblinEventBuilder.EVENT_TYPE));
          Assert.assertTrue(metadata.containsKey(CountEventBuilder.COUNT_KEY));
          Assert.assertEquals(metadata.get(GobblinEventBuilder.EVENT_TYPE), CountEventBuilder.COUNT_EVENT_TYPE);
          Assert.assertEquals(event.getName(), name);
          Assert.assertEquals(event.getNamespace(), GobblinEventBuilder.NAMESPACE);
          Assert.assertEquals(Integer.parseInt(metadata.get(CountEventBuilder.COUNT_KEY)), count);
        }
        return null;
      }
    });
    EventSubmitter.submit(context, countEventBuilder);
  }

  /**
   * Verifies the {@code build()} -> {@code fromEvent()} round trip, and that a generic
   * {@link GobblinTrackingEvent} is not recognized as a count event.
   */
  @Test
  public void fromEventTest() {
    String name = "TestName";
    int count = 10;
    CountEventBuilder countEventBuilder = new CountEventBuilder(name, count);
    GobblinTrackingEvent event = countEventBuilder.build();

    // A built count event must be recognized and parsed back into an equivalent builder.
    CountEventBuilder builderFromEvent = CountEventBuilder.fromEvent(event);
    Assert.assertTrue(CountEventBuilder.isCountEvent(event));
    Assert.assertNotNull(builderFromEvent);
    Assert.assertEquals(builderFromEvent.getName(), name);
    Assert.assertEquals(builderFromEvent.getCount(), count);

    // A generic event is not a count event and must parse to null.
    event = new GobblinTrackingEvent();
    countEventBuilder = CountEventBuilder.fromEvent(event);
    Assert.assertFalse(CountEventBuilder.isCountEvent(event));
    Assert.assertNull(countEventBuilder);
  }
}
| 4,380 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.event;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import javax.annotation.Nullable;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.notification.EventNotification;
import org.apache.gobblin.metrics.notification.Notification;
public class TimingEventTest {

  /**
   * Submits a {@link TimingEvent} through a {@link MetricContext} and verifies that the delivered
   * {@link GobblinTrackingEvent} carries the expected name, namespace and timing metadata.
   */
  @Test
  public void test() {
    String name = "TestName";
    // Renamed from the original misspelling "namepace".
    String namespace = "TestNamespace";
    MetricContext context = new MetricContext.Builder("name").build();
    context.addNotificationTarget(new com.google.common.base.Function<Notification, Void>() {
      @Nullable
      @Override
      public Void apply(@Nullable Notification input) {
        if (input instanceof EventNotification) {
          GobblinTrackingEvent event = ((EventNotification) input).getEvent();
          Map<String, String> metadata = event.getMetadata();
          Assert.assertEquals(event.getNamespace(), namespace);
          Assert.assertTrue(metadata.containsKey(GobblinEventBuilder.EVENT_TYPE));
          Assert.assertTrue(metadata.containsKey(TimingEvent.METADATA_START_TIME));
          Assert.assertTrue(metadata.containsKey(TimingEvent.METADATA_END_TIME));
          Assert.assertTrue(metadata.containsKey(TimingEvent.METADATA_DURATION));
          Assert.assertEquals(metadata.get(GobblinEventBuilder.EVENT_TYPE), TimingEvent.METADATA_TIMING_EVENT);
          Assert.assertEquals(event.getName(), name);
        }
        return null;
      }
    });
    // close() stamps end time / duration and submits the event, triggering the checks above.
    TimingEvent timingEvent = new TimingEvent(new EventSubmitter.Builder(context, namespace).build(), name);
    timingEvent.close();
  }

  /**
   * Verifies the {@code build()} -> {@code fromEvent()} round trip, and that a generic
   * {@link GobblinTrackingEvent} is not recognized as a timing event.
   */
  @Test
  public void fromEventTest() {
    String name = "TestName";
    String namespace = "TestNamespace";
    MetricContext context = new MetricContext.Builder("name").build();
    TimingEvent timingEventBuilder = new TimingEvent(new EventSubmitter.Builder(context, namespace).build(), name);
    GobblinTrackingEvent event = timingEventBuilder.build();
    timingEventBuilder.close();

    // A built timing event must be recognized and parsed back into an equivalent builder.
    TimingEvent builderFromEvent = TimingEvent.fromEvent(event);
    Assert.assertTrue(TimingEvent.isTimingEvent(event));
    Assert.assertNotNull(builderFromEvent);
    Assert.assertEquals(builderFromEvent.getName(), name);
    // Timing values must be internally consistent: start <= end, non-negative duration.
    Assert.assertTrue(builderFromEvent.getStartTime() <= System.currentTimeMillis());
    Assert.assertTrue(builderFromEvent.getEndTime() >= builderFromEvent.getStartTime());
    Assert.assertTrue(builderFromEvent.getDuration() >= 0);

    // A generic event is not a timing event and must parse to null.
    event = new GobblinTrackingEvent();
    timingEventBuilder = TimingEvent.fromEvent(event);
    Assert.assertFalse(TimingEvent.isTimingEvent(event));
    Assert.assertNull(timingEventBuilder);
  }
}
| 4,381 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.event;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
/**
* Test all {@link GobblinEventBuilder}s
*/
public class GobblinEventTest {

  /** Builds a {@link JobStateEventBuilder} with and without a status, then parses it back. */
  @Test
  public void testJobStateEvent() {
    String jobUrl = "jobUrl";

    // Build without a status: only eventType and jobTrackingURL metadata are present.
    JobStateEventBuilder builder = new JobStateEventBuilder(JobStateEventBuilder.MRJobState.MR_JOB_STATE);
    builder.jobTrackingURL = jobUrl;
    GobblinTrackingEvent trackingEvent = builder.build();
    Assert.assertEquals(trackingEvent.getName(), "MRJobState");
    Assert.assertNull(trackingEvent.getNamespace());
    Map<String, String> meta = trackingEvent.getMetadata();
    Assert.assertEquals(meta.size(), 2);
    Assert.assertEquals(meta.get("eventType"), "JobStateEvent");
    Assert.assertEquals(meta.get("jobTrackingURL"), jobUrl);
    Assert.assertNull(meta.get("jobState"));

    // Build with a status: a jobState entry is added.
    builder.status = JobStateEventBuilder.Status.FAILED;
    trackingEvent = builder.build();
    meta = trackingEvent.getMetadata();
    Assert.assertEquals(meta.size(), 3);
    Assert.assertEquals(meta.get("jobState"), "FAILED");

    // Round trip through GobblinTrackingEvent.
    JobStateEventBuilder parsed = JobStateEventBuilder.fromEvent(trackingEvent);
    Assert.assertEquals(parsed.status, JobStateEventBuilder.Status.FAILED);
    Assert.assertEquals(parsed.jobTrackingURL, jobUrl);
    Assert.assertEquals(parsed.getMetadata().size(), 1);
  }

  /** Builds an {@link EntityMissingEventBuilder} and parses it back from the tracking event. */
  @Test
  public void testEntityMissingEvent() {
    String instance = "mytopic";
    String eventClass = "TopicMissing";

    EntityMissingEventBuilder builder = new EntityMissingEventBuilder(eventClass, instance);
    GobblinTrackingEvent trackingEvent = builder.build();
    Assert.assertEquals(trackingEvent.getName(), eventClass);
    Assert.assertNull(trackingEvent.getNamespace());
    Map<String, String> meta = trackingEvent.getMetadata();
    Assert.assertEquals(meta.size(), 2);
    Assert.assertEquals(meta.get("eventType"), "EntityMissingEvent");
    Assert.assertEquals(meta.get("entityInstance"), instance);

    // Round trip: the event is not a job-state event, but parses as an entity-missing event.
    Assert.assertNull(JobStateEventBuilder.fromEvent(trackingEvent));
    EntityMissingEventBuilder parsed = EntityMissingEventBuilder.fromEvent(trackingEvent);
    Assert.assertEquals(parsed.getName(), eventClass);
    Assert.assertEquals(parsed.getInstance(), instance);
    Assert.assertEquals(parsed.getMetadata().size(), 1);
  }
}
| 4,382 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics.event.lineage;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.gobblin.broker.SharedResourcesBrokerFactory;
import org.apache.gobblin.broker.gobblin_scopes.GobblinScopeTypes;
import org.apache.gobblin.broker.gobblin_scopes.JobScopeInstance;
import org.apache.gobblin.broker.gobblin_scopes.TaskScopeInstance;
import org.apache.gobblin.broker.iface.SharedResourcesBroker;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.dataset.DatasetConstants;
import org.apache.gobblin.dataset.DatasetDescriptor;
import org.apache.gobblin.dataset.Descriptor;
import org.apache.gobblin.dataset.PartitionDescriptor;
import org.apache.gobblin.metrics.GobblinTrackingEvent;
import org.apache.gobblin.metrics.event.GobblinEventBuilder;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.typesafe.config.ConfigFactory;
/**
 * Tests for loading lineage events from {@link State} objects.
 */
public class LineageEventTest {

  /** End-to-end check of setting a source, putting destinations and loading the events back. */
  @Test
  public void testEvent() {
    final String topic = "testTopic";
    final String kafka = "kafka";
    final String hdfs = "hdfs";
    final String mysql = "mysql";
    final String branch = "branch";

    State state0 = new State();
    LineageInfo lineageInfo = getLineageInfo();
    DatasetDescriptor source = new DatasetDescriptor(kafka, topic);
    lineageInfo.setSource(source, state0);
    DatasetDescriptor destination00 = new DatasetDescriptor(hdfs, "/data/tracking");
    destination00.addMetadata(branch, "0");
    lineageInfo.putDestination(destination00, 0, state0);
    DatasetDescriptor destination01 = new DatasetDescriptor(mysql, "kafka.testTopic");
    destination01.addMetadata(branch, "1");
    lineageInfo.putDestination(destination01, 1, state0);
    Map<String, Set<LineageEventBuilder>> events = LineageInfo.load(state0);
    verify(first(events.get("0")), topic, source, destination00);
    verify(first(events.get("1")), topic, source, destination01);

    State state1 = new State();
    lineageInfo.setSource(source, state1);
    List<State> states = Lists.newArrayList();
    states.add(state0);
    states.add(state1);

    // Only full fledged lineage events (source AND destination) are loaded; state1 has no
    // destination yet, so only the two events from state0 appear.
    Collection<LineageEventBuilder> eventsList = LineageInfo.load(states);
    Assert.assertEquals(eventsList.size(), 2);
    Assert.assertEquals(getLineageEvent(eventsList, 0, hdfs), first(events.get("0")));
    Assert.assertEquals(getLineageEvent(eventsList, 1, mysql), first(events.get("1")));

    // After adding a destination to state1 there are 3 full fledged lineage events.
    DatasetDescriptor destination12 = new DatasetDescriptor(mysql, "kafka.testTopic2");
    destination12.addMetadata(branch, "2");
    lineageInfo.putDestination(destination12, 2, state1);
    eventsList = LineageInfo.load(states);
    Assert.assertEquals(eventsList.size(), 3);
    Assert.assertEquals(getLineageEvent(eventsList, 0, hdfs), first(events.get("0")));
    Assert.assertEquals(getLineageEvent(eventsList, 1, mysql), first(events.get("1")));
    verify(getLineageEvent(eventsList, 2, mysql), topic, source, destination12);

    // 5 destinations have been put in total, but only 4 unique lineage events remain.
    DatasetDescriptor destination10 = destination12;
    lineageInfo.putDestination(destination10, 0, state1);
    DatasetDescriptor destination11 = new DatasetDescriptor("hive", "kafka.testTopic1");
    destination11.addMetadata(branch, "1");
    lineageInfo.putDestination(destination11, 1, state1);
    eventsList = LineageInfo.load(states);
    Assert.assertEquals(eventsList.size(), 4);
    Assert.assertEquals(getLineageEvent(eventsList, 0, hdfs), first(events.get("0")));
    Assert.assertEquals(getLineageEvent(eventsList, 1, mysql), first(events.get("1")));
    // Either branch 0 or 2 of state 1 is selected for the duplicated destination.
    LineageEventBuilder event12 = getLineageEvent(eventsList, 0, mysql);
    if (event12 == null) {
      event12 = getLineageEvent(eventsList, 2, mysql);
    }
    verify(event12, topic, source, destination12);
    verify(getLineageEvent(eventsList, 1, "hive"), topic, source, destination11);
  }

  /** Verifies lineage events whose destination is a partition of a dataset. */
  @Test
  public void testEventForPartitionedDataset() {
    final String topic = "testTopic";
    final String kafka = "kafka";
    final String hdfs = "hdfs";
    final String path = "/data/tracking/PageViewEvent";
    final String partitionName = "hourly/2018/08/15/15";

    State state = new State();
    LineageInfo lineageInfo = getLineageInfo();
    DatasetDescriptor source = new DatasetDescriptor(kafka, topic);
    lineageInfo.setSource(source, state);
    DatasetDescriptor destinationDataset = new DatasetDescriptor(hdfs, path);
    PartitionDescriptor destination = new PartitionDescriptor(partitionName, destinationDataset);
    lineageInfo.putDestination(destination, 0, state);
    Map<String, Set<LineageEventBuilder>> events = LineageInfo.load(state);
    LineageEventBuilder event = first(events.get("0"));
    verify(event, topic, source, destination);

    // Verify the generated gobblin tracking event round-trips back to the same builder.
    GobblinTrackingEvent trackingEvent = event.build();
    Assert.assertTrue(LineageEventBuilder.isLineageEvent(trackingEvent));
    Assert.assertEquals(LineageEventBuilder.fromEvent(trackingEvent), event);
  }

  /** Verifies that multiple puts on the same branch accumulate distinct destinations. */
  @Test
  public void testMultiPuts() {
    final String topic = "testTopic";
    final String kafka = "kafka";
    final String hdfs = "hdfs";
    final String path = "/data/tracking/PageViewEvent";
    final String partitionName = "hourly/2018/08/15/15";

    State state = new State();
    LineageInfo lineageInfo = getLineageInfo();
    DatasetDescriptor source = new DatasetDescriptor(kafka, topic);
    lineageInfo.setSource(source, state);
    DatasetDescriptor destinationDataset = new DatasetDescriptor(hdfs, path);
    PartitionDescriptor destination = new PartitionDescriptor(partitionName, destinationDataset);
    lineageInfo.putDestination(Lists.newArrayList(destination), 0, state);
    // Put another destination on the same branch.
    DatasetDescriptor destinationDataset2 = new DatasetDescriptor(kafka, "nextTopic");
    lineageInfo.putDestination(Lists.newArrayList(destinationDataset2), 0, state);
    Map<String, Set<LineageEventBuilder>> eventsMap = LineageInfo.load(state);
    Assert.assertEquals(eventsMap.size(), 1);
    Set<LineageEventBuilder> events = eventsMap.get("0");
    Assert.assertEquals(events.size(), 2);
    verifyOne(events, topic, source, destination);
    verifyOne(events, topic, source, destinationDataset2);
  }

  /**
   * Finds the event whose destination has the given platform and branch id, or {@code null}
   * if no such event exists.
   */
  private LineageEventBuilder getLineageEvent(Collection<LineageEventBuilder> events, int branchId, String destinationPlatform) {
    for (LineageEventBuilder event : events) {
      DatasetDescriptor descriptor = (DatasetDescriptor) event.getDestination();
      if (descriptor.getPlatform().equals(destinationPlatform) &&
          descriptor.getMetadata().get(DatasetConstants.BRANCH).equals(String.valueOf(branchId))) {
        return event;
      }
    }
    return null;
  }

  /**
   * Builds a job-scoped and a task-scoped broker and asserts both resolve to the same shared
   * {@link LineageInfo} instance.
   */
  private LineageInfo getLineageInfo() {
    SharedResourcesBroker<GobblinScopeTypes> instanceBroker = SharedResourcesBrokerFactory
        .createDefaultTopLevelBroker(ConfigFactory.empty(), GobblinScopeTypes.GLOBAL.defaultScopeInstance());
    SharedResourcesBroker<GobblinScopeTypes> jobBroker = instanceBroker
        .newSubscopedBuilder(new JobScopeInstance("LineageEventTest", String.valueOf(System.currentTimeMillis())))
        .build();
    SharedResourcesBroker<GobblinScopeTypes> taskBroker = jobBroker
        .newSubscopedBuilder(new TaskScopeInstance("LineageEventTestTask" + String.valueOf(System.currentTimeMillis())))
        .build();
    LineageInfo obj1 = LineageInfo.getLineageInfo(jobBroker).get();
    LineageInfo obj2 = LineageInfo.getLineageInfo(taskBroker).get();
    Assert.assertSame(obj1, obj2);
    return obj2;
  }

  /** Asserts the event's name, namespace, event type, source and destination. */
  private void verify(LineageEventBuilder event, String name, Descriptor source, Descriptor destination) {
    Assert.assertEquals(event.getName(), name);
    Assert.assertEquals(event.getNamespace(), LineageEventBuilder.LIENAGE_EVENT_NAMESPACE);
    Assert.assertEquals(event.getMetadata().get(GobblinEventBuilder.EVENT_TYPE), LineageEventBuilder.LINEAGE_EVENT_TYPE);
    Assert.assertEquals(event.getSource(), source);
    Assert.assertEquals(event.getDestination(), destination);
  }

  /** Returns the first element of a non-empty collection. */
  private <T> T first(Collection<T> collection) {
    return collection.iterator().next();
  }

  /** Finds the event with the given destination in the collection and verifies it, else fails. */
  private void verifyOne(Collection<LineageEventBuilder> collection, String name, Descriptor source, Descriptor destination) {
    for (LineageEventBuilder event : collection) {
      if (event.getDestination().equals(destination)) {
        verify(event, name, source, destination);
        return;
      }
    }
    Assert.fail("Could not find a matching lineage with destination: " + destination);
  }
}
| 4,383 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.filesystem;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.UUID;
import lombok.Getter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.testng.Assert;
import org.testng.annotations.Test;
public class MetricsFileSystemInstrumentationTest {
private final String originalURI = "hdfs://localhost:9000";
private final String instrumentedURI = "instrumented-hdfs://localhost:9000";
/**
 * Test fixture that creates (and can clean up) a small, well-known directory tree on HDFS
 * against which the listing/glob tests in this class assert their results. Requires a live
 * HDFS at {@code originalURI}.
 */
public class HDFSRoot {
@Getter
private FileSystem fs;
@Getter
private Path rootPath;
@Getter
private Path filePath1, filePath2, filePath3, filePath4, filePath5, filePath6, filePath7, filePath8;
@Getter
private Path dirPath1, dirPath2, dirPath3, dirPath4, dirPath5, dirPath6;
// /tmp -> root -> file1
// -> file2
// -> dir1
// -> dir2.ext -> file3
// -> file4
// -> dir3 -> file5.ext
// -> file6.ext
// -> dir4 -> dir5
// -> dir6 -> file7.ext
// -> file8.ext
// Builds the tree shown above under `root`, wiping any leftovers from a previous run first.
public HDFSRoot(String root) throws IOException, URISyntaxException {
this.fs = FileSystem.get(new URI(originalURI), new Configuration());
this.rootPath = new Path(root);
fs.delete(rootPath, true);
fs.mkdirs(rootPath);
// Files and directories directly under the root.
String file1 = "file1";
filePath1 = new Path(root, file1);
fs.createNewFile(filePath1);
String file2 = "file2";
filePath2 = new Path(root, file2);
fs.createNewFile(filePath2);
String dir1 = "dir1";
dirPath1 = new Path(root, dir1);
fs.mkdirs(dirPath1);
String dir2 = "dir2";
dirPath2 = new Path(root, dir2 + ".ext");
fs.mkdirs(dirPath2);
String dir3 = "dir3";
dirPath3 = new Path(root, dir3);
fs.mkdirs(dirPath3);
// Children of dir2.ext and dir3.
String file3 = "file3";
filePath3 = new Path(dirPath2, file3);
fs.createNewFile(filePath3);
String file4 = "file4";
filePath4 = new Path(dirPath2, file4);
fs.createNewFile(filePath4);
String file5 = "file5";
filePath5 = new Path(dirPath3, file5 + ".ext");
fs.createNewFile(filePath5);
String file6 = "file6";
filePath6 = new Path(dirPath3, file6 + ".ext");
fs.createNewFile(filePath6);
// Nested directories dir4/dir5 and dir4/dir6 with their files.
String dir4 = "dir4";
dirPath4 = new Path(dirPath3, dir4);
fs.mkdirs(dirPath4);
String dir5 = "dir5";
dirPath5 = new Path(dirPath4, dir5);
fs.mkdirs(dirPath5);
String dir6 = "dir6";
dirPath6 = new Path(dirPath4, dir6);
fs.mkdirs(dirPath6);
String file7 = "file7";
filePath7 = new Path(dirPath6, file7 + ".ext");
fs.createNewFile(filePath7);
String file8 = "file8";
filePath8 = new Path(root, file8 + ".ext");
fs.createNewFile(filePath8);
}
// Removes the entire fixture tree from HDFS.
public void cleanupRoot() throws IOException {
fs.delete(rootPath, true);
}
}
/**
 * Smoke test of basic create/exists/delete behavior through the instrumented file system.
 *
 * This test is disabled because it requires a local hdfs cluster at localhost:9000, which
 * requires installation and setup. Changes to {@link MetricsFileSystemInstrumentation} should
 * be followed by a manual run of these tests.
 *
 * TODO: figure out how to fully automate this test.
 * @throws Exception on any HDFS failure
 */
@Test(enabled = false)
public void test() throws Exception {
  // Reuse the shared instrumented-FS URI field rather than duplicating the literal.
  String uri = instrumentedURI;
  FileSystem fs = FileSystem.get(new URI(uri), new Configuration());
  String name = UUID.randomUUID().toString();
  fs.mkdirs(new Path("/tmp"));

  // Test absolute paths
  Path absolutePath = new Path("/tmp", name);
  Assert.assertFalse(fs.exists(absolutePath));
  fs.createNewFile(absolutePath);
  Assert.assertTrue(fs.exists(absolutePath));
  Assert.assertEquals(fs.getFileStatus(absolutePath).getLen(), 0);
  fs.delete(absolutePath, false);
  Assert.assertFalse(fs.exists(absolutePath));

  // Test fully qualified paths
  Path fqPath = new Path(uri + "/tmp", name);
  Assert.assertFalse(fs.exists(fqPath));
  fs.createNewFile(fqPath);
  Assert.assertTrue(fs.exists(fqPath));
  Assert.assertEquals(fs.getFileStatus(fqPath).getLen(), 0);
  fs.delete(fqPath, false);
  Assert.assertFalse(fs.exists(fqPath));
}
// listStatus(Path) should tick listStatusTimer once and see the 6 direct children of the root.
@Test(enabled = false)
public void testListStatusPath() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPath");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path rootPath = hdfsRoot.getRootPath();
FileStatus[] status = fs.listStatus(rootPath);
Assert.assertEquals(fs.listStatusTimer.getCount(), 1);
Assert.assertEquals(status.length, 6);
hdfsRoot.cleanupRoot();
}
// Listing a nonexistent path throws, but the call should still count as one timed invocation.
@Test(enabled = false)
public void testListStatusPathError() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPathError");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
try {
fs.listStatus(new Path("/tmp/nonexistence"));
} catch (Exception e) {
// The visit of a nonexistent path is still considered one visit, so the timer reads 1.
Assert.assertEquals(fs.listStatusTimer.getCount(), 1);
} finally {
hdfsRoot.cleanupRoot();
}
}
// listStatus(Path[]) should tick the timer once per path (2) and return 3 statuses in total
// (file2 itself plus the 2 children of dir2.ext).
@Test(enabled = false)
public void testListStatusPaths() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPaths");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path[] paths = {hdfsRoot.filePath2, hdfsRoot.dirPath2};
FileStatus[] status = fs.listStatus(paths);
Assert.assertEquals(fs.listStatusTimer.getCount(), 2);
Assert.assertEquals(status.length, 3);
hdfsRoot.cleanupRoot();
}
// When one of several paths does not exist, listing aborts there; the timer still counts
// every attempted visit (dir2.ext plus the nonexistent path = 2).
@Test(enabled = false)
public void testListStatusPathsError() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPathsError");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
try {
Path[] paths = {hdfsRoot.dirPath2, new Path("/tmp/nonexistence"), hdfsRoot.filePath2};
fs.listStatus(paths);
} catch (Exception e) {
// Search stops at the nonexistent path; its visit is still counted, so the timer reads 2.
Assert.assertEquals(fs.listStatusTimer.getCount(), 2);
} finally {
hdfsRoot.cleanupRoot();
}
}
// Filtered listStatus over dir3 should tick the timer once and match only the 2 ".ext" files.
@Test(enabled = false)
public void testListStatusPathWithFilter() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPathWithFilter");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
FileStatus[] status = fs.listStatus(hdfsRoot.getDirPath3(), new PathFilter() {
@Override
public boolean accept(Path path) {
return path.toString().endsWith(".ext");
}
});
Assert.assertEquals(fs.listStatusTimer.getCount(), 1);
Assert.assertEquals(status.length, 2);
hdfsRoot.cleanupRoot();
}
// Filtered listStatus over 3 paths should tick the timer 3 times and match 2 ".ext" entries.
@Test(enabled = false)
public void testListStatusPathsWithFilter() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListStatusPathsWithFilter");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path[] paths = {hdfsRoot.filePath2, hdfsRoot.dirPath2, hdfsRoot.dirPath3};
FileStatus[] status = fs.listStatus(paths, new PathFilter() {
@Override
public boolean accept(Path path) {
return path.toString().endsWith(".ext");
}
});
Assert.assertEquals(fs.listStatusTimer.getCount(), 3);
Assert.assertEquals(status.length, 2);
hdfsRoot.cleanupRoot();
}
// Recursive listFiles should use only listFilesTimer, never listStatusTimer.
@Test(enabled = false)
public void testListFiles() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/ListFiles");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
fs.listFiles(hdfsRoot.getRootPath(), true);
Assert.assertEquals(fs.listFilesTimer.getCount(), 1);
Assert.assertEquals(fs.listStatusTimer.getCount(), 0);
hdfsRoot.cleanupRoot();
}
// globStatus with a pattern should tick globStatusTimer once; "*/*.ext" matches the 2
// ".ext" files one level below the root's subdirectories.
@Test(enabled = false)
public void testGlobStatus() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/GlobStatus");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
FileStatus[] status = fs.globStatus(new Path("/tmp/GlobStatus/*/*.ext"));
Assert.assertEquals(fs.globStatusTimer.getCount(), 1);
Assert.assertEquals(status.length, 2);
hdfsRoot.cleanupRoot();
}
// globStatus with an additional PathFilter still counts as a single timed call.
@Test(enabled = false)
public void testGlobStatusWithFilter() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/GlobStatusWithFilter");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
FileStatus[] status = fs.globStatus(new Path("/tmp/GlobStatusWithFilter/*/*"), new PathFilter() {
@Override
public boolean accept(Path path) {
return path.toString().endsWith(".ext");
}
});
Assert.assertEquals(fs.globStatusTimer.getCount(), 1);
Assert.assertEquals(status.length, 2);
hdfsRoot.cleanupRoot();
}
// mkdirs(Path, FsPermission) for a single new directory should tick mkdirTimer once.
@Test(enabled = false)
public void testMakeDirWithPermission() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/mkdirWithPermission");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path newDir = new Path (hdfsRoot.getRootPath(), new Path("X"));
fs.mkdirs(newDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ));
Assert.assertEquals(fs.mkdirTimer.getCount(), 1);
hdfsRoot.cleanupRoot();
}
// Creating a nested path X/Y/Z in one mkdirs call is still a single timed invocation.
@Test(enabled = false)
public void testMakeDirs() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/mkdirs");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path newDir = new Path (hdfsRoot.getRootPath(), new Path("X/Y/Z"));
fs.mkdirs(newDir);
Assert.assertEquals(fs.mkdirTimer.getCount(), 1);
hdfsRoot.cleanupRoot();
}
// Same as above but with an explicit permission argument: still one timed invocation.
@Test(enabled = false)
public void testMakeDirsWithPermission() throws IOException, URISyntaxException {
HDFSRoot hdfsRoot = new HDFSRoot("/tmp/mkdirsWithPermission");
MetricsFileSystemInstrumentation
fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
Path newDir = new Path (hdfsRoot.getRootPath(), new Path("X/Y/Z"));
fs.mkdirs(newDir, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ));
Assert.assertEquals(fs.mkdirTimer.getCount(), 1);
hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testDelete() throws IOException, URISyntaxException {
  // A recursive delete counts as a single timed operation.
  HDFSRoot root = new HDFSRoot("/tmp/delete");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  instrumentedFs.delete(root.getDirPath3(), true);
  Assert.assertEquals(instrumentedFs.deleteTimer.getCount(), 1);
  root.cleanupRoot();
}
@Test(enabled = false)
public void testRename() throws IOException, URISyntaxException {
  // rename() is timed once, and the source path must no longer exist
  // while the destination path does.
  HDFSRoot root = new HDFSRoot("/tmp/rename");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path destination = new Path("/tmp/rename/AfterRename");
  instrumentedFs.rename(root.getDirPath3(), destination);
  Assert.assertEquals(instrumentedFs.renameTimer.getCount(), 1);
  Assert.assertFalse(instrumentedFs.exists(root.getDirPath3()));
  Assert.assertTrue(instrumentedFs.exists(destination));
  root.cleanupRoot();
}
@Test(enabled = false)
public void testCreate1() throws IOException, URISyntaxException {
  // create(Path) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources: the original leaked the open stream whenever the
  // assertion failed before close(); now it is always closed.
  try (FSDataOutputStream fstream = fs.create(newFile)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate2() throws IOException, URISyntaxException {
  // create(Path, overwrite) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, true)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate3() throws IOException, URISyntaxException {
  // create(Path, overwrite, bufferSize) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, true, 300)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate4() throws IOException, URISyntaxException {
  // create(Path, overwrite, bufferSize, progressable) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, true, 300, null)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate5() throws IOException, URISyntaxException {
  // create(Path, overwrite, bufferSize, replication, blockSize) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, true, 300, (short) 1, 1048576)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate6() throws IOException, URISyntaxException {
  // create(Path, overwrite, bufferSize, replication, blockSize, progressable)
  // must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, true, 300, (short) 1, 1048576, null)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate7() throws IOException, URISyntaxException {
  // create() with an explicit FsPermission must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  FsPermission permission = new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ);
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, permission, true, 100, (short) 2, 1048576, null)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate8() throws IOException, URISyntaxException {
  // create(Path, replication) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, (short) 2)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate9() throws IOException, URISyntaxException {
  // create(Path, replication, progressable) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, (short) 2, null)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testCreate10() throws IOException, URISyntaxException {
  // create(Path, progressable) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/create");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path("/tmp/create/newFile");
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.create(newFile, null)) {
    Assert.assertEquals(fs.createTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testOpen1() throws IOException, URISyntaxException {
  // open(Path) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/Open");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path(hdfsRoot.getRootPath(), new Path("file8.ext"));
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataInputStream fstream = fs.open(newFile)) {
    Assert.assertEquals(fs.openTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testOpen2() throws IOException, URISyntaxException {
  // open(Path, bufferSize) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/Open");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path newFile = new Path(hdfsRoot.getRootPath(), new Path("file8.ext"));
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataInputStream fstream = fs.open(newFile, 100)) {
    Assert.assertEquals(fs.openTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testSetOwner() throws IOException, URISyntaxException {
  // setOwner() must be timed exactly once.
  HDFSRoot root = new HDFSRoot("/tmp/setOwner");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  Path target = new Path(root.getRootPath(), new Path("file8.ext"));
  instrumentedFs.setOwner(target, "someone", "linkedin");
  Assert.assertEquals(instrumentedFs.setOwnerTimer.getCount(), 1);
  root.cleanupRoot();
}
@Test(enabled = false)
public void testGetFileStatus() throws IOException, URISyntaxException {
  // getFileStatus() must be timed exactly once.
  HDFSRoot root = new HDFSRoot("/tmp/getFileStatus");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  instrumentedFs.getFileStatus(root.getFilePath8());
  Assert.assertEquals(instrumentedFs.getFileStatusTimer.getCount(), 1);
  root.cleanupRoot();
}
@Test(enabled = false)
public void testSetPermission() throws IOException, URISyntaxException {
  // setPermission() must be timed exactly once.
  HDFSRoot root = new HDFSRoot("/tmp/permission");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  instrumentedFs.setPermission(root.getFilePath8(),
      new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.READ));
  Assert.assertEquals(instrumentedFs.setPermissionTimer.getCount(), 1);
  root.cleanupRoot();
}
@Test(enabled = false)
public void testSetTimes() throws IOException, URISyntaxException {
  // setTimes() must be timed exactly once.
  HDFSRoot root = new HDFSRoot("/tmp/setTimes");
  MetricsFileSystemInstrumentation instrumentedFs =
      (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  long now = System.currentTimeMillis();
  instrumentedFs.setTimes(root.getFilePath8(), now, now);
  Assert.assertEquals(instrumentedFs.setTimesTimer.getCount(), 1);
  root.cleanupRoot();
}
@Test(enabled = false)
public void testAppend1() throws IOException, URISyntaxException {
  // append(Path) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/append");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.append(hdfsRoot.getFilePath8())) {
    Assert.assertEquals(fs.appendTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testAppend2() throws IOException, URISyntaxException {
  // append(Path, bufferSize) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/append");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.append(hdfsRoot.getFilePath8(), 100)) {
    Assert.assertEquals(fs.appendTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
@Test(enabled = false)
public void testAppend3() throws IOException, URISyntaxException {
  // append(Path, bufferSize, progressable) must be timed exactly once.
  HDFSRoot hdfsRoot = new HDFSRoot("/tmp/append");
  MetricsFileSystemInstrumentation
      fs = (MetricsFileSystemInstrumentation) FileSystem.get(new URI(instrumentedURI), new Configuration());
  // try-with-resources closes the stream even if the assertion fails
  // (previously the stream leaked on assertion failure).
  try (FSDataOutputStream fstream = fs.append(hdfsRoot.getFilePath8(), 100, null)) {
    Assert.assertEquals(fs.appendTimer.getCount(), 1);
  }
  hdfsRoot.cleanupRoot();
}
}
| 4,384 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ContextAwareGauge.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import lombok.experimental.Delegate;
import com.codahale.metrics.Gauge;
import org.apache.gobblin.metrics.metric.InnerMetric;
/**
* A type of {@link com.codahale.metrics.Gauge}s that that are aware of their {@link MetricContext}
* and can have associated {@link Tag}s.
*
* <p>
* This class wraps a {@link com.codahale.metrics.Gauge} and delegates calls to public methods of
* {@link com.codahale.metrics.Gauge} to the wrapped {@link com.codahale.metrics.Gauge}.
* </p>
*
* @param <T> the type of the {@link ContextAwareGauge}'s value
*
* @author Yinan Li
*/
public class ContextAwareGauge<T> implements Gauge<T>, ContextAwareMetric {

  /** The {@link MetricContext} this gauge is registered in; exposed via {@link #getContext()}. */
  private final MetricContext context;

  /** Backing {@link InnerGauge}; all {@link Gauge} calls are delegated to it. */
  @Delegate
  private final InnerGauge<T> innerGauge;

  ContextAwareGauge(MetricContext context, String name, Gauge<T> gauge) {
    this.context = context;
    this.innerGauge = new InnerGauge<T>(context, name, gauge, this);
  }

  @Override
  public MetricContext getContext() {
    return this.context;
  }

  @Override
  public InnerMetric getInnerMetric() {
    return this.innerGauge;
  }
}
| 4,385 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ContextAwareMetricFactoryArgs.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.concurrent.TimeUnit;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* A class which wraps all arguments required by {@link ContextAwareMetricFactory}s.
*
* A concrete {@link ContextAwareMetricFactory} knows how to interpret this class into its corresponding sub-type.
*/
@AllArgsConstructor
@Getter
public class ContextAwareMetricFactoryArgs {
protected final MetricContext context;
protected final String name;
@Getter
public static class SlidingTimeWindowArgs extends ContextAwareMetricFactoryArgs {
protected final long windowSize;
protected final TimeUnit unit;
public SlidingTimeWindowArgs(MetricContext context, String name, long windowSize, TimeUnit unit) {
super(context, name);
this.windowSize = windowSize;
this.unit = unit;
}
}
} | 4,386 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/InnerTimer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.lang.ref.WeakReference;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.SlidingTimeWindowReservoir;
import com.codahale.metrics.Timer;
import com.google.common.base.Optional;
import org.apache.gobblin.metrics.metric.InnerMetric;
/**
* Implementation of {@link InnerMetric} for {@link Timer}.
*/
public class InnerTimer extends Timer implements InnerMetric {

  private final String name;
  /** Timer of the same name in the parent context, if one exists; updates cascade to it. */
  private final Optional<ContextAwareTimer> parentTimer;
  /** Weak reference so the proxy metric can be garbage-collected independently of this inner metric. */
  private final WeakReference<ContextAwareTimer> timer;

  InnerTimer(MetricContext context, String name, ContextAwareTimer contextAwareTimer) {
    this.name = name;
    Optional<MetricContext> parent = context.getParent();
    this.parentTimer = parent.isPresent()
        ? Optional.fromNullable(parent.get().contextAwareTimer(name))
        : Optional.<ContextAwareTimer>absent();
    this.timer = new WeakReference<>(contextAwareTimer);
  }

  InnerTimer(MetricContext context, String name, ContextAwareTimer contextAwareTimer, long windowSize, TimeUnit unit) {
    super(new SlidingTimeWindowReservoir(windowSize, unit));
    this.name = name;
    Optional<MetricContext> parent = context.getParent();
    this.parentTimer = parent.isPresent()
        ? Optional.fromNullable(parent.get().contextAwareTimer(name, windowSize, unit))
        : Optional.<ContextAwareTimer>absent();
    this.timer = new WeakReference<>(contextAwareTimer);
  }

  @Override
  public void update(long duration, TimeUnit unit) {
    super.update(duration, unit);
    // Propagate the update to the same-named timer in the parent context.
    if (this.parentTimer.isPresent()) {
      this.parentTimer.get().update(duration, unit);
    }
  }

  public String getName() {
    return this.name;
  }

  @Override
  public ContextAwareMetric getContextAwareMetric() {
    return this.timer.get();
  }
}
| 4,387 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ContextAwareCounter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import com.codahale.metrics.Counter;
import org.apache.gobblin.metrics.metric.InnerMetric;
import org.apache.gobblin.metrics.metric.ProxyMetric;
import lombok.experimental.Delegate;
/**
* A type of {@link Counter}s that are aware of their {@link MetricContext} and can have associated
* {@link Tag}s.
*
* <p>
* Any updates to a {@link ContextAwareCounter} will be applied automatically to the
* {@link ContextAwareCounter} of the same name in the parent {@link MetricContext}.
* </p>
*
* <p>
* This class wraps a {@link com.codahale.metrics.Counter} and delegates calls to public methods of
* {@link com.codahale.metrics.Counter} to the wrapped {@link com.codahale.metrics.Counter}.
* </p>
*
* @author Yinan Li
*/
public class ContextAwareCounter extends Counter implements ProxyMetric, ContextAwareMetric {

  /** Backing {@link InnerCounter}; all {@link Counter} calls are delegated to it. */
  @Delegate
  private final InnerCounter innerCounter;

  /** The {@link MetricContext} this counter is registered in. */
  private final MetricContext metricContext;

  ContextAwareCounter(MetricContext context, String name) {
    this.metricContext = context;
    this.innerCounter = new InnerCounter(context, name, this);
  }

  @Override
  public MetricContext getContext() {
    return this.metricContext;
  }

  @Override
  public InnerMetric getInnerMetric() {
    return this.innerCounter;
  }
}
| 4,388 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ReporterType.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
/**
 * The kind of reporter a metrics sink represents: metrics only, events only,
 * both metrics and events, or a custom reporter.
 */
public enum ReporterType {
  METRIC, EVENT, METRIC_EVENT, CUSTOM;

  /**
   * @param t the reporter type to check
   * @return true if {@code t} reports metrics, i.e. it is {@link #METRIC} or {@link #METRIC_EVENT}
   */
  public static boolean isReporterTypeMetric(ReporterType t) {
    // Identity comparison is the idiomatic way to compare enum constants, and
    // (unlike equals()) it returns false instead of throwing NPE for a null arg.
    return t == METRIC || t == METRIC_EVENT;
  }

  /**
   * @param t the reporter type to check
   * @return true if {@code t} reports events, i.e. it is {@link #EVENT} or {@link #METRIC_EVENT}
   */
  public static boolean isReporterTypeEvent(ReporterType t) {
    return t == EVENT || t == METRIC_EVENT;
  }
}
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/MetricTagNames.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
/**
 * Holder for well-known metric tag names.
 */
public class MetricTagNames {

  /** Tag naming the metrics backend a metric is represented in. */
  public static final String METRIC_BACKEND_REPRESENTATION = "metricBackendRepresentation";

  private MetricTagNames() {
    // Constants holder; not meant to be instantiated.
  }
}
| 4,390 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ContextAwareHistogram.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.concurrent.TimeUnit;
import lombok.experimental.Delegate;
import com.codahale.metrics.ExponentiallyDecayingReservoir;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.SlidingTimeWindowReservoir;
import org.apache.gobblin.metrics.metric.InnerMetric;
/**
* A type of {@link com.codahale.metrics.Histogram} that are aware of their {@link org.apache.gobblin.metrics.MetricContext}
* and can have associated {@link Tag}s.
*
* <p>
* Any updates to a {@link ContextAwareHistogram} will be applied automatically to the
* {@link ContextAwareHistogram} of the same name in the parent {@link MetricContext}.
* </p>
*
* <p>
* This class wraps a {@link com.codahale.metrics.Histogram} and delegates calls to public methods of
* {@link com.codahale.metrics.Histogram} to the wrapped {@link com.codahale.metrics.Histogram}.
* </p>
*
* @author Yinan Li
*/
public class ContextAwareHistogram extends Histogram implements ContextAwareMetric {

  /** The {@link MetricContext} this histogram is registered in. */
  private final MetricContext context;

  /** Backing {@link InnerHistogram}; all {@link Histogram} calls are delegated to it. */
  @Delegate
  private final InnerHistogram innerHistogram;

  ContextAwareHistogram(MetricContext context, String name) {
    // Default reservoir: exponentially decaying sample.
    super(new ExponentiallyDecayingReservoir());
    this.context = context;
    this.innerHistogram = new InnerHistogram(context, name, this);
  }

  ContextAwareHistogram(MetricContext context, String name, long windowSize, TimeUnit unit) {
    // Sliding-time-window reservoir variant.
    super(new SlidingTimeWindowReservoir(windowSize, unit));
    this.context = context;
    this.innerHistogram = new InnerHistogram(context, name, this, windowSize, unit);
  }

  @Override
  public MetricContext getContext() {
    return this.context;
  }

  @Override
  public InnerMetric getInnerMetric() {
    return this.innerHistogram;
  }
}
| 4,391 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/InnerMeter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.lang.ref.WeakReference;
import com.codahale.metrics.Meter;
import com.google.common.base.Optional;
import org.apache.gobblin.metrics.metric.InnerMetric;
/**
* Implementation of {@link InnerMetric} for {@link Meter}.
*/
public class InnerMeter extends Meter implements InnerMetric {

  private final String name;
  /** Meter of the same name in the parent context, if one exists; marks cascade to it. */
  private final Optional<ContextAwareMeter> parentMeter;
  /** Weak reference so the proxy metric can be garbage-collected independently of this inner metric. */
  private final WeakReference<ContextAwareMeter> contextAwareMeter;

  InnerMeter(MetricContext context, String name, ContextAwareMeter contextAwareMeter) {
    this.name = name;
    Optional<MetricContext> parent = context.getParent();
    this.parentMeter = parent.isPresent()
        ? Optional.fromNullable(parent.get().contextAwareMeter(name))
        : Optional.<ContextAwareMeter>absent();
    this.contextAwareMeter = new WeakReference<>(contextAwareMeter);
  }

  @Override
  public void mark(long n) {
    super.mark(n);
    // Propagate the mark to the same-named meter in the parent context.
    if (this.parentMeter.isPresent()) {
      this.parentMeter.get().mark(n);
    }
  }

  public String getName() {
    return this.name;
  }

  @Override
  public ContextAwareMetric getContextAwareMetric() {
    return this.contextAwareMeter.get();
  }
}
| 4,392 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/Tag.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.AbstractMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
/**
* A class representing a dimension or property associated with a {@link Taggable}.
*
* @param <T> type of the tag value
*
* @author Yinan Li
*/
public class Tag<T> extends AbstractMap.SimpleEntry<String, T> {

  private static final long serialVersionUID = -5083709915031933607L;

  /** Separator between key and value in the string form "key:value". */
  private static final char KEY_VALUE_SEPARATOR = ':';

  /**
   * Parses a string of the form "key:value" into a {@link Tag} — the reverse of {@link #toString()}.
   *
   * <p>
   * Only the first ':' separates key from value; any subsequent ':' characters are part of the value.
   * </p>
   *
   * @param tagKeyValue string of the form "key:value"
   * @return the parsed {@link Tag}, or {@code null} if the input does not yield both a key and a value
   */
  public static Tag<String> fromString(String tagKeyValue) {
    List<String> parts = Splitter.on(KEY_VALUE_SEPARATOR).limit(2).omitEmptyStrings().splitToList(tagKeyValue);
    return parts.size() == 2 ? new Tag<String>(parts.get(0), parts.get(1)) : null;
  }

  public Tag(String key, T value) {
    super(key, value);
  }

  public Tag(Map.Entry<? extends String, ? extends T> entry) {
    super(entry);
  }

  /**
   * Builds one {@link Tag} per entry of the given {@link Map}.
   *
   * @param tagsMap key/value pairs to convert
   * @return an immutable {@link List} of {@link Tag}s
   */
  public static <T> List<Tag<T>> fromMap(Map<? extends String, T> tagsMap) {
    ImmutableList.Builder<Tag<T>> listBuilder = ImmutableList.builder();
    for (Map.Entry<? extends String, T> entry : tagsMap.entrySet()) {
      listBuilder.add(new Tag<>(entry));
    }
    return listBuilder.build();
  }

  /**
   * Collects the given {@link Tag}s into an immutable key/value {@link Map}.
   *
   * @param tags {@link Tag}s to convert
   * @return an immutable {@link Map} of the tags' keys to their values
   */
  public static <T> Map<? extends String, T> toMap(List<Tag<T>> tags) {
    ImmutableMap.Builder<String, T> mapBuilder = ImmutableMap.builder();
    for (Tag<T> tag : tags) {
      mapBuilder.put(tag.getKey(), tag.getValue());
    }
    return mapBuilder.build();
  }

  /**
   * Returns a view of the given list with every tag's value stringified via
   * {@link #tagValueToString(Tag)}. Null elements map to null.
   *
   * @param tags {@link Tag}s whose values should be converted to {@link String}
   * @return a lazily transformed {@link List} of {@link String}-valued {@link Tag}s
   *
   * @see #tagValueToString(Tag)
   */
  public static List<Tag<String>> tagValuesToString(List<? extends Tag<?>> tags) {
    return Lists.transform(tags, new Function<Tag<?>, Tag<String>>() {
      @Nullable @Override public Tag<String> apply(Tag<?> input) {
        return input == null ? null : Tag.tagValueToString(input);
      }
    });
  }

  /**
   * Returns a {@link String}-valued copy of the given tag, converting its value
   * with {@link Object#toString()}.
   *
   * @param tag the {@link Tag} to convert
   * @return a {@link Tag} with a {@link String} value
   */
  public static Tag<String> tagValueToString(Tag<?> tag) {
    return new Tag<>(tag.getKey(), tag.getValue().toString());
  }

  @Override
  public String toString() {
    return getKey() + KEY_VALUE_SEPARATOR + getValue();
  }
}
| 4,393 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/Counters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.Arrays;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
/**
 * Class that holds {@link Counter}s for each value of an enum. The metric generated will have the name of the enum.
 *
 * @see Counters#initialize(MetricContext, Class, Class)
 * @param <E> enum type whose constants name the counters
 */
public class Counters<E extends Enum<E>> {

  // Immutable mapping from enum constant to its registered counter.
  // Remains null until initialize() is called; both inc() and getCount() tolerate that state.
  private ImmutableMap<E, Counter> counters;

  /**
   * Creates a {@link Counter} for every value of the enumClass.
   * Use {@link #inc(Enum, long)} to increment the counter associated with an enum value.
   *
   * @param metricContext context with which the {@link Counter}s will be registered
   * @param enumClass enum whose constants define the names of the {@link Counter}s; one counter is created per value
   * @param instrumentedClass class whose name will be prefixed in the metric name
   */
  public void initialize(final MetricContext metricContext, final Class<E> enumClass, final Class<?> instrumentedClass) {
    Builder<E, Counter> builder = ImmutableMap.builder();
    for (E e : enumClass.getEnumConstants()) {
      builder.put(e, metricContext.counter(MetricRegistry.name(instrumentedClass, e.name())));
    }
    counters = builder.build();
  }

  /**
   * Increments the counter associated with the enum value passed. A no-op if
   * {@link #initialize(MetricContext, Class, Class)} has not been called yet.
   *
   * @param e counter to increment
   * @param n the amount to increment by
   */
  public void inc(E e, long n) {
    if (counters != null && counters.containsKey(e)) {
      counters.get(e).inc(n);
    }
  }

  /**
   * Gets the count for the counter associated with the enum value passed.
   *
   * @param e counter to query
   * @return the count for this counter, or 0 if the counter is absent or
   *         {@link #initialize(MetricContext, Class, Class)} has not been called yet
   */
  public long getCount(E e) {
    // Guard against use before initialize(): previously this threw NullPointerException
    // while inc() silently tolerated the same uninitialized state.
    if (counters == null) {
      return 0L;
    }
    // Single lookup instead of containsKey() + get().
    Counter counter = counters.get(e);
    return counter == null ? 0L : counter.getCount();
  }
}
| 4,394 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/ConsoleEventReporterFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.IOException;
import java.util.Properties;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import org.apache.gobblin.metrics.reporter.OutputStreamEventReporter;
/**
 * A reporter factory that reports events to the console.
 *
 * <p>
 * Set metrics.reporting.custom.builders=org.apache.gobblin.metrics.ConsoleEventReporterFactory to report event to console
 * </p>
 */
public class ConsoleEventReporterFactory implements CustomCodahaleReporterFactory {

  /**
   * Builds a console event reporter for the given registry, which must be a {@link MetricContext}.
   *
   * @throws IOException if the registry is not a {@link MetricContext}
   */
  @Override
  public ScheduledReporter newScheduledReporter(MetricRegistry registry, Properties properties)
      throws IOException {
    try {
      MetricContext context = MetricContext.class.cast(registry);
      return OutputStreamEventReporter.forContext(context).build();
    } catch (ClassCastException cce) {
      // Surface the incompatible registry type as an IOException, per the factory contract.
      throw new IOException(cce);
    }
  }
}
| 4,395 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/CustomCodahaleReporterFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.io.IOException;
import java.util.Properties;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
/**
 * Factory for custom {@link ScheduledReporter}s. Implementations should have a parameter-less constructor.
 */
public interface CustomCodahaleReporterFactory {

  /**
   * Builds and returns a new {@link ScheduledReporter}.
   *
   * @param registry {@link MetricRegistry} for which metrics should be reported
   * @param properties {@link Properties} used to build the reporter
   * @return a new {@link ScheduledReporter}
   * @throws IOException if the reporter cannot be constructed
   */
  ScheduledReporter newScheduledReporter(MetricRegistry registry, Properties properties)
      throws IOException;
}
| 4,396 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/RootMetricContext.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import com.google.common.base.Optional;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.MoreExecutors;
import org.apache.gobblin.metrics.context.ContextWeakReference;
import org.apache.gobblin.metrics.context.NameConflictException;
import org.apache.gobblin.metrics.notification.MetricContextCleanupNotification;
import org.apache.gobblin.metrics.notification.NewMetricContextNotification;
import org.apache.gobblin.metrics.reporter.ContextAwareReporter;
import org.apache.gobblin.util.ExecutorsUtils;
/**
 * Special singleton {@link MetricContext} used as the root of the {@link MetricContext} tree. This is the only
 * {@link MetricContext} that is allowed to not have a parent. Any {@link MetricContext} that does not explicitly
 * have a parent will automatically become a child of the {@link RootMetricContext}.
 */
@Slf4j
public class RootMetricContext extends MetricContext {

  public static final String ROOT_METRIC_CONTEXT = "RootMetricContext";

  // Queue onto which the JVM enqueues weak references to garbage-collected child contexts;
  // drained every 2 seconds by CheckReferenceQueue.
  @Getter
  private final ReferenceQueue<MetricContext> referenceQueue;
  // Inner contexts of registered child contexts; entries are removed once their owning context is collected.
  private final Set<InnerMetricContext> innerMetricContexts;
  // Single-thread scheduler that runs the reference-queue drain task.
  private final ScheduledExecutorService referenceQueueExecutorService;
  // All ContextAwareReporters managed (started/stopped/closed) by this root context.
  @Getter
  private final Set<ContextAwareReporter> reporters;
  // True between startReporting() and stopReporting(); reporters added while true are started eagerly.
  private volatile boolean reportingStarted;

  private RootMetricContext(List<Tag<?>> tags) throws NameConflictException {
    // Parent is null: this is the only context allowed at the root of the tree.
    super(ROOT_METRIC_CONTEXT, null, tags, true);
    this.innerMetricContexts = Sets.newConcurrentHashSet();
    this.referenceQueue = new ReferenceQueue<>();
    // Exiting executor so the drain thread does not keep the JVM alive at shutdown.
    this.referenceQueueExecutorService = ExecutorsUtils.loggingDecorator(MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1,
        ExecutorsUtils.newThreadFactory(Optional.of(log), Optional.of("GobblinMetrics-ReferenceQueue")))));
    this.referenceQueueExecutorService.scheduleWithFixedDelay(new CheckReferenceQueue(), 0, 2, TimeUnit.SECONDS);
    this.reporters = Sets.newConcurrentHashSet();
    this.reportingStarted = false;
    addShutdownHook();
  }

  // Creates the singleton; only ever called from the synchronized get(List) below.
  private static void initialize(List<Tag<?>> tags) {
    try {
      INSTANCE = new RootMetricContext(tags);
    } catch (NameConflictException nce) {
      // Should never happen, as there is no parent, so no conflict.
      throw new IllegalStateException("Failed to generate root metric context. This is an error in the code.", nce);
    }
  }

  /**
   * Get the singleton {@link RootMetricContext}.
   * @return singleton instance of {@link RootMetricContext}.
   */
  public synchronized static RootMetricContext get() {
    return get(Lists.<Tag<?>>newArrayList());
  }

  /**
   * Get the singleton {@link RootMetricContext}, adding the specified tags if and only if this is the first call.
   * Tags passed on subsequent calls are silently ignored.
   * @param tags tags applied to the root context when it is lazily created.
   * @return singleton instance of {@link RootMetricContext}.
   */
  public synchronized static RootMetricContext get(List<Tag<?>> tags) {
    if (INSTANCE == null) {
      initialize(tags);
    }
    return INSTANCE;
  }

  // Lazily initialized singleton; access is guarded by the class lock via the synchronized accessors above.
  private static RootMetricContext INSTANCE;

  /**
   * Checks the {@link ReferenceQueue} to find any {@link MetricContext}s that have been garbage collected, and sends a
   * {@link MetricContextCleanupNotification} to all targets.
   */
  private class CheckReferenceQueue implements Runnable {
    @Override
    public void run() {
      Reference<? extends MetricContext> reference;
      // poll() is non-blocking; loop until the queue is drained for this pass.
      while((reference = referenceQueue.poll()) != null) {
        ContextWeakReference contextReference = (ContextWeakReference)reference;
        sendNotification(new MetricContextCleanupNotification(contextReference.getInnerContext()));
        innerMetricContexts.remove(contextReference.getInnerContext());
      }
    }
  }

  /**
   * Add a new {@link ContextAwareReporter} to the {@link RootMetricContext} for it to manage.
   * The reporter is registered with the closer (closed when this context closes) and is started
   * immediately if reporting is already underway.
   * @param reporter {@link ContextAwareReporter} to manage.
   */
  public void addNewReporter(ContextAwareReporter reporter) {
    this.reporters.add(this.closer.register(reporter));
    if (this.reportingStarted) {
      reporter.start();
    }
  }

  /**
   * Remove {@link ContextAwareReporter} from the set of managed reporters, stopping it first.
   * NOTE(review): the contains/stop/remove sequence is not atomic; concurrent callers could both
   * invoke stop() on the same reporter -- confirm reporters tolerate repeated stop() calls.
   * @param reporter {@link ContextAwareReporter} to remove.
   */
  public void removeReporter(ContextAwareReporter reporter) {
    if (this.reporters.contains(reporter)) {
      reporter.stop();
      this.reporters.remove(reporter);
    }
  }

  /**
   * Start all {@link ContextAwareReporter}s managed by the {@link RootMetricContext}.
   */
  public void startReporting() {
    this.reportingStarted = true;
    for (ContextAwareReporter reporter : this.reporters) {
      try {
        reporter.start();
      } catch (Throwable throwable) {
        // Catch per reporter so one failing reporter does not prevent the others from starting.
        log.error(String.format("Failed to start reporter with class %s", reporter.getClass().getCanonicalName()),
            throwable);
      }
    }
  }

  /**
   * Stop all {@link ContextAwareReporter}s managed by the {@link RootMetricContext}.
   */
  public void stopReporting() {
    this.reportingStarted = false;
    for (ContextAwareReporter reporter : this.reporters) {
      try {
        reporter.stop();
      } catch (Throwable throwable) {
        // Catch per reporter so one failing reporter does not prevent the others from stopping.
        log.error(String.format("Failed to stop reporter with class %s", reporter.getClass().getCanonicalName()),
            throwable);
      }
    }
  }

  /**
   * Record a newly created child context and notify all targets of its creation.
   */
  protected void addMetricContext(MetricContext context) {
    this.innerMetricContexts.add(context.getInnerMetricContext());
    this.sendNotification(new NewMetricContextNotification(context, context.getInnerMetricContext()));
  }

  /**
   * Add a shutdown hook that first invokes {@link #stopReporting()} and then closes the {@link RootMetricContext}. This
   * ensures all reporting started on the {@link RootMetricContext} stops properly and any resources obtained by the
   * {@link RootMetricContext} are released.
   */
  private void addShutdownHook() {
    Runtime.getRuntime().addShutdownHook(new Thread() {
      @Override
      public void run() {
        stopReporting();
        try {
          close();
        } catch (IOException e) {
          log.warn("Unable to close " + this.getClass().getCanonicalName(), e);
        }
      }
    });
  }
}
| 4,397 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/MetricContextUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.concurrent.atomic.AtomicLong;
import com.codahale.metrics.Gauge;
import com.google.common.util.concurrent.AtomicDouble;
public class MetricContextUtils {
private MetricContextUtils() {
}
public static void registerGauge(MetricContext metricContext, String metricName, AtomicLong atomicLong) {
Gauge<Long> gauge = metricContext.newContextAwareGauge(metricName, () -> atomicLong.get());
metricContext.register(metricName, gauge);
}
public static void registerGauge(MetricContext metricContext, String metricName, AtomicDouble atomicDouble) {
Gauge<Double> gauge = metricContext.newContextAwareGauge(metricName, () -> atomicDouble.get());
metricContext.register(metricName, gauge);
}
} | 4,398 |
0 | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/TagBasedMetricFilter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.metrics;
import java.util.Collection;
import com.codahale.metrics.Metric;
import com.codahale.metrics.MetricFilter;
import com.google.common.collect.ImmutableSet;
/**
 * A {@link com.codahale.metrics.MetricFilter} that matches {@link com.codahale.metrics.Metric}s
 * of type {@link Taggable} carrying all of a given collection of {@link Tag}s.
 *
 * @author Yinan Li
 */
public class TagBasedMetricFilter implements MetricFilter {

  // Immutable snapshot of the tags every matching metric must carry.
  private final Collection<Tag<?>> tags;

  public TagBasedMetricFilter(Collection<Tag<?>> tags) {
    this.tags = ImmutableSet.copyOf(tags);
  }

  /**
   * @return true iff {@code metric} is {@link Taggable} and its tags include every required tag.
   */
  @Override
  public boolean matches(String name, Metric metric) {
    if (!(metric instanceof Taggable)) {
      return false;
    }
    Taggable taggable = (Taggable) metric;
    return taggable.getTags().containsAll(this.tags);
  }
}
| 4,399 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.