index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin/admin/AdminWebServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.admin;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.AbstractIdleService;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.Properties;
/**
* Serves the admin UI interface using embedded Jetty.
*/
public class AdminWebServer extends AbstractIdleService {
  private static final Logger LOGGER = LoggerFactory.getLogger(AdminWebServer.class);

  // URI of the backing REST server; injected into the UI through /js/settings.js.
  private final URI restServerUri;
  // Address this admin UI server binds to (http://<host>:<port> from properties).
  private final URI serverUri;
  // Raw property value forwarded verbatim into the generated settings JS.
  private final String hideJobsWithoutTasksByDefault;
  // UI refresh interval, forwarded verbatim into the generated settings JS.
  private final long refreshInterval;

  protected Server server;

  /**
   * Creates the admin web server (does not start it; see {@link #startUp()}).
   *
   * @param properties configuration supplying host, port and UI settings; must not be null
   * @param restServerUri address of the REST server the UI queries; must not be null
   */
  public AdminWebServer(Properties properties, URI restServerUri) {
    Preconditions.checkNotNull(properties);
    Preconditions.checkNotNull(restServerUri);
    this.restServerUri = restServerUri;
    int port = getPort(properties);
    this.serverUri = URI.create(String.format("http://%s:%d", getHost(properties), port));
    this.hideJobsWithoutTasksByDefault = properties.getProperty(
        ConfigurationKeys.ADMIN_SERVER_HIDE_JOBS_WITHOUT_TASKS_BY_DEFAULT_KEY,
        ConfigurationKeys.DEFAULT_ADMIN_SERVER_HIDE_JOBS_WITHOUT_TASKS_BY_DEFAULT);
    this.refreshInterval = getRefreshInterval(properties);
  }

  /** Starts the embedded Jetty server with the settings handler and the static-content handler. */
  @Override
  protected void startUp() throws Exception {
    LOGGER.info("Starting the admin web server");
    this.server = new Server(new InetSocketAddress(this.serverUri.getHost(), this.serverUri.getPort()));
    HandlerCollection handlerCollection = new HandlerCollection();
    // The settings handler is registered first so it can mark /js/settings.js as
    // handled before the static resource handler runs (relies on Jetty invoking
    // handlers in registration order).
    handlerCollection.addHandler(buildSettingsHandler());
    handlerCollection.addHandler(buildStaticResourceHandler());
    this.server.setHandler(handlerCollection);
    this.server.start();
  }

  /**
   * Builds a handler that serves a dynamically generated /js/settings.js exposing
   * the REST server URL and UI settings to the front end. All other requests are
   * left unhandled so they fall through to the static resource handler.
   */
  private Handler buildSettingsHandler() {
    final String responseTemplate = "var Gobblin = window.Gobblin || {};" + "Gobblin.settings = {restServerUrl:\"%s\", hideJobsWithoutTasksByDefault:%s, refreshInterval:%s}";
    return new AbstractHandler() {
      @Override
      public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
          throws IOException, ServletException {
        if (request.getRequestURI().equals("/js/settings.js")) {
          response.setContentType("application/javascript");
          response.setStatus(HttpServletResponse.SC_OK);
          response.getWriter().println(String.format(responseTemplate, AdminWebServer.this.restServerUri.toString(),
              AdminWebServer.this.hideJobsWithoutTasksByDefault, AdminWebServer.this.refreshInterval));
          baseRequest.setHandled(true);
        }
      }
    };
  }

  /**
   * Builds a handler serving the packaged "static" classpath directory, with
   * directory listings enabled and index.html as the welcome file.
   */
  private ResourceHandler buildStaticResourceHandler() {
    ResourceHandler staticResourceHandler = new ResourceHandler();
    staticResourceHandler.setDirectoriesListed(true);
    staticResourceHandler.setWelcomeFiles(new String[] { "index.html" });
    // BUGFIX: getResource() returns null when the "static" directory is missing
    // from the classpath; fail fast with a descriptive message instead of letting
    // toExternalForm() throw an opaque NullPointerException.
    java.net.URL staticResource = getClass().getClassLoader().getResource("static");
    Preconditions.checkNotNull(staticResource, "Could not find the 'static' resource directory on the classpath");
    staticResourceHandler.setResourceBase(staticResource.toExternalForm());
    return staticResourceHandler;
  }

  /** Stops the Jetty server if it was started. */
  @Override
  protected void shutDown() throws Exception {
    if (this.server != null) {
      this.server.stop();
    }
  }

  /** @return the configured admin server port, falling back to the default */
  private static int getPort(Properties properties) {
    return Integer.parseInt(
        properties.getProperty(ConfigurationKeys.ADMIN_SERVER_PORT_KEY, ConfigurationKeys.DEFAULT_ADMIN_SERVER_PORT));
  }

  /** @return the configured admin server host, falling back to the default */
  private static String getHost(Properties properties) {
    return properties.getProperty(ConfigurationKeys.ADMIN_SERVER_HOST_KEY, ConfigurationKeys.DEFAULT_ADMIN_SERVER_HOST);
  }

  /** @return the configured UI refresh interval, falling back to the default */
  private static long getRefreshInterval(Properties properties) {
    return Long.parseLong(
        properties.getProperty(ConfigurationKeys.ADMIN_SERVER_REFRESH_INTERVAL_KEY,
            "" + ConfigurationKeys.DEFAULT_ADMIN_SERVER_REFRESH_INTERVAL));
  }
}
| 2,700 |
0 | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin/cli/CliTablePrinter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.cli;
import java.util.List;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Ints;
/**
* A format helper for CLI output. Unfortunately it only supports strings, so
* values need to be converted previous to passing in. This is done in order to
* support table-like formatting.
* <p/>
* It's recommended that this class is built using the inner {@link Builder} class.
*
* @author ahollenbach@nerdwallet.com
*/
public class CliTablePrinter {
  /** Column header labels; null means no header row is printed. */
  private List<String> labels;
  /** printf-style flag strings, one per column (e.g. "-" for left alignment); may be null. */
  private List<String> flags;
  /** Number of spaces the whole table is indented by. */
  private int indentation;
  /** Number of spaces printed between adjacent columns. */
  private int delimiterWidth;
  /** Table body: rows of cell strings. */
  private List<List<String>> data;
  /** printf format string applied to the header row and every data row. */
  private String rowFormat;

  /**
   * Creates a table printer.
   *
   * @param labels column labels, or null for no header row; size must equal the column count
   * @param flags printf flag strings per column, or null; size must equal the column count
   * @param indentation leading spaces before every row
   * @param delimiterWidth spaces between columns
   * @param data table data; must contain at least one row with at least one column
   * @throws IllegalArgumentException if data is empty or the label/flag sizes mismatch
   */
  public CliTablePrinter(List<String> labels, List<String> flags, int indentation, int delimiterWidth,
      List<List<String>> data) {
    if (data.size() <= 0) {
      throw new IllegalArgumentException("data must contain at least one row");
    }
    if (data.get(0).size() <= 0) {
      throw new IllegalArgumentException("data rows must contain at least one column");
    }
    if (labels != null && labels.size() != data.get(0).size()) {
      throw new IllegalArgumentException("labels size must equal the column count");
    }
    if (flags != null && flags.size() != data.get(0).size()) {
      throw new IllegalArgumentException("flags size must equal the column count");
    }
    this.labels = labels;
    this.flags = flags;
    this.indentation = indentation;
    this.delimiterWidth = delimiterWidth;
    this.data = data;
    this.rowFormat = getRowFormat(getColumnMaxWidths());
  }

  /**
   * Used to build a {@link CliTablePrinter} object.
   */
  public static final class Builder {
    private List<String> labels;
    private List<String> flags;
    private int indentation;
    private int delimiterWidth;
    private List<List<String>> data;

    public Builder() {
      // Default to a single space between columns.
      this.delimiterWidth = 1;
    }

    public Builder labels(List<String> labels) {
      this.labels = labels;
      return this;
    }

    public Builder data(List<List<String>> data) {
      this.data = data;
      return this;
    }

    public Builder indentation(int indentation) {
      this.indentation = indentation;
      return this;
    }

    public Builder delimiterWidth(int delimiterWidth) {
      this.delimiterWidth = delimiterWidth;
      return this;
    }

    public Builder flags(List<String> flags) {
      this.flags = flags;
      return this;
    }

    public CliTablePrinter build() {
      return new CliTablePrinter(this.labels, this.flags, this.indentation, this.delimiterWidth, this.data);
    }
  }

  /**
   * Prints the header row (if labels were supplied) followed by every data row to stdout.
   */
  public void printTable() {
    if (this.labels != null) {
      System.out.printf(this.rowFormat, this.labels.toArray());
    }
    for (List<String> row : this.data) {
      System.out.printf(this.rowFormat, row.toArray());
    }
  }

  /**
   * Computes the display width of each column: the longest cell (or label) in it.
   * Null cells count as 4 characters because printf renders null as "null".
   *
   * @return one width per column, each at least 1
   */
  private int[] getColumnMaxWidths() {
    int numCols = this.data.get(0).size();
    int[] widths = new int[numCols];
    if (this.labels != null) {
      for (int i = 0; i < numCols; i++) {
        widths[i] = this.labels.get(i).length();
      }
    }
    for (List<String> row : this.data) {
      for (int i = 0; i < row.size(); i++) {
        widths[i] = Math.max(widths[i], row.get(i) == null ? 4 : row.get(i).length());
      }
    }
    for (int i = 0; i < numCols; i++) {
      // BUGFIX: a zero width would yield the illegal format "%0s" ('0' is parsed as
      // a flag that requires a width) and make printf throw at print time; clamp so
      // a column containing only empty strings still prints.
      widths[i] = Math.max(widths[i], 1);
    }
    return widths;
  }

  /**
   * Generates the printf format string for one row, e.g. {@code "%-10s  %5s \n"}.
   *
   * @param widths minimum width for each column
   * @return format string terminated by a newline
   */
  private String getRowFormat(int[] widths) {
    StringBuilder rowFormat = new StringBuilder(spaces(this.indentation));
    for (int i = 0; i < widths.length; i++) {
      rowFormat.append('%');
      if (this.flags != null) {
        rowFormat.append(this.flags.get(i));
      }
      rowFormat.append(widths[i]);
      rowFormat.append('s');
      rowFormat.append(spaces(this.delimiterWidth));
    }
    rowFormat.append('\n');
    return rowFormat.toString();
  }

  /** @return a string consisting of {@code numSpaces} space characters */
  private static String spaces(int numSpaces) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < numSpaces; i++) {
      sb.append(' ');
    }
    return sb.toString();
  }
}
| 2,701 |
0 | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin/cli/AdminClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.cli;
import com.google.common.base.Optional;
import com.google.common.io.Closer;
import com.linkedin.r2.RemoteInvocationException;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.rest.*;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.List;
/**
* Simple wrapper around the JobExecutionInfoClient
*/
public class AdminClient {
  /** Rest.li client for the job execution info endpoint. */
  private final JobExecutionInfoClient client;
  /** Collects closeable resources so {@link #close()} can release them all at once. */
  private Closer closer;

  /**
   * Creates a new client with the host and port specified.
   */
  public AdminClient(String host, int port) {
    this.closer = Closer.create();
    String serverUrl = URI.create(String.format("http://%s:%d/", host, port)).toString();
    this.client = this.closer.register(new JobExecutionInfoClient(serverUrl));
  }

  /**
   * Close connections to the REST server. Best-effort: failures are reported
   * to stderr rather than propagated.
   */
  public void close() {
    try {
      this.closer.close();
    } catch (IOException ioe) {
      ioe.printStackTrace();
    }
  }

  /**
   * Retrieve a Gobblin job by its id.
   *
   * @param id Id of the job to retrieve
   * @return JobExecutionInfo representing the job
   */
  public Optional<JobExecutionInfo> queryByJobId(String id) throws RemoteInvocationException {
    JobExecutionQuery q = new JobExecutionQuery();
    q.setIdType(QueryIdTypeEnum.JOB_ID);
    q.setId(JobExecutionQuery.Id.create(id));
    q.setLimit(1);
    return getFirstFromQueryResults(executeQuery(q));
  }

  /**
   * Retrieve all jobs
   *
   * @param lookupType Query type
   * @param resultsLimit Max # of results to return
   * @return List of all jobs (limited by results limit)
   */
  public List<JobExecutionInfo> queryAllJobs(QueryListType lookupType, int resultsLimit)
      throws RemoteInvocationException {
    JobExecutionQuery q = new JobExecutionQuery();
    q.setIdType(QueryIdTypeEnum.LIST_TYPE);
    q.setId(JobExecutionQuery.Id.create(lookupType));
    // Restrict job properties to the two scheduling-related keys and skip task
    // executions, which keeps the response size from ballooning.
    q.setJobProperties(ConfigurationKeys.JOB_RUN_ONCE_KEY + "," + ConfigurationKeys.JOB_SCHEDULE_KEY);
    q.setIncludeTaskExecutions(false);
    q.setLimit(resultsLimit);
    return executeQuery(q);
  }

  /**
   * Query jobs by name
   *
   * @param name Name of the job to query for
   * @param resultsLimit Max # of results to return
   * @return List of jobs with the name (empty list if none can be found)
   */
  public List<JobExecutionInfo> queryByJobName(String name, int resultsLimit) throws RemoteInvocationException {
    JobExecutionQuery q = new JobExecutionQuery();
    q.setIdType(QueryIdTypeEnum.JOB_NAME);
    q.setId(JobExecutionQuery.Id.create(name));
    q.setIncludeTaskExecutions(false);
    q.setLimit(resultsLimit);
    return executeQuery(q);
  }

  /**
   * Execute a query and coerce the result into a java List.
   *
   * @param query Query to execute
   * @return List of jobs that matched the query (empty list if none did)
   * @throws RemoteInvocationException If the server throws an error
   */
  private List<JobExecutionInfo> executeQuery(JobExecutionQuery query) throws RemoteInvocationException {
    JobExecutionQueryResult result = this.client.get(query);
    if (result == null || !result.hasJobExecutions()) {
      return Collections.emptyList();
    }
    return result.getJobExecutions();
  }

  /** @return the first element of the results, or absent when there are none */
  private static Optional<JobExecutionInfo> getFirstFromQueryResults(List<JobExecutionInfo> results) {
    return (results == null || results.isEmpty())
        ? Optional.<JobExecutionInfo>absent()
        : Optional.of(results.get(0));
  }
}
| 2,702 |
0 | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin/cli/JobCommand.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.cli;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.linkedin.r2.RemoteInvocationException;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.rest.JobExecutionInfo;
import org.apache.gobblin.rest.QueryListType;
import org.apache.gobblin.runtime.cli.CliApplication;
/**
* Logic to print out job state
*/
@Slf4j
@Alias(value = "jobs", description = "Command line job info and operations")
public class JobCommand implements CliApplication {
  private Options options;

  /** Thrown by sub-commands to signal a user-facing error (bad args, server failure). */
  private static class CommandException extends Exception {
    private static final long serialVersionUID = 1L;

    public CommandException(String msg) {
      super(msg);
    }
  }

  /** A single "jobs" sub-command (list, details, properties). */
  private interface SubCommand {
    void execute(CommandLine parsedArgs, AdminClient adminClient, int resultsLimit)
        throws CommandException;
  }

  private static final String ADMIN_SERVER = "host";
  private static final String DEFAULT_ADMIN_SERVER = "localhost";
  private static final int DEFAULT_ADMIN_PORT = 8080;
  private static final String ADMIN_PORT = "port";
  private static final String HELP_OPT = "help";
  private static final String DETAILS_OPT = "details";
  private static final String LIST_OPT = "list";
  private static final String NAME_OPT = "name";
  private static final String ID_OPT = "id";
  private static final String PROPS_OPT = "properties";
  private static final String RECENT_OPT = "recent";
  private static final int DEFAULT_RESULTS_LIMIT = 10;

  // Maps each action option to its handler; HELP_OPT is handled separately in run().
  private static final Map<String, SubCommand> subCommandMap =
      ImmutableMap.of(
          LIST_OPT, new ListAllItemsCommand(),
          DETAILS_OPT, new ListOneItemWithDetails(),
          PROPS_OPT, new ListItemsWithPropertiesCommand()
      );

  /**
   * Resolves the sub-command selected on the command line.
   * Prints usage and exits the process when no known sub-command option is present.
   */
  private SubCommand getAction(CommandLine parsedOpts) {
    for (Map.Entry<String, SubCommand> entry : subCommandMap.entrySet()) {
      if (parsedOpts.hasOption(entry.getKey())) {
        return entry.getValue();
      }
    }
    // Show the full usage text so the user can see the valid sub-commands.
    printHelpAndExit("Unknown subcommand", true);
    throw new IllegalStateException("unreached...");
  }

  /**
   * Entry point: parses the arguments, connects to the admin server and
   * dispatches to the selected sub-command.
   */
  @Override
  public void run(String[] args) throws Exception {
    this.options = createCommandLineOptions();
    DefaultParser parser = new DefaultParser();
    AdminClient adminClient = null;
    try {
      CommandLine parsedOpts = parser.parse(options, args);
      // BUGFIX: -h/--help belongs to the required action group but previously had
      // no handler, so asking for help fell through to "Unknown subcommand" and
      // exited with status 1. Print usage and return successfully instead.
      if (parsedOpts.hasOption(HELP_OPT)) {
        new HelpFormatter().printHelp("gobblin-admin.sh jobs [options]", this.options);
        return;
      }
      int resultLimit = parseResultsLimit(parsedOpts);
      String host = parsedOpts.hasOption(ADMIN_SERVER) ?
          parsedOpts.getOptionValue(ADMIN_SERVER) : DEFAULT_ADMIN_SERVER;
      int port = DEFAULT_ADMIN_PORT;
      try {
        if (parsedOpts.hasOption(ADMIN_PORT)) {
          port = Integer.parseInt(parsedOpts.getOptionValue(ADMIN_PORT));
        }
      } catch (NumberFormatException e) {
        printHelpAndExit("The port must be a valid integer.", false);
      }
      adminClient = new AdminClient(host, port);
      try {
        getAction(parsedOpts).execute(parsedOpts, adminClient, resultLimit);
      } catch (CommandException e) {
        printHelpAndExit(e.getMessage(), false);
      }
    } catch (ParseException e) {
      printHelpAndExit("Failed to parse jobs arguments: " + e.getMessage(), true);
    } finally {
      if (adminClient != null) adminClient.close();
    }
  }

  /** Lists job runs by name, or all jobs (recent or distinct). */
  private static class ListAllItemsCommand implements SubCommand {
    @Override
    public void execute(CommandLine parsedOpts, AdminClient adminClient, int resultsLimit)
        throws CommandException {
      try {
        if (parsedOpts.hasOption(NAME_OPT)) {
          JobInfoPrintUtils.printJobRuns(adminClient.queryByJobName(parsedOpts.getOptionValue(NAME_OPT), resultsLimit));
        } else if (parsedOpts.hasOption(RECENT_OPT)) {
          JobInfoPrintUtils.printAllJobs(adminClient.queryAllJobs(QueryListType.RECENT, resultsLimit), resultsLimit);
        } else {
          JobInfoPrintUtils.printAllJobs(adminClient.queryAllJobs(QueryListType.DISTINCT, resultsLimit), resultsLimit);
        }
      } catch (RemoteInvocationException e) {
        throw new CommandException("Error talking to adminServer: " + e.getMessage());
      }
    }
  }

  /** Shows the details of a single job, looked up by id. */
  private static class ListOneItemWithDetails implements SubCommand {
    @Override
    public void execute(CommandLine parsedOpts, AdminClient adminClient, int resultsLimit)
        throws CommandException {
      try {
        if (parsedOpts.hasOption(ID_OPT)) {
          JobInfoPrintUtils.printJob(
              adminClient.queryByJobId(parsedOpts.getOptionValue(ID_OPT))
          );
        } else {
          throw new CommandException("Please specify an id");
        }
      } catch (RemoteInvocationException e) {
        throw new CommandException("Error talking to adminServer: " + e.getMessage());
      }
    }
  }

  /** Prints the properties of a job, looked up by id or by name (most recent run). */
  private static class ListItemsWithPropertiesCommand implements SubCommand {
    @Override
    public void execute(CommandLine parsedOpts, AdminClient adminClient, int resultsLimit) throws CommandException {
      try {
        if (parsedOpts.hasOption(ID_OPT)) {
          JobInfoPrintUtils.printJobProperties(
              adminClient.queryByJobId(parsedOpts.getOptionValue(ID_OPT))
          );
        } else if (parsedOpts.hasOption(NAME_OPT)) {
          List<JobExecutionInfo> infos = adminClient.queryByJobName(parsedOpts.getOptionValue(NAME_OPT), 1);
          if (infos.size() == 0) {
            System.out.println("No job by that name found");
          } else {
            JobInfoPrintUtils.printJobProperties(Optional.of(infos.get(0)));
          }
        } else {
          throw new CommandException("Please specify a job id or name");
        }
      } catch (RemoteInvocationException e) {
        throw new CommandException("Error talking to adminServer: " + e.getMessage());
      }
    }
  }

  /**
   * Declares the supported options: a required, mutually exclusive action group
   * (help/details/list/properties), an optional id-or-name group, and the
   * standalone connection/limit options.
   */
  private Options createCommandLineOptions() {
    Options options = new Options();
    OptionGroup actionGroup = new OptionGroup();
    actionGroup.addOption(new Option("h", HELP_OPT, false, "Shows the help message."));
    actionGroup.addOption(new Option("d", DETAILS_OPT, false, "Show details about a job/task."));
    actionGroup.addOption(new Option("l", LIST_OPT, false, "List jobs/tasks."));
    actionGroup.addOption(new Option("p", PROPS_OPT, false, "Fetch properties with the query."));
    actionGroup.setRequired(true);
    options.addOptionGroup(actionGroup);
    OptionGroup idGroup = new OptionGroup();
    idGroup.addOption(new Option("j", NAME_OPT, true, "Find job(s) matching given job name."));
    idGroup.addOption(new Option("i", ID_OPT, true, "Find the job/task with the given id."));
    options.addOptionGroup(idGroup);
    options.addOption("n", true, "Limit the number of results returned. (default:" + DEFAULT_RESULTS_LIMIT + ")");
    options.addOption("r", RECENT_OPT, false, "List the most recent jobs (instead of a list of unique jobs)");
    options.addOption("H", ADMIN_SERVER, true, "hostname of admin server");
    options.addOption("P", ADMIN_PORT, true, "port of admin server");
    return options;
  }

  /**
   * @return the value of -n, or the default limit; exits the process if -n is not an integer
   */
  private int parseResultsLimit(CommandLine parsedOpts) {
    if (parsedOpts.hasOption("n")) {
      try {
        return Integer.parseInt(parsedOpts.getOptionValue("n"));
      } catch (NumberFormatException e) {
        printHelpAndExit("Could not parse integer value for option n.", false);
        return 0;
      }
    } else {
      return DEFAULT_RESULTS_LIMIT;
    }
  }

  /**
   * Print the error message (optionally followed by the usage text) and exit with status 1.
   */
  private void printHelpAndExit(String errorMsg, boolean printHelp) {
    System.out.println(errorMsg);
    if (printHelp) {
      HelpFormatter hf = new HelpFormatter();
      hf.printHelp("gobblin-admin.sh jobs [options]", this.options);
    }
    System.exit(1);
  }
}
| 2,703 |
0 | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-admin/src/main/java/org/apache/gobblin/cli/JobInfoPrintUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.cli;
import com.google.common.base.Optional;
import com.linkedin.data.template.StringMap;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metrics.MetricNames;
import org.apache.gobblin.rest.*;
import org.joda.time.Period;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;
import org.joda.time.format.PeriodFormat;
import org.joda.time.format.PeriodFormatter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
* Utility methods to print out various pieces of info about jobs
*/
public class JobInfoPrintUtils {
  // Shared formatters for decimals, timestamps and durations.
  // NOTE(review): DecimalFormat is not thread-safe; fine for single-threaded CLI
  // use, but confirm before calling these helpers from multiple threads.
  private static NumberFormat decimalFormatter = new DecimalFormat("#0.00");
  private static DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.dateHourMinuteSecond();
  private static PeriodFormatter periodFormatter = PeriodFormat.getDefault();

  /**
   * Extracts the schedule from a job execution.
   * <p/>
   * If the job was in run once mode, it will return that, otherwise it will return the schedule.
   *
   * @param jobInfo A job execution info to extract from
   * @return "RUN_ONCE", the schedule string, or "UNKNOWN" if there were no job properties
   */
  public static String extractJobSchedule(JobExecutionInfo jobInfo) {
    if (jobInfo.hasJobProperties() && jobInfo.getJobProperties().size() > 0) {
      StringMap props = jobInfo.getJobProperties();
      // A job carrying the run-once key — or lacking a schedule key entirely —
      // is reported as RUN_ONCE.
      if (props.containsKey(ConfigurationKeys.JOB_RUN_ONCE_KEY) ||
          !props.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
        return "RUN_ONCE";
      } else if (props.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) {
        return props.get(ConfigurationKeys.JOB_SCHEDULE_KEY);
      }
    }
    return "UNKNOWN";
  }

  /**
   * Print a table describing a bunch of individual job executions.
   * Exits the process with status 1 if the list is null.
   *
   * @param jobExecutionInfos Job execution status to print
   */
  public static void printJobRuns(List<JobExecutionInfo> jobExecutionInfos) {
    if (jobExecutionInfos == null) {
      System.err.println("No job executions found.");
      System.exit(1);
    }
    List<String> labels = Arrays.asList("Job Id", "State", "Schedule", "Completed Tasks", "Launched Tasks",
        "Start Time", "End Time", "Duration (s)");
    // "-" left-aligns a column; "" keeps the default right alignment.
    List<String> flags = Arrays.asList("-", "-", "-", "", "", "-", "-", "-");
    List<List<String>> data = new ArrayList<>();
    for (JobExecutionInfo jobInfo : jobExecutionInfos) {
      List<String> entry = new ArrayList<>();
      entry.add(jobInfo.getJobId());
      entry.add(jobInfo.getState().toString());
      entry.add(extractJobSchedule(jobInfo));
      entry.add(jobInfo.getCompletedTasks().toString());
      entry.add(jobInfo.getLaunchedTasks().toString());
      entry.add(dateTimeFormatter.print(jobInfo.getStartTime()));
      entry.add(dateTimeFormatter.print(jobInfo.getEndTime()));
      // Duration is only meaningful for committed jobs; show "-" otherwise.
      entry.add(jobInfo.getState() == JobStateEnum.COMMITTED ?
          decimalFormatter.format(jobInfo.getDuration() / 1000.0) : "-");
      data.add(entry);
    }
    new CliTablePrinter.Builder()
        .labels(labels)
        .data(data)
        .flags(flags)
        .delimiterWidth(2)
        .build()
        .printTable();
  }

  /**
   * Print summary information about a bunch of jobs in the system.
   * Exits the process with status 1 if the list is null.
   *
   * @param jobExecutionInfos List of jobs
   * @param resultsLimit original result limit, used to warn when results may be truncated
   */
  public static void printAllJobs(List<JobExecutionInfo> jobExecutionInfos, int resultsLimit) {
    if (jobExecutionInfos == null) {
      System.err.println("No jobs found.");
      System.exit(1);
    }
    List<String> labels = Arrays.asList("Job Name", "State", "Last Run Started", "Last Run Completed",
        "Schedule", "Last Run Records Processed", "Last Run Records Failed");
    List<String> flags = Arrays.asList("-", "-", "-", "-", "-", "", "");
    List<List<String>> data = new ArrayList<>();
    for (JobExecutionInfo jobInfo : jobExecutionInfos) {
      List<String> entry = new ArrayList<>();
      entry.add(jobInfo.getJobName());
      entry.add(jobInfo.getState().toString());
      entry.add(dateTimeFormatter.print(jobInfo.getStartTime()));
      entry.add(dateTimeFormatter.print(jobInfo.getEndTime()));
      entry.add(extractJobSchedule(jobInfo));
      // Add metrics
      MetricArray metrics = jobInfo.getMetrics();
      Double recordsProcessed = null;
      Double recordsFailed = null;
      try {
        for (Metric metric : metrics) {
          if (metric.getName().equals(MetricNames.ExtractorMetrics.RECORDS_READ_METER)) {
            recordsProcessed = Double.parseDouble(metric.getValue());
          } else if (metric.getName().equals(MetricNames.ExtractorMetrics.RECORDS_FAILED_METER)) {
            recordsFailed = Double.parseDouble(metric.getValue());
          }
        }
        if (recordsProcessed != null && recordsFailed != null) {
          entry.add(recordsProcessed.toString());
          entry.add(recordsFailed.toString());
        }
      } catch (NumberFormatException ex) {
        System.err.println("Failed to process metrics");
      }
      // Fallback: if either metric was missing or unparseable, show "-" in both
      // columns so every row keeps the same column count.
      if (recordsProcessed == null || recordsFailed == null) {
        entry.add("-");
        entry.add("-");
      }
      data.add(entry);
    }
    new CliTablePrinter.Builder()
        .labels(labels)
        .data(data)
        .flags(flags)
        .delimiterWidth(2)
        .build()
        .printTable();
    // Hitting the limit exactly suggests the query was truncated.
    if (jobExecutionInfos.size() == resultsLimit) {
      System.out.println("\nWARNING: There may be more jobs (# of results is equal to the limit)");
    }
  }

  /**
   * Print information about one specific job, followed by its metrics.
   *
   * @param jobExecutionInfoOptional Job info to print; prints an error and returns if absent
   */
  public static void printJob(Optional<JobExecutionInfo> jobExecutionInfoOptional) {
    if (!jobExecutionInfoOptional.isPresent()) {
      System.err.println("Job id not found.");
      return;
    }
    JobExecutionInfo jobExecutionInfo = jobExecutionInfoOptional.get();
    List<List<String>> data = new ArrayList<>();
    List<String> flags = Arrays.asList("", "-");
    data.add(Arrays.asList("Job Name", jobExecutionInfo.getJobName()));
    data.add(Arrays.asList("Job Id", jobExecutionInfo.getJobId()));
    data.add(Arrays.asList("State", jobExecutionInfo.getState().toString()));
    data.add(Arrays.asList("Completed/Launched Tasks",
        String.format("%d/%d", jobExecutionInfo.getCompletedTasks(), jobExecutionInfo.getLaunchedTasks())));
    data.add(Arrays.asList("Start Time", dateTimeFormatter.print(jobExecutionInfo.getStartTime())));
    data.add(Arrays.asList("End Time", dateTimeFormatter.print(jobExecutionInfo.getEndTime())));
    // Duration is only shown for committed jobs (matches printJobRuns).
    data.add(Arrays.asList("Duration", jobExecutionInfo.getState() == JobStateEnum.COMMITTED ? periodFormatter
        .print(new Period(jobExecutionInfo.getDuration().longValue())) : "-"));
    data.add(Arrays.asList("Tracking URL", jobExecutionInfo.getTrackingUrl()));
    data.add(Arrays.asList("Launcher Type", jobExecutionInfo.getLauncherType().name()));
    new CliTablePrinter.Builder()
        .data(data)
        .flags(flags)
        .delimiterWidth(2)
        .build()
        .printTable();
    JobInfoPrintUtils.printMetrics(jobExecutionInfo.getMetrics());
  }

  /**
   * Print the properties of a specific job as a key/value table.
   *
   * @param jobExecutionInfoOptional Job info whose properties to print; prints an
   *        error and returns if absent
   */
  public static void printJobProperties(Optional<JobExecutionInfo> jobExecutionInfoOptional) {
    if (!jobExecutionInfoOptional.isPresent()) {
      System.err.println("Job not found.");
      return;
    }
    List<List<String>> data = new ArrayList<>();
    List<String> flags = Arrays.asList("", "-");
    List<String> labels = Arrays.asList("Property Key", "Property Value");
    for (Map.Entry<String, String> entry : jobExecutionInfoOptional.get().getJobProperties().entrySet()) {
      data.add(Arrays.asList(entry.getKey(), entry.getValue()));
    }
    new CliTablePrinter.Builder()
        .labels(labels)
        .data(data)
        .flags(flags)
        .delimiterWidth(2)
        .build()
        .printTable();
  }

  /**
   * Print out various metrics as a name/value table, preceded by a blank line.
   *
   * @param metrics Metrics to print
   */
  private static void printMetrics(MetricArray metrics) {
    System.out.println();
    if (metrics.size() == 0) {
      System.out.println("No metrics found.");
      return;
    }
    List<List<String>> data = new ArrayList<>();
    List<String> flags = Arrays.asList("", "-");
    for (Metric metric : metrics) {
      data.add(Arrays.asList(metric.getName(), metric.getValue()));
    }
    new CliTablePrinter.Builder()
        .data(data)
        .flags(flags)
        .delimiterWidth(2)
        .build()
        .printTable();
  }
}
| 2,704 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/test/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/test/java/org/apache/gobblin/config/client/TestConfigClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValue;
import org.apache.gobblin.config.client.api.VersionStabilityPolicy;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
@Test(groups = { "gobblin.config.common.impl" })
public class TestConfigClient {
  private ConfigStore mockConfigStore;
  // Fixed version string the mock store reports for every call.
  private final String version = "V1.0";

  // Config key paths forming the mock store topology built in setup().
  private final ConfigKeyPath data = SingleLinkedListConfigKeyPath.ROOT.createChild("data");
  private final ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
  private final ConfigKeyPath tag2 = SingleLinkedListConfigKeyPath.ROOT.createChild("tag2");
  private final ConfigKeyPath databases = data.createChild("databases");
  private final ConfigKeyPath identity = databases.createChild("identity");
  private final ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");
  private final ConfigKeyPath espressoTag = tag.createChild("espressoTag");
  private final ConfigKeyPath nertzTag2 = tag2.createChild("nertzTag2");

  // Debug helper: dumps every key/value entry of a resolved config to stdout.
  private void printConfig(Config config){
    Set<Map.Entry<String,ConfigValue>> entrySet = config.entrySet();
    for(Map.Entry<String,ConfigValue> entry: entrySet){
      System.out.println("key: " + entry.getKey() + ", value: " + entry.getValue());
    }
  }

  @BeforeClass
  public void setup(){
    // Topology for mock up config store
    // ├── data
    // │   └── databases
    // │       └── identity
    // ├── tag
    // │   ├── highPriorityTag
    // │   └── espressoTag
    // └── tag2
    //     └── nertzTag2
    mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);
    List<ConfigKeyPath> emptyList = Collections.emptyList();

    // mock up parent/children topology
    List<ConfigKeyPath> rootChildren = new ArrayList<ConfigKeyPath>();
    rootChildren.add(data);
    rootChildren.add(tag);
    rootChildren.add(tag2);
    when(mockConfigStore.getChildren(SingleLinkedListConfigKeyPath.ROOT, version)).thenReturn(rootChildren);

    List<ConfigKeyPath> dataChildren = new ArrayList<ConfigKeyPath>();
    dataChildren.add(databases);
    when(mockConfigStore.getChildren(data, version)).thenReturn(dataChildren);

    List<ConfigKeyPath> databasesChildren = new ArrayList<ConfigKeyPath>();
    databasesChildren.add(identity);
    when(mockConfigStore.getChildren(databases, version)).thenReturn(databasesChildren);
    when(mockConfigStore.getChildren(identity, version)).thenReturn(emptyList);

    List<ConfigKeyPath> tagChildren = new ArrayList<ConfigKeyPath>();
    tagChildren.add(highPriorityTag);
    tagChildren.add(espressoTag);
    when(mockConfigStore.getChildren(tag, version)).thenReturn(tagChildren);
    when(mockConfigStore.getChildren(highPriorityTag, version)).thenReturn(emptyList);
    when(mockConfigStore.getChildren(espressoTag, version)).thenReturn(emptyList);

    List<ConfigKeyPath> tag2Children = new ArrayList<ConfigKeyPath>();
    tag2Children.add(nertzTag2);
    when(mockConfigStore.getChildren(tag2, version)).thenReturn(tag2Children);
    when(mockConfigStore.getChildren(nertzTag2, version)).thenReturn(emptyList);

    // mock up import links
    // identity imports espressoTag and highPriorityTag
    List<ConfigKeyPath> identityImports = new ArrayList<ConfigKeyPath>();
    identityImports.add(espressoTag);
    identityImports.add(highPriorityTag);
    when(mockConfigStore.getOwnImports(identity, version)).thenReturn(identityImports);

    // espressoTag imports nertzTag2 (transitive import for identity)
    List<ConfigKeyPath> espressoImports = new ArrayList<ConfigKeyPath>();
    espressoImports.add(nertzTag2);
    when(mockConfigStore.getOwnImports(espressoTag, version)).thenReturn(espressoImports);

    mockupConfigValues();
  }

  private void mockupConfigValues(){
    /**
     * Each node has a common key "generalKey" with value "generalValue_${node}";
     * this key is overridden by more specific (descendant) nodes during resolution.
     *
     * Each node also has its own key "keyOf_${node}" with value "valueOf_${node}";
     * these keys are inherited by descendants as-is.
     */
    // mock up the configuration values for root
    Map<String, String> rootMap = new HashMap<>();
    rootMap.put("keyOf_Root", "valueOf_Root");
    rootMap.put("generalKey", "generalValue_root"); // key will be overridden by descendants
    when(mockConfigStore.getOwnConfig(SingleLinkedListConfigKeyPath.ROOT, version)).thenReturn(ConfigFactory.parseMap(rootMap));

    // mock up the configuration values for /data
    Map<String, String> dataMap = new HashMap<>();
    dataMap.put("keyOf_data", "valueOf_data");
    dataMap.put("generalKey", "generalValue_data");
    when(mockConfigStore.getOwnConfig(data, version)).thenReturn(ConfigFactory.parseMap(dataMap));

    // mock up the configuration values for /data/databases
    Map<String, String> databasesMap = new HashMap<>();
    databasesMap.put("keyOf_databases", "valueOf_databases");
    databasesMap.put("generalKey", "generalValue_data_databases");
    when(mockConfigStore.getOwnConfig(databases, version)).thenReturn(ConfigFactory.parseMap(databasesMap));

    // mock up the configuration values for /data/databases/identity
    Map<String, String> identityMap = new HashMap<>();
    identityMap.put("keyOf_identity", "valueOf_identity");
    identityMap.put("generalKey", "generalValue_data_databases_identity");
    when(mockConfigStore.getOwnConfig(identity, version)).thenReturn(ConfigFactory.parseMap(identityMap));

    // mock up the configuration values for /tag
    Map<String, String> tagMap = new HashMap<>();
    tagMap.put("keyOf_tag", "valueOf_tag");
    tagMap.put("generalKey", "generalValue_tag");
    when(mockConfigStore.getOwnConfig(tag, version)).thenReturn(ConfigFactory.parseMap(tagMap));

    // mock up the configuration values for /tag/espressoTag
    Map<String, String> espressoTagMap = new HashMap<>();
    espressoTagMap.put("keyOf_espressoTag", "valueOf_espressoTag");
    espressoTagMap.put("generalKey", "generalValue_tag_espressoTag");
    when(mockConfigStore.getOwnConfig(espressoTag, version)).thenReturn(ConfigFactory.parseMap(espressoTagMap));

    // mock up the configuration values for /tag/highPriorityTag
    Map<String, String> highPriorityTagMap = new HashMap<>();
    highPriorityTagMap.put("keyOf_highPriorityTag", "valueOf_highPriorityTag");
    highPriorityTagMap.put("generalKey", "generalValue_tag_highPriorityTag");
    when(mockConfigStore.getOwnConfig(highPriorityTag, version)).thenReturn(ConfigFactory.parseMap(highPriorityTagMap));

    // mock up the configuration values for /tag2
    Map<String, String> tag2Map = new HashMap<>();
    tag2Map.put("keyOf_tag2", "valueOf_tag2");
    tag2Map.put("generalKey", "generalValue_tag2");
    when(mockConfigStore.getOwnConfig(tag2, version)).thenReturn(ConfigFactory.parseMap(tag2Map));

    // mock up the configuration values for /tag2/nertzTag2
    Map<String, String> nertzTag2Map = new HashMap<>();
    nertzTag2Map.put("keyOf_nertzTag2", "valueOf_nertzTag2");
    nertzTag2Map.put("generalKey", "generalValue_tag2_nertzTag2");
    when(mockConfigStore.getOwnConfig(nertzTag2, version)).thenReturn(ConfigFactory.parseMap(nertzTag2Map));
  }

  /**
   * Asserts the fully-resolved config for /data/databases/identity: the own key of every
   * ancestor and (transitively) imported node must be present, and "generalKey" must
   * resolve to the most specific value, i.e. the one set on identity itself.
   */
  private void checkValuesForIdentity(Config resolvedConfig){
    Assert.assertTrue(resolvedConfig.getString("keyOf_data").equals("valueOf_data"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_identity").equals("valueOf_identity"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_espressoTag").equals("valueOf_espressoTag"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_Root").equals("valueOf_Root"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_nertzTag2").equals("valueOf_nertzTag2"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_highPriorityTag").equals("valueOf_highPriorityTag"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_tag2").equals("valueOf_tag2"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_tag").equals("valueOf_tag"));
    Assert.assertTrue(resolvedConfig.getString("keyOf_databases").equals("valueOf_databases"));
    Assert.assertTrue(resolvedConfig.getString("generalKey").equals("generalValue_data_databases_identity"));
  }

  /**
   * End-to-end test through {@link ConfigClient}: resolution via both relative and absolute
   * URIs, and reverse import lookup (getImportedBy) with and without recursion.
   */
  @Test
  private void testFromClient() throws Exception{
    ConfigStoreFactoryRegister mockConfigStoreFactoryRegister;
    ConfigStoreFactory mockConfigStoreFactory;

    URI relativeURI = new URI("etl-hdfs:///data/databases/identity");
    URI absoluteURI = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/data/databases/identity");
    when(mockConfigStore.getStoreURI()).thenReturn(new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest"));

    // Factory register that resolves the "etl-hdfs" scheme to our mocked store.
    mockConfigStoreFactory = mock(ConfigStoreFactory.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStoreFactory.getScheme()).thenReturn("etl-hdfs");
    when(mockConfigStoreFactory.createConfigStore(absoluteURI)).thenReturn(mockConfigStore);
    when(mockConfigStoreFactory.createConfigStore(relativeURI)).thenReturn(mockConfigStore);

    mockConfigStoreFactoryRegister = mock(ConfigStoreFactoryRegister.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStoreFactoryRegister.getConfigStoreFactory("etl-hdfs")).thenReturn(mockConfigStoreFactory);

    ConfigClient client = new ConfigClient(VersionStabilityPolicy.STRONG_LOCAL_STABILITY, mockConfigStoreFactoryRegister);
    // Same resolved config must be produced for relative and absolute URIs.
    Config resolved = client.getConfig(relativeURI);
    checkValuesForIdentity(resolved);
    resolved = client.getConfig(absoluteURI);
    checkValuesForIdentity(resolved);

    // importedBy using relative URI
    String[] expectedImportedBy = {"etl-hdfs:/tag/espressoTag", "etl-hdfs:/data/databases/identity"};
    URI nertzTagURI = new URI("etl-hdfs:///tag2/nertzTag2");
    // Non-recursive: only espressoTag imports nertzTag2 directly.
    Collection<URI> importedBy = client.getImportedBy(nertzTagURI, false);
    Assert.assertEquals(importedBy.size(), 1);
    Assert.assertEquals(importedBy.iterator().next().toString(), expectedImportedBy[0]);

    // Recursive: identity imports espressoTag, so it is included too.
    importedBy = client.getImportedBy(nertzTagURI, true);
    Assert.assertEquals(importedBy.size(), 2);
    for(URI u: importedBy){
      Assert.assertTrue(u.toString().equals(expectedImportedBy[0]) ||
          u.toString().equals(expectedImportedBy[1]));
    }

    // importedBy using abs URI
    String[] expectedImportedBy_abs = {"etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/tag/espressoTag",
        "etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/data/databases/identity"};
    nertzTagURI = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/tag2/nertzTag2");
    importedBy = client.getImportedBy(nertzTagURI, false);
    Assert.assertEquals(importedBy.size(), 1);
    Assert.assertEquals(importedBy.iterator().next().toString(), expectedImportedBy_abs[0]);

    importedBy = client.getImportedBy(nertzTagURI, true);
    Assert.assertEquals(importedBy.size(), 2);
    for(URI u: importedBy){
      Assert.assertTrue(u.toString().equals(expectedImportedBy_abs[0]) ||
          u.toString().equals(expectedImportedBy_abs[1]));
    }
  }
}
| 2,705 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/test/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/test/java/org/apache/gobblin/config/client/TestConfigClientUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import java.net.URI;
import java.net.URISyntaxException;
import org.mockito.Mockito;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
@Test(groups = { "gobblin.config.client.api" })
public class TestConfigClientUtils {
private ConfigStore mockConfigStore;
private final String version = "V1.0";
@BeforeClass
public void setup() throws Exception{
mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
URI configStorURI = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest");
when(mockConfigStore.getCurrentVersion()).thenReturn(version);
when(mockConfigStore.getStoreURI()).thenReturn(configStorURI);
}
@Test
public void testGetConfigKeyPath() throws URISyntaxException{
String expected = "/datasets/a1/a2";
URI clientAbsURI = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2");
ConfigKeyPath result = ConfigClientUtils.buildConfigKeyPath(clientAbsURI, mockConfigStore);
Assert.assertEquals(result.toString(), expected);
URI clientRelativeURI = new URI("etl-hdfs:///datasets/a1/a2");
result = ConfigClientUtils.buildConfigKeyPath(clientRelativeURI, mockConfigStore);
Assert.assertEquals(result.toString(), expected);
clientRelativeURI = new URI("etl-hdfs:/datasets/a1/a2");
result = ConfigClientUtils.buildConfigKeyPath(clientRelativeURI, mockConfigStore);
Assert.assertEquals(result.toString(), expected);
ConfigKeyPath configKey = SingleLinkedListConfigKeyPath.ROOT.createChild("data").createChild("databases").createChild("Identity");
// client app pass URI without authority
URI adjusted = ConfigClientUtils.buildUriInClientFormat(configKey, mockConfigStore, false);
Assert.assertTrue(adjusted.toString().equals("etl-hdfs:/data/databases/Identity"));
// client app pass URI with authority
adjusted = ConfigClientUtils.buildUriInClientFormat(configKey, mockConfigStore, true);
Assert.assertTrue(adjusted.toString().equals("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/data/databases/Identity"));
}
@Test
public void testIsAncestorOrSame() throws Exception{
//Path ancestor = new Path("/");
//Path descendant = new Path("/");
URI ancestor = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/");
URI descendant = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2");
Assert.assertTrue(ConfigClientUtils.isAncestorOrSame(descendant, ancestor));
// ends with "/"
descendant = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2/");
Assert.assertTrue(ConfigClientUtils.isAncestorOrSame(descendant, ancestor));
// wrong authority
descendant = new URI("etl-hdfs://ltx1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2");
Assert.assertTrue(!ConfigClientUtils.isAncestorOrSame(descendant, ancestor));
// wrong path
descendant = new URI("etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/sahil/HdfsBasedConfigTest/datasets/a1/a2");
Assert.assertTrue(!ConfigClientUtils.isAncestorOrSame(descendant, ancestor));
ConfigKeyPath data = SingleLinkedListConfigKeyPath.ROOT.createChild("data");
ConfigKeyPath data2 = SingleLinkedListConfigKeyPath.ROOT.createChild("data2");
ConfigKeyPath identity = SingleLinkedListConfigKeyPath.ROOT.createChild("data").createChild("databases").createChild("Identity");
Assert.assertTrue(ConfigClientUtils.isAncestorOrSame(identity, data));
Assert.assertTrue(ConfigClientUtils.isAncestorOrSame(identity, SingleLinkedListConfigKeyPath.ROOT));
Assert.assertTrue(!ConfigClientUtils.isAncestorOrSame(identity, data2));
}
@Test (expectedExceptions = java.lang.IllegalArgumentException.class )
public void testInvalidSchemeURI() throws URISyntaxException {
URI clientURI = new URI("hdfs:///datasets/a1/a2");
ConfigClientUtils.buildConfigKeyPath(clientURI, mockConfigStore);
}
@Test (expectedExceptions = java.lang.IllegalArgumentException.class )
public void testInvalidAuthortiyURI() throws URISyntaxException {
URI clientURI = new URI("etl-hdfs://ltx1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2");
ConfigClientUtils.buildConfigKeyPath(clientURI, mockConfigStore);
}
} | 2,706 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/ConfigClientCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.gobblin.config.client.api.VersionStabilityPolicy;
/**
 * Caches {@link ConfigClient}s for every {@link VersionStabilityPolicy}.
 *
 * <p>At most one {@link ConfigClient} is ever created per policy value; subsequent
 * calls for the same policy return the shared cached instance.</p>
 */
public class ConfigClientCache {

  // Bounded by the number of policy values, since the policy is the only cache key.
  private static final Cache<VersionStabilityPolicy, ConfigClient> CONFIG_CLIENTS_CACHE = CacheBuilder.newBuilder()
      .maximumSize(VersionStabilityPolicy.values().length).build();

  /** Static utility holder; not meant to be instantiated. */
  private ConfigClientCache() {
  }

  /**
   * Returns the cached {@link ConfigClient} for the given policy, creating and
   * caching it on first use.
   *
   * @param policy the version stability policy the returned client must honor
   * @return the shared {@link ConfigClient} for {@code policy}
   * @throws RuntimeException if creating the client fails (original failure is the cause)
   */
  public static ConfigClient getClient(final VersionStabilityPolicy policy) {
    try {
      return CONFIG_CLIENTS_CACHE.get(policy, new Callable<ConfigClient>() {
        @Override
        public ConfigClient call() throws Exception {
          return ConfigClient.createConfigClient(policy);
        }
      });
    } catch (ExecutionException e) {
      throw new RuntimeException("Failed to get Config client", e);
    }
  }
}
| 2,707 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/ConfigClientUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.typesafe.config.Config;
/**
 * Utility class to transfer {@link URI} to {@link ConfigKeyPath} and vice versa.
 *
 * @author mitu
 */
public class ConfigClientUtils {

  /** Static utility holder; not meant to be instantiated. */
  private ConfigClientUtils() {
  }

  /**
   * Translates a client-supplied URI into a config key path relative to the store root.
   *
   * @param configKeyURI - URI provided by client, which could be missing the authority/store root directory
   * @param cs - ConfigStore corresponding to the input URI. Requires the input URI's scheme/authority
   *             to match the ConfigStore's scheme/authority
   * @return - {@link ConfigKeyPath} for the relative path
   * @throws IllegalArgumentException if the URI's scheme or authority does not match the store's
   */
  public static ConfigKeyPath buildConfigKeyPath(URI configKeyURI, ConfigStore cs) {
    checkMatchingSchemeAndAuthority(configKeyURI, cs);
    // Example store root is etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest
    URI relative = cs.getStoreURI().relativize(configKeyURI);
    return getConfigKeyPath(relative.getPath());
  }

  /**
   * Build the URI based on the {@link ConfigStore} and the input configKeyPath.
   *
   * @param configKeyPath : path relative to the input config store cs
   * @param cs : the config store the path belongs to
   * @param returnURIWithAuthority : when true, return the URI with the store's authority and absolute path
   * @return : the URI in the requested client format
   *
   * for example, configKeyPath is /tags/retention,
   * with returnURIWithAuthority as false, return "etl-hdfs:///tags/retention"
   * with returnURIWithAuthority as true, return
   * etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/tags/retention
   */
  public static URI buildUriInClientFormat(ConfigKeyPath configKeyPath, ConfigStore cs, boolean returnURIWithAuthority) {
    try {
      if (!returnURIWithAuthority) {
        return new URI(cs.getStoreURI().getScheme(), null, configKeyPath.getAbsolutePathString(), null, null);
      }

      URI storeRoot = cs.getStoreURI();
      // if configKeyPath is root, configKeyPath.getAbsolutePathString().substring(1) would return ""
      // and cause the Path creation below to fail, so return the store root directly
      if (configKeyPath.isRootPath()) {
        return storeRoot;
      }

      Path absPath = new Path(storeRoot.getPath(), configKeyPath.getAbsolutePathString().substring(1)); // remove the first "/"
      return new URI(storeRoot.getScheme(), storeRoot.getAuthority(), absPath.toString(), null, null);
    } catch (URISyntaxException e) {
      // Should not happen: every component comes from an already-valid store URI.
      // Preserve the cause so the original parse failure is not lost.
      throw new RuntimeException("Can not build URI based on " + configKeyPath, e);
    }
  }

  /**
   * Bulk variant of {@link #buildUriInClientFormat(ConfigKeyPath, ConfigStore, boolean)}.
   * A {@code null} input collection yields an empty result.
   */
  public static Collection<URI> buildUriInClientFormat(Collection<ConfigKeyPath> configKeyPaths, ConfigStore cs,
      boolean returnURIWithAuthority) {
    Collection<URI> result = new ArrayList<>();
    if (configKeyPaths == null) {
      return result;
    }

    for (ConfigKeyPath p : configKeyPaths) {
      result.add(buildUriInClientFormat(p, cs, returnURIWithAuthority));
    }
    return result;
  }

  /**
   * Build the {@link ConfigKeyPath} based on an absolute/relative path.
   *
   * @param input - absolute/relative file path
   * @return - {@link ConfigKeyPath} corresponding to the input
   */
  public static ConfigKeyPath getConfigKeyPath(String input) {
    ConfigKeyPath result = SingleLinkedListConfigKeyPath.ROOT;
    String[] paths = input.split("/");
    for (String p : paths) {
      // if input starts with "/", the first split element is "", which must be skipped
      if (p.isEmpty()) {
        continue;
      }
      result = result.createChild(p);
    }
    return result;
  }

  /** Bulk variant of {@link #getConfigKeyPath(String)}. */
  public static List<ConfigKeyPath> getConfigKeyPath(List<String> input) {
    List<ConfigKeyPath> result = new ArrayList<>();
    for (String s : input) {
      result.add(getConfigKeyPath(s));
    }
    return result;
  }

  /**
   * Validates that the client URI's scheme matches the store's, and that its authority is
   * either absent (relative form) or equal to the store's authority.
   */
  private static void checkMatchingSchemeAndAuthority(URI configKeyURI, ConfigStore cs) {
    Preconditions.checkNotNull(configKeyURI, "input can not be null");
    Preconditions.checkNotNull(cs, "input can not be null");

    Preconditions.checkArgument(configKeyURI.getScheme().equals(cs.getStoreURI().getScheme()), "Scheme name not match");
    boolean authorityCheck =
        configKeyURI.getAuthority() == null || configKeyURI.getAuthority().equals(cs.getStoreURI().getAuthority());
    Preconditions.checkArgument(authorityCheck, "Authority not match");
  }

  /**
   * Utility method to check whether one URI is the ancestor of the other.
   *
   * Returns true iff both URIs' scheme/authority match and the ancestor's path is a
   * path-component prefix of the descendant's path (or the paths are equal).
   *
   * @param descendant : the descendant URI to check
   * @param ancestor : the ancestor URI to check
   * @return whether {@code ancestor} is an ancestor of (or the same as) {@code descendant}
   */
  public static boolean isAncestorOrSame(URI descendant, URI ancestor) {
    Preconditions.checkNotNull(descendant, "input can not be null");
    Preconditions.checkNotNull(ancestor, "input can not be null");

    if (!stringSame(descendant.getScheme(), ancestor.getScheme())) {
      return false;
    }
    if (!stringSame(descendant.getAuthority(), ancestor.getAuthority())) {
      return false;
    }
    return isAncestorOrSame(getConfigKeyPath(descendant.getPath()), getConfigKeyPath(ancestor.getPath()));
  }

  /** Null-safe string equality: two nulls are considered equal. */
  public static boolean stringSame(String l, String r) {
    if (l == null && r == null) {
      return true;
    }
    if (l == null || r == null) {
      return false;
    }
    return l.equals(r);
  }

  /**
   * Checks ancestry by walking up the descendant's parents until it reaches
   * the ancestor or the root.
   */
  public static boolean isAncestorOrSame(ConfigKeyPath descendant, ConfigKeyPath ancestor) {
    Preconditions.checkNotNull(descendant, "input can not be null");
    Preconditions.checkNotNull(ancestor, "input can not be null");

    if (descendant.equals(ancestor)) {
      return true;
    }
    if (descendant.isRootPath()) {
      return false;
    }
    return isAncestorOrSame(descendant.getParent(), ancestor);
  }

  /**
   * Extracts runtime-config overrides from the given properties: every key prefixed with
   * {@link ConfigurationKeys#CONFIG_RUNTIME_PREFIX} is included (with the prefix stripped).
   *
   * @param properties the source properties
   * @return the runtime {@link Config}, or {@link Optional#absent()} when no prefixed keys exist
   */
  public static Optional<Config> getOptionalRuntimeConfig(Properties properties) {
    Properties runtimeConfigProps = new Properties();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
      if (entry.getKey().toString().startsWith(ConfigurationKeys.CONFIG_RUNTIME_PREFIX)) {
        runtimeConfigProps.put(entry.getKey().toString().replace(ConfigurationKeys.CONFIG_RUNTIME_PREFIX, ""),
            entry.getValue().toString());
      }
    }
    if (runtimeConfigProps.isEmpty()) {
      return Optional.<Config>absent();
    }
    Config config = ConfigUtils.propertiesToConfig(runtimeConfigProps);
    return Optional.fromNullable(config);
  }
}
| 2,708 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/ConfigStoreFactoryRegister.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;
import org.apache.log4j.Logger;
/**
 * Registry mapping config store scheme names to the {@link ConfigStoreFactory}
 * able to create stores for that scheme. Factories are discovered from the
 * classpath via {@link ServiceLoader} and can also be registered programmatically.
 */
public class ConfigStoreFactoryRegister {
  private static final Logger LOG = Logger.getLogger(ConfigStoreFactoryRegister.class);

  // key is the configStore scheme name, value is the ConfigStoreFactory
  @SuppressWarnings("rawtypes")
  private final Map<String, ConfigStoreFactory> configStoreFactoryMap = new HashMap<>();

  /** Discovers and indexes every {@link ConfigStoreFactory} found by {@link ServiceLoader}. */
  @SuppressWarnings("rawtypes")
  public ConfigStoreFactoryRegister() {
    for (ConfigStoreFactory factory : ServiceLoader.load(ConfigStoreFactory.class)) {
      this.configStoreFactoryMap.put(factory.getScheme(), factory);
      LOG.info("Created the config store factory with scheme name " + factory.getScheme());
    }
  }

  /** Returns the factory registered for {@code scheme}, or {@code null} when none exists. */
  @SuppressWarnings("rawtypes")
  public ConfigStoreFactory getConfigStoreFactory(String scheme) {
    return this.configStoreFactoryMap.get(scheme);
  }

  /** Registers (or replaces) the factory under its own scheme name. */
  @SuppressWarnings("rawtypes")
  public void register(ConfigStoreFactory factory) {
    this.configStoreFactoryMap.put(factory.getScheme(), factory);
    LOG.info("Registered the config store factory with scheme name " + factory.getScheme());
  }
}
| 2,709 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/ConfigClientCLI.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import java.net.URI;
import java.net.URISyntaxException;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigRenderOptions;
import org.apache.gobblin.annotation.Alias;
import org.apache.gobblin.config.client.api.VersionStabilityPolicy;
import org.apache.gobblin.runtime.cli.CliApplication;
import org.apache.gobblin.runtime.cli.CliObjectFactory;
import org.apache.gobblin.runtime.cli.CliObjectSupport;
import org.apache.gobblin.runtime.cli.ConstructorAndPublicMethodsCliObjectFactory;
/**
 * A CLI for the {@link ConfigClient}. Can be used to get resolved configurations for a uri.
 */
@Alias(value = "config", description = "Query the config library")
public class ConfigClientCLI implements CliApplication {
  @Override
  public void run(String[] args) throws Exception {
    // args[0] is the application alias; the remaining arguments are parsed into a Command.
    CliObjectFactory<Command> factory = new ConstructorAndPublicMethodsCliObjectFactory<>(Command.class);
    Command command = factory.buildObject(args, 1, true, args[0]);

    // Always resolve against the freshest store contents.
    ConfigClient configClient = ConfigClient.createConfigClient(VersionStabilityPolicy.READ_FRESHEST);
    if (command.resolvedConfig) {
      // Print the fully-resolved config (imports and inheritance applied) for the uri.
      Config resolvedConfig = configClient.getConfig(command.uri);
      System.out.println(resolvedConfig.root().render(ConfigRenderOptions.defaults()));
    }
  }

  /**
   * The parsed user command.
   *
   * <p>NOTE(review): judging by the factory's name, CLI options are presumably derived
   * from this class's constructor and public methods via reflection, so the member
   * names double as option names — confirm before renaming anything here.</p>
   */
  public static class Command {
    // Target config key URI supplied as the positional "configUri" argument.
    private final URI uri;
    // Whether to print the resolved config; flipped on by resolvedConfig() below.
    private boolean resolvedConfig = false;

    @CliObjectSupport(argumentNames = "configUri")
    public Command(String uri) throws URISyntaxException {
      this.uri = new URI(uri);
    }

    // Invoked when the corresponding CLI flag is passed.
    public void resolvedConfig() {
      this.resolvedConfig = true;
    }
  }
}
| 2,710 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/ConfigClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import org.apache.log4j.Logger;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.typesafe.config.Config;
import org.apache.gobblin.config.client.api.ConfigStoreFactoryDoesNotExistsException;
import org.apache.gobblin.config.client.api.VersionStabilityPolicy;
import org.apache.gobblin.config.common.impl.ConfigStoreBackedTopology;
import org.apache.gobblin.config.common.impl.ConfigStoreBackedValueInspector;
import org.apache.gobblin.config.common.impl.ConfigStoreTopologyInspector;
import org.apache.gobblin.config.common.impl.ConfigStoreValueInspector;
import org.apache.gobblin.config.common.impl.InMemoryTopology;
import org.apache.gobblin.config.common.impl.InMemoryValueInspector;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import org.apache.gobblin.config.store.api.ConfigStoreWithStableVersioning;
import org.apache.gobblin.config.store.api.VersionDoesNotExistException;
/**
 * This class is used by clients to access the Configuration Management core library.
 *
 * <p>It resolves config-key URIs against the appropriate {@link ConfigStore} (located via
 * {@link ConfigStoreFactory} by URI scheme) and caches one {@link ConfigStoreAccessor} per store.
 *
 * @author mitu
 */
public class ConfigClient {
  private static final Logger LOG = Logger.getLogger(ConfigClient.class);

  /** Stability policy that controls which caching layer is used when building accessors. */
  private final VersionStabilityPolicy policy;

  /** Normally key is the ConfigStore.getStoreURI(), value is the ConfigStoreAccessor
   *
   * However, there may be two entries for a specific config store, for example
   * if user pass in URI like "etl-hdfs:///datasets/a1/a2" and the etl-hdfs config store factory using
   * default authority/default config store root normalized the URI to
   * "etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/datasets/a1/a2"
   * where /user/mitu/HdfsBasedConfigTest is the config store root
   *
   * Then there will be two entries in the Map which point to the same value
   * key1: "etl-hdfs:/"
   * key2: "etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/"
   *
   */
  private final TreeMap<URI, ConfigStoreAccessor> configStoreAccessorMap = new TreeMap<>();

  /** Registry used to look up a {@link ConfigStoreFactory} by URI scheme name. */
  private final ConfigStoreFactoryRegister configStoreFactoryRegister;

  private ConfigClient(VersionStabilityPolicy policy) {
    this(policy, new ConfigStoreFactoryRegister());
  }

  @VisibleForTesting
  ConfigClient(VersionStabilityPolicy policy, ConfigStoreFactoryRegister register) {
    this.policy = policy;
    this.configStoreFactoryRegister = register;
  }

  /**
   * Create the {@link ConfigClient} based on the {@link VersionStabilityPolicy}.
   * @param policy - {@link VersionStabilityPolicy} to specify the stability policy which control the caching layer creation
   * @return - {@link ConfigClient} for client to use to access the {@link ConfigStore}
   */
  public static ConfigClient createConfigClient(VersionStabilityPolicy policy) {
    return new ConfigClient(policy);
  }

  /**
   * Get the resolved {@link Config} based on the input URI.
   *
   * @param configKeyUri - The URI for the configuration key. There are two types of URI:
   *
   * 1. URI missing authority and configuration store root , for example "etl-hdfs:///datasets/a1/a2". It will get
   *    the configuration based on the default {@link ConfigStore} in etl-hdfs {@link ConfigStoreFactory}
   * 2. Complete URI:  for example "etl-hdfs://eat1-nertznn01.grid.linkedin.com:9000/user/mitu/HdfsBasedConfigTest/"
   *
   * @return  the resolved {@link Config} based on the input URI.
   *
   * @throws ConfigStoreFactoryDoesNotExistsException: if missing scheme name or the scheme name is invalid
   * @throws ConfigStoreCreationException: Specified {@link ConfigStoreFactory} can not create required {@link ConfigStore}
   * @throws VersionDoesNotExistException: Required version does not exist anymore ( may get deleted by retention job )
   */
  public Config getConfig(URI configKeyUri)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    return getConfig(configKeyUri, Optional.<Config>absent());
  }

  /**
   * Same as {@link #getConfig(URI)} but also applies the supplied runtime {@link Config} overrides
   * during resolution.
   */
  public Config getConfig(URI configKeyUri, Optional<Config> runtimeConfig)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    ConfigStoreAccessor accessor = this.getConfigStoreAccessor(configKeyUri);
    ConfigKeyPath configKeypath = ConfigClientUtils.buildConfigKeyPath(configKeyUri, accessor.configStore);
    return accessor.valueInspector.getResolvedConfig(configKeypath, runtimeConfig);
  }

  /**
   * Batch version of the {@link #getConfig(URI)} method.
   *
   * @param configKeyUris config key URIs to resolve; may be {@code null} or empty
   * @return a map from each input URI to its resolved {@link Config}; empty map for null/empty input
   * @throws ConfigStoreFactoryDoesNotExistsException if missing scheme name or the scheme name is invalid
   * @throws ConfigStoreCreationException if the {@link ConfigStoreFactory} can not create the required {@link ConfigStore}
   * @throws VersionDoesNotExistException if the required version does not exist anymore
   */
  public Map<URI, Config> getConfigs(Collection<URI> configKeyUris)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    if (configKeyUris == null || configKeyUris.isEmpty()) {
      return Collections.emptyMap();
    }

    Map<URI, Config> result = new HashMap<>();
    Multimap<ConfigStoreAccessor, ConfigKeyPath> partitionedAccessor = ArrayListMultimap.create();

    // map contains the mapping between ConfigKeyPath back to original URI , partitioned by ConfigStoreAccessor
    Map<ConfigStoreAccessor, Map<ConfigKeyPath, URI>> reverseMap = new HashMap<>();

    // partitioned the ConfigKeyPaths which belongs to the same store to one accessor
    for (URI u : configKeyUris) {
      ConfigStoreAccessor accessor = this.getConfigStoreAccessor(u);
      ConfigKeyPath configKeypath = ConfigClientUtils.buildConfigKeyPath(u, accessor.configStore);
      partitionedAccessor.put(accessor, configKeypath);
      if (!reverseMap.containsKey(accessor)) {
        reverseMap.put(accessor, new HashMap<ConfigKeyPath, URI>());
      }
      reverseMap.get(accessor).put(configKeypath, u);
    }

    // resolve per-store batches, then translate the keys back to the caller's original URIs
    for (Map.Entry<ConfigStoreAccessor, Collection<ConfigKeyPath>> entry : partitionedAccessor.asMap().entrySet()) {
      Map<ConfigKeyPath, Config> batchResult = entry.getKey().valueInspector.getResolvedConfigs(entry.getValue());
      for (Map.Entry<ConfigKeyPath, Config> resultEntry : batchResult.entrySet()) {
        // get the original URI from reverseMap
        URI orgURI = reverseMap.get(entry.getKey()).get(resultEntry.getKey());
        result.put(orgURI, resultEntry.getValue());
      }
    }
    return result;
  }

  /**
   * Convenient method to get resolved {@link Config} based on String input.
   */
  public Config getConfig(String configKeyStr) throws ConfigStoreFactoryDoesNotExistsException,
      ConfigStoreCreationException, VersionDoesNotExistException, URISyntaxException {
    return getConfig(configKeyStr, Optional.<Config>absent());
  }

  /**
   * String-input variant of {@link #getConfig(URI, Optional)}.
   */
  public Config getConfig(String configKeyStr, Optional<Config> runtimeConfig)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException,
             URISyntaxException {
    return this.getConfig(new URI(configKeyStr), runtimeConfig);
  }

  /**
   * Batch version of the {@link #getConfig(String)} method.
   *
   * @param configKeyStrs config key URI strings to resolve; may be {@code null} or empty
   * @return a map from each parsed URI to its resolved {@link Config}; empty map for null/empty input
   * @throws ConfigStoreFactoryDoesNotExistsException if missing scheme name or the scheme name is invalid
   * @throws ConfigStoreCreationException if the {@link ConfigStoreFactory} can not create the required {@link ConfigStore}
   * @throws VersionDoesNotExistException if the required version does not exist anymore
   * @throws URISyntaxException if any input string is not a valid URI
   */
  public Map<URI, Config> getConfigsFromStrings(Collection<String> configKeyStrs)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException,
             URISyntaxException {
    if (configKeyStrs == null || configKeyStrs.isEmpty()) {
      return Collections.emptyMap();
    }

    Collection<URI> configKeyUris = new ArrayList<>();
    for (String s : configKeyStrs) {
      configKeyUris.add(new URI(s));
    }
    return getConfigs(configKeyUris);
  }

  /**
   * Get the import links of the input URI.
   *
   * @param configKeyUri - The URI for the configuration key.
   * @param recursive    - Specify whether to get direct import links or recursively import links
   * @return the import links of the input URI.
   *
   * @throws ConfigStoreFactoryDoesNotExistsException: if missing scheme name or the scheme name is invalid
   * @throws ConfigStoreCreationException: Specified {@link ConfigStoreFactory} can not create required {@link ConfigStore}
   * @throws VersionDoesNotExistException: Required version does not exist anymore ( may get deleted by retention job )
   */
  public Collection<URI> getImports(URI configKeyUri, boolean recursive)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    return getImports(configKeyUri, recursive, Optional.<Config>absent());
  }

  /**
   * Same as {@link #getImports(URI, boolean)} with additional runtime {@link Config} overrides.
   */
  public Collection<URI> getImports(URI configKeyUri, boolean recursive, Optional<Config> runtimeConfig)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    ConfigStoreAccessor accessor = this.getConfigStoreAccessor(configKeyUri);
    ConfigKeyPath configKeypath = ConfigClientUtils.buildConfigKeyPath(configKeyUri, accessor.configStore);
    Collection<ConfigKeyPath> result;

    if (!recursive) {
      result = accessor.topologyInspector.getOwnImports(configKeypath, runtimeConfig);
    } else {
      result = accessor.topologyInspector.getImportsRecursively(configKeypath, runtimeConfig);
    }

    return ConfigClientUtils.buildUriInClientFormat(result, accessor.configStore, configKeyUri.getAuthority() != null);
  }

  /**
   * Get the URIs which imports the input URI
   *
   * @param configKeyUri - The URI for the configuration key.
   * @param recursive    - Specify whether to get direct or recursively imported by links
   * @return the URIs which imports the input URI
   *
   * @throws ConfigStoreFactoryDoesNotExistsException: if missing scheme name or the scheme name is invalid
   * @throws ConfigStoreCreationException: Specified {@link ConfigStoreFactory} can not create required {@link ConfigStore}
   * @throws VersionDoesNotExistException: Required version does not exist anymore ( may get deleted by retention job )
   */
  public Collection<URI> getImportedBy(URI configKeyUri, boolean recursive)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    return getImportedBy(configKeyUri, recursive, Optional.<Config>absent());
  }

  /**
   * Same as {@link #getImportedBy(URI, boolean)} with additional runtime {@link Config} overrides.
   */
  public Collection<URI> getImportedBy(URI configKeyUri, boolean recursive, Optional<Config> runtimeConfig)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {
    ConfigStoreAccessor accessor = this.getConfigStoreAccessor(configKeyUri);
    ConfigKeyPath configKeypath = ConfigClientUtils.buildConfigKeyPath(configKeyUri, accessor.configStore);
    Collection<ConfigKeyPath> result;

    if (!recursive) {
      result = accessor.topologyInspector.getImportedBy(configKeypath, runtimeConfig);
    } else {
      result = accessor.topologyInspector.getImportedByRecursively(configKeypath, runtimeConfig);
    }

    return ConfigClientUtils.buildUriInClientFormat(result, accessor.configStore, configKeyUri.getAuthority() != null);
  }

  /**
   * Returns the cached accessor key whose store URI is an ancestor of (or equal to) the input,
   * or {@code null} when no cached entry covers it.
   */
  private URI getMatchedFloorKeyFromCache(URI configKeyURI) {
    URI floorKey = this.configStoreAccessorMap.floorKey(configKeyURI);
    if (floorKey == null) {
      return null;
    }

    // both scheme name and authority name, if present, should match
    // or both authority should be null
    if (ConfigClientUtils.isAncestorOrSame(configKeyURI, floorKey)) {
      return floorKey;
    }

    return null;
  }

  /**
   * Creates a fresh {@link ConfigStoreAccessor} for the given key URI, choosing the caching
   * strategy based on the store's version stability and this client's {@link #policy}.
   */
  private ConfigStoreAccessor createNewConfigStoreAccessor(URI configKeyURI)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {

    LOG.info("Create new config store accessor for URI " + configKeyURI);
    ConfigStoreAccessor result;
    ConfigStoreFactory<ConfigStore> csFactory = this.getConfigStoreFactory(configKeyURI);
    ConfigStore cs = csFactory.createConfigStore(configKeyURI);

    if (!isConfigStoreWithStableVersion(cs)) {
      // an unstable store can not honor cross-JVM stability guarantees
      if (this.policy == VersionStabilityPolicy.CROSS_JVM_STABILITY) {
        throw new RuntimeException(String.format("with policy set to %s, can not connect to unstable config store %s",
            VersionStabilityPolicy.CROSS_JVM_STABILITY, cs.getStoreURI()));
      }
    }

    String currentVersion = cs.getCurrentVersion();
    LOG.info("Current config store version number: " + currentVersion);
    // topology related
    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(cs, currentVersion);
    InMemoryTopology inMemoryTopology = new InMemoryTopology(csTopology);

    // value related
    ConfigStoreBackedValueInspector rawValueInspector =
        new ConfigStoreBackedValueInspector(cs, currentVersion, inMemoryTopology);
    InMemoryValueInspector inMemoryValueInspector;

    // ConfigStoreWithStableVersioning always create Soft reference cache
    if (isConfigStoreWithStableVersion(cs) || this.policy == VersionStabilityPolicy.WEAK_LOCAL_STABILITY) {
      inMemoryValueInspector = new InMemoryValueInspector(rawValueInspector, false);
      result = new ConfigStoreAccessor(cs, inMemoryValueInspector, inMemoryTopology);
    }
    // Non ConfigStoreWithStableVersioning but require STRONG_LOCAL_STABILITY, use Strong reference cache
    else if (this.policy == VersionStabilityPolicy.STRONG_LOCAL_STABILITY) {
      inMemoryValueInspector = new InMemoryValueInspector(rawValueInspector, true);
      result = new ConfigStoreAccessor(cs, inMemoryValueInspector, inMemoryTopology);
    }
    // Require No cache
    else {
      result = new ConfigStoreAccessor(cs, rawValueInspector, inMemoryTopology);
    }

    return result;
  }

  /** Returns true when the store class is annotated with {@link ConfigStoreWithStableVersioning}. */
  private static boolean isConfigStoreWithStableVersion(ConfigStore cs) {
    for (Annotation annotation : cs.getClass().getDeclaredAnnotations()) {
      if (annotation instanceof ConfigStoreWithStableVersioning) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns the cached {@link ConfigStoreAccessor} for the URI, creating and caching a new one
   * (keyed by the store root, plus a scheme-root alias for authority-less URIs) on a cache miss.
   */
  private ConfigStoreAccessor getConfigStoreAccessor(URI configKeyURI)
      throws ConfigStoreFactoryDoesNotExistsException, ConfigStoreCreationException, VersionDoesNotExistException {

    URI matchedFloorKey = getMatchedFloorKeyFromCache(configKeyURI);
    ConfigStoreAccessor result;
    if (matchedFloorKey != null) {
      result = this.configStoreAccessorMap.get(matchedFloorKey);
      return result;
    }

    result = createNewConfigStoreAccessor(configKeyURI);
    ConfigStore cs = result.configStore;

    // put default root URI in cache as well for the URI which missing authority
    if (configKeyURI.getAuthority() == null) {
      // configKeyURI is missing authority/configstore root "etl-hdfs:///datasets/a1/a2"
      try {
        this.configStoreAccessorMap.put(new URI(configKeyURI.getScheme(), null, "/", null, null), result);
      } catch (URISyntaxException e) {
        // should not come here; preserve the cause for debuggability
        throw new RuntimeException("Can not build URI based on " + configKeyURI, e);
      }
    } else {
      // need to check Config Store's root is the prefix of input configKeyURI
      if (!ConfigClientUtils.isAncestorOrSame(configKeyURI, cs.getStoreURI())) {
        throw new RuntimeException(
            String.format("Config Store root URI %s is not the prefix of input %s", cs.getStoreURI(), configKeyURI));
      }
    }

    // put to cache
    this.configStoreAccessorMap.put(cs.getStoreURI(), result);
    return result;
  }

  // use serviceLoader to load configStoreFactories
  @SuppressWarnings("unchecked")
  private ConfigStoreFactory<ConfigStore> getConfigStoreFactory(URI configKeyUri)
      throws ConfigStoreFactoryDoesNotExistsException {
    @SuppressWarnings("rawtypes")
    ConfigStoreFactory csf = this.configStoreFactoryRegister.getConfigStoreFactory(configKeyUri.getScheme());
    if (csf == null) {
      throw new ConfigStoreFactoryDoesNotExistsException(configKeyUri.getScheme(), "scheme name does not exists");
    }

    return csf;
  }

  /** Bundles a {@link ConfigStore} together with its value and topology inspectors. */
  static class ConfigStoreAccessor {
    final ConfigStore configStore;
    final ConfigStoreValueInspector valueInspector;
    final ConfigStoreTopologyInspector topologyInspector;

    ConfigStoreAccessor(ConfigStore cs, ConfigStoreValueInspector valueInspector,
        ConfigStoreTopologyInspector topologyInspector) {
      this.configStore = cs;
      this.valueInspector = valueInspector;
      this.topologyInspector = topologyInspector;
    }
  }
}
| 2,711 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config | Create_ds/gobblin/gobblin-config-management/gobblin-config-client/src/main/java/org/apache/gobblin/config/client/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * This package contains the config client implementation for Gobblin config management.
*/
package org.apache.gobblin.config.client;
// Placeholder so this package compiles while package-info is its only other member.
//TODO: Remove once we commit any other classes
class DummyClassForJavadoc {
}
| 2,712 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/TestEnvironment.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config;
import org.testng.annotations.BeforeSuite;
import org.testng.annotations.Test;
import org.apache.gobblin.config.common.impl.TestConfigStoreValueInspector;
import org.apache.gobblin.config.store.hdfs.SimpleHdfsConfigStoreTest;
@Test
public class TestEnvironment {

  /**
   * Registers the system properties that other test classes in this suite read,
   * before any of them run.
   */
  @BeforeSuite
  public void setup() {
    // Value-inspector tests look this property up when resolving runtime values.
    System.setProperty(TestConfigStoreValueInspector.VALUE_INSPECTOR_SYS_PROP_KEY,
        TestConfigStoreValueInspector.VALUE_INSPECTOR_SYS_PROP_VALUE);
    // SimpleHdfsConfigStore tests look this property up for tag-name substitution.
    System.setProperty(SimpleHdfsConfigStoreTest.TAG_NAME_SYS_PROP_KEY,
        SimpleHdfsConfigStoreTest.TAG_NAME_SYS_PROP_VALUE);
  }
}
| 2,713 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common/impl/TestCircularDependency.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
@Test(groups = { "gobblin.config.common.impl" })
public class TestCircularDependency {

  private final String version = "V1.0";

  /** Registers the given children for {@code parent} on the mocked {@link ConfigStore}. */
  private static void addConfigStoreChildren(ConfigStore mockup, String version, ConfigKeyPath parent,
      ConfigKeyPath... configKeyPaths) {
    List<ConfigKeyPath> children = new ArrayList<ConfigKeyPath>();
    for (ConfigKeyPath p : configKeyPaths) {
      children.add(p);
    }
    when(mockup.getChildren(parent, version)).thenReturn(children);
  }

  /** Registers the given own-imports for {@code self} on the mocked {@link ConfigStore}. */
  private static void addConfigStoreImports(ConfigStore mockup, String version, ConfigKeyPath self,
      ConfigKeyPath... configKeyPaths) {
    List<ConfigKeyPath> ownImports = new ArrayList<ConfigKeyPath>();
    for (ConfigKeyPath p : configKeyPaths) {
      ownImports.add(p);
    }
    when(mockup.getOwnImports(self, version)).thenReturn(ownImports);
  }

  /** A node importing itself must be detected as a circular dependency. */
  @Test
  public void testSelfImportSelf() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag);

    // self import self
    addConfigStoreImports(mockConfigStore, version, tag, tag);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    try {
      inMemory.getImportsRecursively(tag);
      Assert.fail("Did not catch expected CircularDependencyException");
    } catch (CircularDependencyException e) {
      Assert.assertTrue(e.getMessage().indexOf("/tag") > 0);
    }
  }

  /** A parent importing its direct child forms an implicit cycle (children import parents). */
  @Test
  public void testSelfImportChild() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
    ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag);
    addConfigStoreChildren(mockConfigStore, version, tag, highPriorityTag);

    // parent import direct child
    addConfigStoreImports(mockConfigStore, version, tag, highPriorityTag);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    try {
      inMemory.getImportsRecursively(tag);
      Assert.fail("Did not catch expected CircularDependencyException");
    } catch (CircularDependencyException e) {
      Assert.assertTrue(e.getMessage().indexOf("/tag/highPriorityTag") > 0 && e.getMessage().indexOf("/tag ") > 0);
    }
  }

  /** A node importing its own descendant forms a cycle through the ancestor chain. */
  @Test
  public void testSelfImportDescendant() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
    ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");
    ConfigKeyPath nertzHighPriorityTag = highPriorityTag.createChild("nertzHighPriorityTag");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag);
    addConfigStoreChildren(mockConfigStore, version, tag, highPriorityTag);
    addConfigStoreChildren(mockConfigStore, version, highPriorityTag, nertzHighPriorityTag);

    // self import descendant
    // formed the loop /tag -> /tag/highPriorityTag/nertzHighPriorityTag -> /tag/highPriorityTag -> /tag
    addConfigStoreImports(mockConfigStore, version, tag, nertzHighPriorityTag);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    try {
      inMemory.getImportsRecursively(tag);
      Assert.fail("Did not catch expected CircularDependencyException");
    } catch (CircularDependencyException e) {
      Assert.assertTrue(e.getMessage().indexOf("/tag/highPriorityTag/nertzHighPriorityTag") > 0
          && e.getMessage().indexOf("/tag/highPriorityTag ") > 0 && e.getMessage().indexOf("/tag ") > 0);
    }
  }

  /** A cycle reached indirectly through a sibling import is still detected. */
  @Test
  public void testSelfIndirectlyImportDescendant() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
    ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");
    ConfigKeyPath nertzHighPriorityTag = highPriorityTag.createChild("nertzHighPriorityTag");
    ConfigKeyPath tag2 = SingleLinkedListConfigKeyPath.ROOT.createChild("tag2");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag, tag2);
    addConfigStoreChildren(mockConfigStore, version, tag, highPriorityTag);
    addConfigStoreChildren(mockConfigStore, version, highPriorityTag, nertzHighPriorityTag);

    // self import descendant
    // formed the loop /tag -> /tag2 -> /tag/highPriorityTag/nertzHighPriorityTag -> /tag/highPriorityTag -> /tag
    addConfigStoreImports(mockConfigStore, version, tag, tag2);
    addConfigStoreImports(mockConfigStore, version, tag2, nertzHighPriorityTag);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    try {
      inMemory.getImportsRecursively(tag);
      Assert.fail("Did not catch expected CircularDependencyException");
    } catch (CircularDependencyException e) {
      Assert.assertTrue(e.getMessage().indexOf("/tag/highPriorityTag/nertzHighPriorityTag") > 0
          && e.getMessage().indexOf("/tag/highPriorityTag ") > 0 && e.getMessage().indexOf("/tag ") > 0
          && e.getMessage().indexOf("/tag2 ") > 0);
    }
  }

  /** A plain import cycle among siblings is detected. */
  @Test
  public void testLoops() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
    ConfigKeyPath subTag1 = tag.createChild("subTag1");
    ConfigKeyPath subTag2 = tag.createChild("subTag2");
    ConfigKeyPath subTag3 = tag.createChild("subTag3");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag);
    addConfigStoreChildren(mockConfigStore, version, tag, subTag1, subTag2, subTag3);

    // self import descendant
    // formed loop /tag/subTag1 -> /tag/subTag2 -> /tag/subTag3 -> /tag/subTag1
    addConfigStoreImports(mockConfigStore, version, subTag1, subTag2);
    addConfigStoreImports(mockConfigStore, version, subTag2, subTag3);
    addConfigStoreImports(mockConfigStore, version, subTag3, subTag1);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    try {
      inMemory.getImportsRecursively(subTag1);
      Assert.fail("Did not catch expected CircularDependencyException");
    } catch (CircularDependencyException e) {
      Assert.assertTrue(e.getMessage().indexOf("/tag/subTag1") > 0 && e.getMessage().indexOf("/tag/subTag2") > 0
          && e.getMessage().indexOf("/tag/subTag3") > 0);
    }
  }

  /** A diamond-shaped (shared but acyclic) import graph must resolve without error. */
  @Test
  public void testNoCircular() {
    ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
    ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");
    ConfigKeyPath nertzHighPriorityTag = highPriorityTag.createChild("nertzHighPriorityTag");
    ConfigKeyPath tag2 = SingleLinkedListConfigKeyPath.ROOT.createChild("tag2");

    ConfigStore mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    addConfigStoreChildren(mockConfigStore, version, SingleLinkedListConfigKeyPath.ROOT, tag, tag2);
    addConfigStoreChildren(mockConfigStore, version, tag, highPriorityTag);
    addConfigStoreChildren(mockConfigStore, version, highPriorityTag, nertzHighPriorityTag);

    // mock up imports, point to same node but without circular
    addConfigStoreImports(mockConfigStore, version, nertzHighPriorityTag, tag2);
    addConfigStoreImports(mockConfigStore, version, tag2, tag);

    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    List<ConfigKeyPath> result = inMemory.getImportsRecursively(nertzHighPriorityTag);
    Assert.assertEquals(result.size(), 4);
    Iterator<ConfigKeyPath> it = result.iterator();
    Assert.assertEquals(it.next(), tag2);
    Assert.assertEquals(it.next(), tag);
    Assert.assertTrue(it.next().isRootPath());
    Assert.assertEquals(it.next(), highPriorityTag);
  }
}
| 2,714 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common/impl/TestSingleLinkedListConfigKeyPath.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.HashSet;
import java.util.Set;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.testng.Assert;
import org.testng.annotations.Test;
@Test(groups = { "gobblin.config.common.impl" })
public class TestSingleLinkedListConfigKeyPath {

  /** The root path renders as "/" and its own name is the empty string. */
  @Test
  public void testRootPath() {
    ConfigKeyPath root = SingleLinkedListConfigKeyPath.ROOT;
    Assert.assertEquals(root.getAbsolutePathString(), "/");
    Assert.assertEquals(root.getOwnPathName(), "");
  }

  /** Asking the root for its parent is rejected. */
  @Test(expectedExceptions = java.lang.UnsupportedOperationException.class)
  public void testGetParentOfRoot() {
    SingleLinkedListConfigKeyPath.ROOT.getParent();
  }

  /** Non-root paths render as slash-joined absolute paths. */
  @Test
  public void testNonRoot() {
    ConfigKeyPath dataNode = SingleLinkedListConfigKeyPath.ROOT.createChild("data");
    Assert.assertEquals(dataNode.getAbsolutePathString(), "/data");

    ConfigKeyPath leaf =
        dataNode.createChild("databases").createChild("identity").createChild("profile");
    Assert.assertEquals(leaf.toString(), "/data/databases/identity/profile");
  }

  /** Structurally-equal paths built from distinct instances obey the equals/hashCode contract. */
  @Test
  public void testHash() {
    ConfigKeyPath base = SingleLinkedListConfigKeyPath.ROOT.createChild("data");
    ConfigKeyPath first = base.createChild("databases").createChild("identity").createChild("profile");
    ConfigKeyPath second = base.createChild("databases").createChild("identity").createChild("profile");

    // Different objects, but equal with matching hash codes.
    Assert.assertFalse(first == second);
    Assert.assertTrue(first.equals(second));
    Assert.assertEquals(first.hashCode(), second.hashCode());

    // Hash-based lookup must therefore find the structural twin.
    Set<ConfigKeyPath> paths = new HashSet<>();
    paths.add(first);
    Assert.assertTrue(paths.contains(second));
  }
}
| 2,715 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common/impl/TestInMemoryTopology.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValue;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
@Test(groups = { "gobblin.config.common.impl" })
public class TestInMemoryTopology {

  private ConfigStore mockConfigStore;
  private final String version = "V1.0";

  // Key paths forming the mocked config-store topology; see setup() for the tree diagram.
  private final ConfigKeyPath data = SingleLinkedListConfigKeyPath.ROOT.createChild("data");
  private final ConfigKeyPath tag = SingleLinkedListConfigKeyPath.ROOT.createChild("tag");
  private final ConfigKeyPath tag2 = SingleLinkedListConfigKeyPath.ROOT.createChild("tag2");
  private final ConfigKeyPath databases = data.createChild("databases");
  private final ConfigKeyPath identity = databases.createChild("identity");
  private final ConfigKeyPath highPriorityTag = tag.createChild("highPriorityTag");
  private final ConfigKeyPath espressoTag = tag.createChild("espressoTag");
  private final ConfigKeyPath nertzTag2 = tag2.createChild("nertzTag2");

  /** Debug helper: dumps every entry of a {@link Config} to stdout. Not used by any assertion. */
  private void printConfig(Config config) {
    Set<Map.Entry<String, ConfigValue>> entrySet = config.entrySet();
    for (Map.Entry<String, ConfigValue> entry : entrySet) {
      System.out.println("key: " + entry.getKey() + ", value: " + entry.getValue());
    }
  }

  @BeforeClass
  public void setup() {
    // Topology of the mocked config store:
    // /
    // |-- data
    // |   `-- databases
    // |       `-- identity
    // |-- tag
    // |   |-- highPriorityTag
    // |   `-- espressoTag
    // `-- tag2
    //     `-- nertzTag2
    mockConfigStore = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(mockConfigStore.getCurrentVersion()).thenReturn(version);

    List<ConfigKeyPath> emptyList = Collections.emptyList();

    // Mock up the parent/children topology.
    List<ConfigKeyPath> rootChildren = new ArrayList<>();
    rootChildren.add(data);
    rootChildren.add(tag);
    rootChildren.add(tag2);
    when(mockConfigStore.getChildren(SingleLinkedListConfigKeyPath.ROOT, version)).thenReturn(rootChildren);

    List<ConfigKeyPath> dataChildren = new ArrayList<>();
    dataChildren.add(databases);
    when(mockConfigStore.getChildren(data, version)).thenReturn(dataChildren);

    List<ConfigKeyPath> databasesChildren = new ArrayList<>();
    databasesChildren.add(identity);
    when(mockConfigStore.getChildren(databases, version)).thenReturn(databasesChildren);
    when(mockConfigStore.getChildren(identity, version)).thenReturn(emptyList);

    List<ConfigKeyPath> tagChildren = new ArrayList<>();
    tagChildren.add(highPriorityTag);
    tagChildren.add(espressoTag);
    when(mockConfigStore.getChildren(tag, version)).thenReturn(tagChildren);
    when(mockConfigStore.getChildren(highPriorityTag, version)).thenReturn(emptyList);
    when(mockConfigStore.getChildren(espressoTag, version)).thenReturn(emptyList);

    List<ConfigKeyPath> tag2Children = new ArrayList<>();
    tag2Children.add(nertzTag2);
    when(mockConfigStore.getChildren(tag2, version)).thenReturn(tag2Children);
    when(mockConfigStore.getChildren(nertzTag2, version)).thenReturn(emptyList);

    // Mock up import links.
    // identity imports espressoTag and highPriorityTag.
    List<ConfigKeyPath> identityImports = new ArrayList<>();
    identityImports.add(espressoTag);
    identityImports.add(highPriorityTag);
    when(mockConfigStore.getOwnImports(identity, version)).thenReturn(identityImports);

    // espressoTag imports nertzTag2.
    List<ConfigKeyPath> espressoImports = new ArrayList<>();
    espressoImports.add(nertzTag2);
    when(mockConfigStore.getOwnImports(espressoTag, version)).thenReturn(espressoImports);

    mockupConfigValues();
  }

  /**
   * Mocks getOwnConfig() for every node reachable from ROOT via a breadth-first walk
   * of the (already mocked) children topology.
   */
  private void mockupConfigValues() {
    // Configuration values for the root node itself.
    Map<String, String> rootMap = new HashMap<>();
    rootMap.put("keyInRoot", "valueInRoot");
    when(mockConfigStore.getOwnConfig(SingleLinkedListConfigKeyPath.ROOT, version))
        .thenReturn(ConfigFactory.parseMap(rootMap));

    // Level-by-level walk over the mocked topology.
    Collection<ConfigKeyPath> currentLevel = mockConfigStore.getChildren(SingleLinkedListConfigKeyPath.ROOT, version);
    while (!currentLevel.isEmpty()) {
      Collection<ConfigKeyPath> nextLevel = new ArrayList<>();
      for (ConfigKeyPath p : currentLevel) {
        mockupConfigValueForKey(p);
        nextLevel.addAll(mockConfigStore.getChildren(p, version));
      }
      currentLevel = nextLevel;
    }
  }

  /**
   * Mocks getOwnConfig() for one node: a shared "generalKey" (value suffixed by the node name,
   * so child values shadow ancestors on resolution) plus a node-unique "keyOf_<name>" entry.
   */
  private void mockupConfigValueForKey(ConfigKeyPath configKey) {
    final String generalKey = "generalKey";
    Map<String, String> valueMap = new HashMap<>();
    // Key present in all nodes.
    valueMap.put(generalKey, "valueOf_" + generalKey + "_" + configKey.getOwnPathName());
    // Key unique to this node.
    valueMap.put("keyOf_" + configKey.getOwnPathName(), "valueOf_" + configKey.getOwnPathName());
    when(mockConfigStore.getOwnConfig(configKey, version)).thenReturn(ConfigFactory.parseMap(valueMap));
  }

  @Test
  public void testNonRootTopology() {
    Assert.assertEquals(mockConfigStore.getCurrentVersion(), version);
    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(this.mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);

    Collection<ConfigKeyPath> result = inMemory.getChildren(data);
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(result.iterator().next(), databases);

    // Test own imports (order matters: as declared in setup()).
    result = inMemory.getOwnImports(identity);
    Assert.assertEquals(result.size(), 2);
    Iterator<ConfigKeyPath> it = result.iterator();
    Assert.assertEquals(it.next(), espressoTag);
    Assert.assertEquals(it.next(), highPriorityTag);

    // Test imports resolved recursively (includes implicit ancestor imports).
    result = inMemory.getImportsRecursively(identity);
    Assert.assertEquals(result.size(), 8);
    Assert.assertEquals(result, Lists.newArrayList(highPriorityTag, tag, SingleLinkedListConfigKeyPath.ROOT,
        espressoTag, nertzTag2, tag2, databases, data));

    // Test direct imported-by.
    result = inMemory.getImportedBy(nertzTag2);
    Assert.assertEquals(result.size(), 1);
    Assert.assertEquals(result.iterator().next(), espressoTag);

    // Test imported-by resolved recursively. The recursive result carries no ordering
    // guarantee, so membership is checked against a set.
    result = inMemory.getImportedByRecursively(nertzTag2);
    Set<ConfigKeyPath> expected = new HashSet<>();
    expected.add(espressoTag);
    expected.add(identity);
    Assert.assertEquals(result.size(), 2);
    it = result.iterator();
    while (it.hasNext()) {
      ConfigKeyPath tmp = it.next();
      Assert.assertTrue(expected.contains(tmp));
      expected.remove(tmp);
    }
  }

  @Test
  public void testNonRootValues() {
    ConfigStoreBackedTopology csTopology = new ConfigStoreBackedTopology(this.mockConfigStore, this.version);
    InMemoryTopology inMemory = new InMemoryTopology(csTopology);
    ConfigStoreBackedValueInspector rawValueInspector =
        new ConfigStoreBackedValueInspector(this.mockConfigStore, this.version, inMemory);
    InMemoryValueInspector inMemoryStrongRef = new InMemoryValueInspector(rawValueInspector, true);
    InMemoryValueInspector inMemoryWeakRef = new InMemoryValueInspector(rawValueInspector, false);

    // Values for identity, through the raw inspector and both cached variants.
    checkValuesForIdentity(rawValueInspector);
    checkValuesForIdentity(inMemoryStrongRef);
    checkValuesForIdentity(inMemoryWeakRef);

    // Values for espressoTag, through the raw inspector and both cached variants.
    checkValuesForEspressoTag(rawValueInspector);
    checkValuesForEspressoTag(inMemoryStrongRef);
    checkValuesForEspressoTag(inMemoryWeakRef);

    // Batch lookups.
    Collection<ConfigKeyPath> inputs = new ArrayList<>();
    inputs.add(espressoTag);
    inputs.add(identity);

    Map<ConfigKeyPath, Config> resultMap = rawValueInspector.getOwnConfigs(inputs);
    Assert.assertEquals(resultMap.size(), 2);
    checkValuesForEspressoTagOwnConfig(resultMap.get(espressoTag));
    checkValuesForIdentityOwnConfig(resultMap.get(identity));

    resultMap = rawValueInspector.getResolvedConfigs(inputs);
    Assert.assertEquals(resultMap.size(), 2);
    checkValuesForEspressoTagResolvedConfig(resultMap.get(espressoTag));
    checkValuesForIdentityResolvedConfig(resultMap.get(identity));
  }

  /** Checks both own and resolved config for espressoTag via the given inspector. */
  private void checkValuesForEspressoTag(ConfigStoreValueInspector valueInspector) {
    Config config = valueInspector.getOwnConfig(this.espressoTag);
    checkValuesForEspressoTagOwnConfig(config);
    config = valueInspector.getResolvedConfig(this.espressoTag);
    checkValuesForEspressoTagResolvedConfig(config);
  }

  private void checkValuesForEspressoTagOwnConfig(Config config) {
    Assert.assertEquals(config.getString("keyOf_espressoTag"), "valueOf_espressoTag");
    Assert.assertEquals(config.getString("generalKey"), "valueOf_generalKey_espressoTag");
  }

  private void checkValuesForEspressoTagResolvedConfig(Config config) {
    // Own values win; the rest fall through from imports (nertzTag2, tag2) and ancestors (tag, ROOT).
    Assert.assertEquals(config.getString("keyOf_espressoTag"), "valueOf_espressoTag");
    Assert.assertEquals(config.getString("generalKey"), "valueOf_generalKey_espressoTag");
    Assert.assertEquals(config.getString("keyInRoot"), "valueInRoot");
    Assert.assertEquals(config.getString("keyOf_nertzTag2"), "valueOf_nertzTag2");
    Assert.assertEquals(config.getString("keyOf_tag2"), "valueOf_tag2");
    Assert.assertEquals(config.getString("keyOf_tag"), "valueOf_tag");
  }

  /** Checks both own and resolved config for identity via the given inspector. */
  private void checkValuesForIdentity(ConfigStoreValueInspector valueInspector) {
    Config ownConfig = valueInspector.getOwnConfig(identity);
    checkValuesForIdentityOwnConfig(ownConfig);
    Config resolvedConfig = valueInspector.getResolvedConfig(identity);
    checkValuesForIdentityResolvedConfig(resolvedConfig);
  }

  private void checkValuesForIdentityOwnConfig(Config ownConfig) {
    Assert.assertEquals(ownConfig.entrySet().size(), 2);
    Assert.assertEquals(ownConfig.getString("keyOf_identity"), "valueOf_identity");
    Assert.assertEquals(ownConfig.getString("generalKey"), "valueOf_generalKey_identity");
  }

  private void checkValuesForIdentityResolvedConfig(Config resolvedConfig) {
    // Own values win; the rest come from imports (espressoTag, highPriorityTag, nertzTag2,
    // tag, tag2) and ancestors (databases, data, ROOT).
    Assert.assertEquals(resolvedConfig.getString("keyOf_data"), "valueOf_data");
    Assert.assertEquals(resolvedConfig.getString("keyOf_identity"), "valueOf_identity");
    Assert.assertEquals(resolvedConfig.getString("keyOf_espressoTag"), "valueOf_espressoTag");
    Assert.assertEquals(resolvedConfig.getString("generalKey"), "valueOf_generalKey_identity");
    Assert.assertEquals(resolvedConfig.getString("keyInRoot"), "valueInRoot");
    Assert.assertEquals(resolvedConfig.getString("keyOf_nertzTag2"), "valueOf_nertzTag2");
    Assert.assertEquals(resolvedConfig.getString("keyOf_highPriorityTag"), "valueOf_highPriorityTag");
    Assert.assertEquals(resolvedConfig.getString("keyOf_tag2"), "valueOf_tag2");
    Assert.assertEquals(resolvedConfig.getString("keyOf_tag"), "valueOf_tag");
    Assert.assertEquals(resolvedConfig.getString("keyOf_databases"), "valueOf_databases");
  }
}
| 2,716 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common/impl/TestConfigStoreValueInspector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.apache.gobblin.config.TestEnvironment;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.ConfigFactory;
public class TestConfigStoreValueInspector {

  private final String version = "1.0";

  /** Set by {@link TestEnvironment#setup()}. **/
  public static final String VALUE_INSPECTOR_SYS_PROP_KEY = "sysProp.key1";
  public static final String VALUE_INSPECTOR_SYS_PROP_VALUE = "sysProp.value1";

  /**
   * A config value written as {@code $\u007b?sysProp\u007d} should resolve against the JVM system
   * properties when the inspector resolves the config.
   */
  @Test
  public void testSystemPropertyResolution() {
    ConfigStore store = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(store.getCurrentVersion()).thenReturn(version);
    ConfigStoreTopologyInspector topology = mock(ConfigStoreTopologyInspector.class, Mockito.RETURNS_SMART_NULLS);
    ConfigStoreBackedValueInspector inspector = new ConfigStoreBackedValueInspector(store, version, topology);

    // Node "/a" whose own config references a system property via optional substitution.
    ConfigKeyPath pathA = SingleLinkedListConfigKeyPath.ROOT.createChild("a");
    when(store.getOwnConfig(pathA.getParent(), version)).thenReturn(ConfigFactory.empty());
    when(store.getOwnConfig(pathA, version)).thenReturn(
        ConfigFactory.parseString("configProp = ${?" + VALUE_INSPECTOR_SYS_PROP_KEY + "}"));

    Assert.assertEquals(inspector.getResolvedConfig(pathA).getString("configProp"),
        VALUE_INSPECTOR_SYS_PROP_VALUE);
  }

  /**
   * When a child overrides a key its parent defines, a parent-level self-reference
   * ({@code key2 = $\u007bkey1\u007d}) should resolve to the child's overriding value.
   */
  @Test
  public void testResolveConfigOverridingInChild() {
    ConfigStore store = mock(ConfigStore.class, Mockito.RETURNS_SMART_NULLS);
    when(store.getCurrentVersion()).thenReturn(version);
    ConfigStoreTopologyInspector topology = mock(ConfigStoreTopologyInspector.class, Mockito.RETURNS_SMART_NULLS);
    ConfigStoreBackedValueInspector inspector = new ConfigStoreBackedValueInspector(store, version, topology);

    ConfigKeyPath pathA = SingleLinkedListConfigKeyPath.ROOT.createChild("a");
    ConfigKeyPath pathAB = pathA.createChild("b");

    when(store.getOwnConfig(pathA.getParent(), version)).thenReturn(ConfigFactory.empty());
    // Parent defines key1 and a substitution that points at it.
    when(store.getOwnConfig(pathA, version)).thenReturn(
        ConfigFactory.parseString("key1 = value1InA \n key2 = ${key1}"));
    // Child overrides key1 only.
    when(store.getOwnConfig(pathAB, version)).thenReturn(
        ConfigFactory.parseString("key1 = value1InB"));

    Assert.assertEquals(inspector.getResolvedConfig(pathAB).getString("key2"), "value1InB");
  }
}
| 2,717 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/common/impl/ImportTraverserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
public class ImportTraverserTest {

  /**
   * Test a simple non-tree (diamond-shaped) graph. Check that all expected nodes are
   * included in a traversal, in the correct order, with the shared node visited once.
   */
  @Test
  public void testSimpleGraph() throws Exception {
    // a --> b --> d
    //  \-> c -/
    ListMultimap<String, String> edges = LinkedListMultimap.create();
    edges.put("a", "b");
    edges.put("a", "c");
    edges.put("b", "d");
    edges.put("c", "d");

    ImportTraverser<String> traverser =
        new ImportTraverser<String>(edges::get, CacheBuilder.newBuilder().build());

    List<String> traversal = traverser.traverseGraphRecursively("a");
    Assert.assertEquals(traversal, Lists.newArrayList("b", "d", "c"));

    traversal = traverser.traverseGraphRecursively("b");
    Assert.assertEquals(traversal, Lists.newArrayList("d"));

    traversal = traverser.traverseGraphRecursively("c");
    Assert.assertEquals(traversal, Lists.newArrayList("d"));

    // A sink node has an empty traversal.
    traversal = traverser.traverseGraphRecursively("d");
    Assert.assertEquals(traversal, Lists.newArrayList());
  }

  /**
   * Test a graph with a cycle. Traversing from a node inside the cycle must throw
   * {@link CircularDependencyException}, while a node outside the cycle traverses normally.
   */
  @Test
  public void testGraphWithCycle() throws Exception {
    // a --> b --> d -> e -> f
    //  <- c <-/
    ListMultimap<String, String> edges = LinkedListMultimap.create();
    edges.put("a", "b");
    edges.put("b", "d");
    edges.put("d", "c");
    edges.put("c", "a");
    edges.put("d", "e");
    edges.put("e", "f");

    ImportTraverser<String> traverser =
        new ImportTraverser<String>(edges::get, CacheBuilder.newBuilder().build());

    try {
      traverser.traverseGraphRecursively("a");
      Assert.fail("Expected CircularDependencyException when starting inside the cycle at \"a\"");
    } catch (CircularDependencyException expected) {
      // expected
    }

    try {
      traverser.traverseGraphRecursively("d");
      Assert.fail("Expected CircularDependencyException when starting inside the cycle at \"d\"");
    } catch (CircularDependencyException expected) {
      // expected
    }

    // "e" is downstream of the cycle and only reaches "f", so it traverses cleanly.
    List<String> traversal = traverser.traverseGraphRecursively("e");
    Assert.assertEquals(traversal, Lists.newArrayList("f"));
  }
}
| 2,718 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store/zip/ZipFileConfigStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.zip;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.sun.nio.zipfs.ZipFileSystem;
import com.typesafe.config.Config;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
/**
* Unit tests for {@link ZipFileConfigStore}
*/
@Test
public class ZipFileConfigStoreTest {

  // Store under test, built from the zipStoreTest.zip classpath fixture in setUp().
  private ZipFileConfigStore store;
  private String version = "testVersion";

  // Key paths mirroring the fixture layout documented below.
  private ConfigKeyPath rootPath = SingleLinkedListConfigKeyPath.ROOT;
  private ConfigKeyPath testPath = rootPath.createChild("test");
  private ConfigKeyPath child1Path = testPath.createChild("child1");
  private ConfigKeyPath child2Path = testPath.createChild("child2");

  /**
   * Layout of testing config store:
   * /_CONFIG_STORE
   * /test
   * /child1
   * main.conf (gobblin.test.property = "string2")
   * includes.conf (test/child1)
   * /child2
   * main.conf (gobblin.test.property = "string3")
   * main.conf
   */
  @BeforeClass
  public void setUp() throws URISyntaxException, ConfigStoreCreationException, IOException {
    // Open the zip fixture from the test classpath as a java.nio FileSystem.
    Path path = Paths.get(this.getClass().getClassLoader().getResource("zipStoreTest.zip").getPath());
    FileSystem fs = FileSystems.newFileSystem(path, null);
    // NOTE(review): casts to com.sun.nio.zipfs.ZipFileSystem, a JDK-internal class —
    // presumably required by the ZipFileConfigStore constructor; this breaks on newer
    // JDKs where the package moved to jdk.nio.zipfs. TODO confirm/migrate.
    this.store = new ZipFileConfigStore((ZipFileSystem) fs, path.toUri(), this.version, "_CONFIG_STORE");
  }

  // Each node's own config must match the main.conf documented in the layout above.
  @Test
  public void testGetOwnConfig() {
    Config config1 = this.store.getOwnConfig(this.rootPath, this.version);
    Assert.assertEquals(config1.getString("gobblin.property.test1"), "prop1");
    Assert.assertEquals(config1.getString("gobblin.property.test2"), "prop2");
    Config config2 = this.store.getOwnConfig(this.testPath, this.version);
    Assert.assertEquals(config2.getString("gobblin.test.property"), "string1");
    Config config3 = this.store.getOwnConfig(this.child1Path, this.version);
    Assert.assertEquals(config3.getString("gobblin.test.property"), "string2");
    Config config4 = this.store.getOwnConfig(this.child2Path, this.version);
    Assert.assertEquals(config4.getString("gobblin.test.property"), "string3");
  }

  // child1 declares one import (itself, per its includes.conf); child2 declares none.
  @Test
  public void testGetOwnImports() {
    Collection<ConfigKeyPath> imports1 = this.store.getOwnImports(this.child1Path, this.version);
    Assert.assertEquals(imports1.size(), 1);
    Assert.assertTrue(imports1.contains(this.child1Path));
    Collection<ConfigKeyPath> imports2 = this.store.getOwnImports(this.child2Path, this.version);
    Assert.assertEquals(imports2.size(), 0);
  }

  // Children reported by the store must match the fixture's directory tree.
  @Test
  public void testGetChildren() {
    Collection<ConfigKeyPath> children1 = this.store.getChildren(this.rootPath, this.version);
    Assert.assertEquals(children1.size(), 1);
    Assert.assertTrue(children1.contains(this.testPath));
    Collection<ConfigKeyPath> children2 = this.store.getChildren(this.testPath, this.version);
    Assert.assertEquals(children2.size(), 2);
    Assert.assertTrue(children2.contains(this.child1Path));
    Assert.assertTrue(children2.contains(this.child2Path));
    Collection<ConfigKeyPath> children3 = this.store.getChildren(this.child1Path, this.version);
    Assert.assertEquals(children3.size(), 0);
  }
}
| 2,719 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store/hdfs/SimpleHdfsConfigStoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.typesafe.config.Config;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
import org.apache.gobblin.config.store.deploy.ClasspathConfigSource;
import org.apache.gobblin.config.store.deploy.FsDeploymentConfig;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.PathUtils;
/**
* Unit tests for {@link SimpleHadoopFilesystemConfigStore} and {@link SimpleHDFSConfigStoreFactory}.
*/
@Test(groups = "gobblin.config.store.hdfs")
public class SimpleHdfsConfigStoreTest {
private static final String CONFIG_DIR_NAME = "configDir";
private static final String VERSION = "v1.0";
private static final Path CONFIG_DIR_PATH =
PathUtils.combinePaths(CONFIG_DIR_NAME, SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME, VERSION);
/**Set by {@link TestEnvironment#setup()}**/
public static final String TAG_NAME_SYS_PROP_KEY = "sysProp.tagName1";
public static final String TAG_NAME_SYS_PROP_VALUE = "tag1";
public static final String TAG_NAME_RUNTIME_PROP_KEY = "runtimeProp.tagName2";
public static final String TAG_NAME_RUNTIME_PROP_VALUE = "tag2";
private FileSystem fs;
private SimpleHadoopFilesystemConfigStore _simpleHadoopFilesystemConfigStore;
@BeforeClass
public void setUp() throws URISyntaxException, ConfigStoreCreationException, IOException {
this.fs = FileSystem.getLocal(new Configuration());
this.fs.mkdirs(CONFIG_DIR_PATH);
SimpleLocalHDFSConfigStoreFactory simpleHDFSConfigStoreConfigFactory = new SimpleLocalHDFSConfigStoreFactory();
URI storeURI = getStoreURI(System.getProperty("user.dir") + File.separator + CONFIG_DIR_NAME);
this._simpleHadoopFilesystemConfigStore = simpleHDFSConfigStoreConfigFactory.createConfigStore(storeURI);
this._simpleHadoopFilesystemConfigStore.deploy(new FsDeploymentConfig(new ClasspathConfigSource(new Properties()), VERSION));
}
@Test
public void testGetCurrentVersion() throws IOException {
Assert.assertEquals(this._simpleHadoopFilesystemConfigStore.getCurrentVersion(), VERSION);
String newVersion = "v1.1";
this._simpleHadoopFilesystemConfigStore.deploy(new FsDeploymentConfig(new ClasspathConfigSource(new Properties()), newVersion));
Assert.assertEquals(this._simpleHadoopFilesystemConfigStore.getCurrentVersion(), newVersion);
}
@Test
public void getStoreURI() {
URI storeURI = this._simpleHadoopFilesystemConfigStore.getStoreURI();
Assert.assertEquals(storeURI.getScheme(), SimpleHDFSConfigStoreFactory.SIMPLE_HDFS_SCHEME_PREFIX + "file");
Assert.assertNull(storeURI.getAuthority());
Assert.assertEquals(storeURI.getPath(), System.getProperty("user.dir") + File.separator + CONFIG_DIR_NAME);
}
@Test
public void testGetChildren() throws IOException, URISyntaxException, ConfigStoreCreationException {
String datasetName = "dataset-test-get-children";
String childDatasetName = "childDataset";
Path datasetPath = new Path(CONFIG_DIR_PATH, datasetName);
try {
this.fs.mkdirs(new Path(datasetPath, childDatasetName));
ConfigKeyPath datasetConfigKey = SingleLinkedListConfigKeyPath.ROOT.createChild(datasetName);
Collection<ConfigKeyPath> children = this._simpleHadoopFilesystemConfigStore.getChildren(datasetConfigKey, VERSION);
Assert.assertEquals(children.size(), 1);
Assert.assertEquals(children.iterator().next().getOwnPathName(), childDatasetName);
} finally {
if (this.fs.exists(datasetPath)) {
this.fs.delete(datasetPath, true);
}
}
}
@Test
public void testGetOwnImports() throws IOException, URISyntaxException, ConfigStoreCreationException {
String datasetName = "dataset-test-get-own-imports";
String tagKey1 = "/path/to/tag1";
String tagKey2 = "/path/to/tag2";
Path datasetPath = new Path(CONFIG_DIR_PATH, datasetName);
try {
this.fs.mkdirs(datasetPath);
BufferedWriter writer = new BufferedWriter(
new OutputStreamWriter(this.fs.create(new Path(datasetPath, "includes.conf")), Charsets.UTF_8));
writer.write(tagKey1);
writer.newLine();
writer.write(tagKey2);
writer.close();
ConfigKeyPath datasetConfigKey = SingleLinkedListConfigKeyPath.ROOT.createChild(datasetName);
List<ConfigKeyPath> imports = this._simpleHadoopFilesystemConfigStore.getOwnImports(datasetConfigKey, VERSION);
Assert.assertEquals(imports.size(), 2);
Assert.assertEquals(imports.get(0).getAbsolutePathString(), tagKey2);
Assert.assertEquals(imports.get(1).getAbsolutePathString(), tagKey1);
} finally {
if (this.fs.exists(datasetPath)) {
this.fs.delete(datasetPath, true);
}
}
}
@Test
public void testGetOwnImportsWithRuntimeConfigResolution() throws IOException, URISyntaxException, ConfigStoreCreationException {
String datasetName = "dataset-test-get-own-imports-resolution";
Path datasetPath = new Path(CONFIG_DIR_PATH, datasetName);
Properties prop = new Properties();
prop.put(TAG_NAME_RUNTIME_PROP_KEY, TAG_NAME_RUNTIME_PROP_VALUE);
Optional<Config> runtimeConfig = Optional.fromNullable(ConfigUtils.propertiesToConfig(prop));
try {
this.fs.mkdirs(datasetPath);
BufferedWriter writer = new BufferedWriter(
new OutputStreamWriter(this.fs.create(new Path(datasetPath, "includes.conf")), Charsets.UTF_8));
writer.write("/path/to/${?" + TAG_NAME_RUNTIME_PROP_KEY + "}");
writer.close();
ConfigKeyPath datasetConfigKey = SingleLinkedListConfigKeyPath.ROOT.createChild(datasetName);
List<ConfigKeyPath> imports = this._simpleHadoopFilesystemConfigStore.getOwnImports(datasetConfigKey, VERSION, runtimeConfig);
Assert.assertEquals(imports.size(), 1);
Assert.assertEquals(imports.get(0).getAbsolutePathString(), "/path/to/" + TAG_NAME_RUNTIME_PROP_VALUE);
} finally {
if (this.fs.exists(datasetPath)) {
this.fs.delete(datasetPath, true);
}
}
}
@Test
public void testGetOwnImportsWithResolution() throws IOException, URISyntaxException, ConfigStoreCreationException {
String datasetName = "dataset-test-get-own-imports-resolution";
Path datasetPath = new Path(CONFIG_DIR_PATH, datasetName);
try {
this.fs.mkdirs(datasetPath);
BufferedWriter writer = new BufferedWriter(
new OutputStreamWriter(this.fs.create(new Path(datasetPath, "includes.conf")), Charsets.UTF_8));
writer.write("/path/to/${?" + TAG_NAME_SYS_PROP_KEY + "}");
writer.close();
ConfigKeyPath datasetConfigKey = SingleLinkedListConfigKeyPath.ROOT.createChild(datasetName);
List<ConfigKeyPath> imports = this._simpleHadoopFilesystemConfigStore.getOwnImports(datasetConfigKey, VERSION);
Assert.assertEquals(imports.size(), 1);
Assert.assertEquals(imports.get(0).getAbsolutePathString(), "/path/to/" + TAG_NAME_SYS_PROP_VALUE);
} finally {
if (this.fs.exists(datasetPath)) {
this.fs.delete(datasetPath, true);
}
}
}
@Test
public void testGetOwnConfig() throws ConfigStoreCreationException, URISyntaxException, IOException {
String datasetName = "dataset-test-get-own-config";
Path datasetPath = new Path(CONFIG_DIR_PATH, datasetName);
try {
this.fs.mkdirs(datasetPath);
this.fs.create(new Path(datasetPath, "main.conf")).close();
ConfigKeyPath datasetConfigKey = SingleLinkedListConfigKeyPath.ROOT.createChild(datasetName);
Config config = this._simpleHadoopFilesystemConfigStore.getOwnConfig(datasetConfigKey, VERSION);
Assert.assertTrue(config.isEmpty());
} finally {
if (this.fs.exists(datasetPath)) {
this.fs.delete(datasetPath, true);
}
}
}
@Test(dependsOnMethods = { "testGetCurrentVersion" })
public void testDeploy() throws Exception {
Properties props = new Properties();
props.setProperty(ClasspathConfigSource.CONFIG_STORE_CLASSPATH_RESOURCE_NAME_KEY, "_testDeploy");
this._simpleHadoopFilesystemConfigStore.deploy(new FsDeploymentConfig(new ClasspathConfigSource(props), "2.0"));
Path versionPath = PathUtils.combinePaths(CONFIG_DIR_NAME, SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME, "2.0");
Assert.assertTrue(fs.exists(new Path(versionPath, "dir1")));
Assert.assertTrue(fs.exists(new Path(versionPath, "dir1/f1.conf")));
}
@Test
public void testResolveImports() throws Exception {
  // System-property placeholders inside include paths must be substituted on resolution.
  String tagRef = "${?" + TAG_NAME_SYS_PROP_KEY + "}";
  List<String> unresolved = ImmutableList.of("/path/to/tag0", "/path/to/" + tagRef, tagRef + "/" + tagRef);
  List<String> resolved = SimpleHadoopFilesystemConfigStore.resolveIncludesList(unresolved);
  Assert.assertEquals(resolved, ImmutableList.of("/path/to/tag0", "/path/to/tag1", "tag1/tag1"));
}
@AfterClass
public void tearDown() throws IOException {
  // Remove the shared test config directory if any test created it.
  Path configDir = new Path(CONFIG_DIR_NAME);
  if (this.fs.exists(configDir)) {
    this.fs.delete(configDir, true);
  }
}
// Builds a "simple-file"-scheme store URI pointing at the given local directory.
private URI getStoreURI(String configDir) throws URISyntaxException {
  String scheme = SimpleHDFSConfigStoreFactory.SIMPLE_HDFS_SCHEME_PREFIX + "file";
  return new URI(scheme, "localhost:8080", configDir, "", "");
}
}
| 2,720 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store/hdfs/SimpleHDFSStoreMetadataTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import org.apache.gobblin.configuration.ConfigurationKeys;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableMap;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
/**
 * Unit tests for {@link SimpleHDFSStoreMetadata}, exercising current-version bookkeeping
 * against the local filesystem.
 */
public class SimpleHDFSStoreMetadataTest {

  private static final Path TEST_PATH = new Path("gobblin-config-management/testOutput");

  private final FileSystem localFs;

  public SimpleHDFSStoreMetadataTest() throws Exception {
    this.localFs = FileSystem.get(URI.create(ConfigurationKeys.LOCAL_FS_URI), new Configuration());
  }

  /** Setting a new current version must be reflected by subsequent reads. */
  @Test
  public void testVersionUpdate() throws Exception {
    Path metadataDir = new Path(TEST_PATH, "testVersionUpdate");
    this.localFs.mkdirs(metadataDir);
    SimpleHDFSStoreMetadata storeMetadata = new SimpleHDFSStoreMetadata(this.localFs, metadataDir);

    storeMetadata.setCurrentVersion("1.1");
    Assert.assertEquals(storeMetadata.getCurrentVersion(), "1.1");

    storeMetadata.setCurrentVersion("1.2");
    Assert.assertEquals(storeMetadata.getCurrentVersion(), "1.2");
  }

  /** Bumping the current version must leave previously written metadata keys intact. */
  @Test
  public void testVersionUpdateWithOtherMetadata() throws Exception {
    Path metadataDir = new Path(TEST_PATH, "testVersionUpdateWithOtherMetadata");
    this.localFs.mkdirs(metadataDir);
    SimpleHDFSStoreMetadata storeMetadata = new SimpleHDFSStoreMetadata(this.localFs, metadataDir);

    storeMetadata.writeMetadata(
        ConfigFactory.parseMap(ImmutableMap.of("test.name", "test1", "test.type", "unittest")));
    storeMetadata.setCurrentVersion("1.2");

    Assert.assertEquals(storeMetadata.getCurrentVersion(), "1.2");
    Assert.assertEquals(storeMetadata.readMetadata().getString("test.name"), "test1");
    Assert.assertEquals(storeMetadata.readMetadata().getString("test.type"), "unittest");
  }

  /** Wipe the test output directory both before and after the suite runs. */
  @AfterClass
  @BeforeClass
  public void cleanup() throws Exception {
    this.localFs.delete(TEST_PATH, true);
  }
}
| 2,721 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/test/java/org/apache/gobblin/config/store/hdfs/SimpleHdfsConfigureStoreFactoryTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
/**
* Unit tests for {@link SimpleHDFSConfigStoreFactory}.
*/
@Test(groups = "gobblin.config.store.hdfs", singleThreaded=true)
public class SimpleHdfsConfigureStoreFactoryTest {
/**
 * With no explicit configuration, the factory should create a store rooted at the
 * current working directory, with the factory's own scheme and no authority.
 */
@Test
public void testGetDefaults() throws URISyntaxException, ConfigStoreCreationException, IOException {
Path configStoreDir = new Path(SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME);
FileSystem localFS = FileSystem.getLocal(new Configuration());
try {
// A _CONFIG_STORE marker dir in cwd is what makes default-root discovery succeed.
Assert.assertTrue(localFS.mkdirs(configStoreDir));
DefaultCapableLocalConfigStoreFactory simpleLocalHDFSConfigStoreFactory =
new DefaultCapableLocalConfigStoreFactory();
// URI with scheme only: path/authority are left for the factory to default.
URI configKey = new URI(simpleLocalHDFSConfigStoreFactory.getScheme(), "", "", "", "");
SimpleHadoopFilesystemConfigStore simpleHadoopFilesystemConfigStore = simpleLocalHDFSConfigStoreFactory.createConfigStore(configKey);
Assert
.assertEquals(simpleHadoopFilesystemConfigStore.getStoreURI().getScheme(), simpleLocalHDFSConfigStoreFactory.getScheme());
Assert.assertNull(simpleHadoopFilesystemConfigStore.getStoreURI().getAuthority());
// Default root is the JVM's working directory.
Assert.assertEquals(simpleHadoopFilesystemConfigStore.getStoreURI().getPath(), System.getProperty("user.dir"));
} finally {
localFS.delete(configStoreDir, true);
}
}
/**
 * Exercises the {@code DEFAULT_STORE_URI_KEY} configuration: a valid store root is used
 * as the default URI, while a directory without a config store, or an empty value,
 * must be rejected with an {@link IllegalArgumentException}.
 */
@Test
public void testConfiguration() throws Exception {
FileSystem localFS = FileSystem.getLocal(new Configuration());
Path testRoot = localFS.makeQualified(new Path("testConfiguration"));
Path configRoot = localFS.makeQualified(new Path(testRoot, "dir2"));
Path configStoreRoot = new Path(configRoot,
SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME);
Assert.assertTrue(localFS.mkdirs(configStoreRoot));
try {
// Case 1: default store URI points at a directory containing a config store.
Config confConf1 =
ConfigFactory.empty().withValue(SimpleHDFSConfigStoreFactory.DEFAULT_STORE_URI_KEY,
ConfigValueFactory.fromAnyRef(configRoot.toString()));
DefaultCapableLocalConfigStoreFactory confFactory = new DefaultCapableLocalConfigStoreFactory(confConf1);
Assert.assertNotNull(confFactory.getDefaultStoreURI());
Assert.assertEquals(confFactory.getDefaultStoreURI(), configRoot.toUri());
Assert.assertEquals(confFactory.getPhysicalScheme(), "file");
// Valid path
SimpleHadoopFilesystemConfigStore store1 = confFactory.createConfigStore(new URI("default-file:/d"));
Assert.assertEquals(store1.getStoreURI().getScheme(), confFactory.getScheme());
Assert.assertEquals(store1.getStoreURI().getAuthority(),
confFactory.getDefaultStoreURI().getAuthority());
Assert.assertEquals(store1.getStoreURI().getPath(),
confFactory.getDefaultStoreURI().getPath());
// Invalid path: testRoot itself has no _CONFIG_STORE directory, so it is not a store.
Config confConf2 =
ConfigFactory.empty().withValue(SimpleHDFSConfigStoreFactory.DEFAULT_STORE_URI_KEY,
ConfigValueFactory.fromAnyRef(testRoot.toString()));
try {
new DefaultCapableLocalConfigStoreFactory(confConf2).getDefaultStoreURI();
Assert.fail("Exception expected");
}
catch (IllegalArgumentException e) {
Assert.assertTrue(e.getMessage().contains("is not a config store."));
}
// Empty path: a blank default store URI must also be rejected.
Config confConf3 =
ConfigFactory.empty().withValue(SimpleHDFSConfigStoreFactory.DEFAULT_STORE_URI_KEY,
ConfigValueFactory.fromAnyRef(""));
try {
new DefaultCapableLocalConfigStoreFactory(confConf3).getDefaultStoreURI();
Assert.fail("Exception expected");
}
catch (IllegalArgumentException e) {
Assert.assertTrue(e.getMessage().contains("Default store URI should be non-empty"));
}
}
finally {
localFS.delete(testRoot, true);
}
}
}
| 2,722 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/ConfigStoreBackedTopology.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreWithImportedBy;
import org.apache.gobblin.config.store.api.ConfigStoreWithImportedByRecursively;
import org.apache.gobblin.config.store.api.ConfigStoreWithResolution;
/**
 * A {@link ConfigStoreTopologyInspector} that answers every topology query by delegating to a
 * single underlying {@link ConfigStore} at a fixed version. Queries beyond the base
 * {@link ConfigStore} contract are only supported when the wrapped store implements the
 * corresponding capability interface; otherwise {@link UnsupportedOperationException} is thrown.
 */
public class ConfigStoreBackedTopology implements ConfigStoreTopologyInspector {

  /** Wrapped store that actually holds the topology. */
  private final ConfigStore store;
  /** Store version all queries are pinned to. */
  private final String version;

  public ConfigStoreBackedTopology(ConfigStore cs, String version) {
    this.store = cs;
    this.version = version;
  }

  public ConfigStore getConfigStore() {
    return this.store;
  }

  public String getVersion() {
    return this.version;
  }

  /**
   * {@inheritDoc}.
   *
   * <p>Delegates directly to the wrapped store at the configured version.</p>
   */
  @Override
  public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey) {
    return this.store.getChildren(configKey, this.version);
  }

  /**
   * {@inheritDoc}.
   *
   * <p>Delegates directly to the wrapped store at the configured version.</p>
   */
  @Override
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey) {
    return getOwnImports(configKey, Optional.<Config>absent());
  }

  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
    // Only pass the runtime config down when one was actually supplied.
    return runtimeConfig.isPresent()
        ? this.store.getOwnImports(configKey, this.version, runtimeConfig)
        : this.store.getOwnImports(configKey, this.version);
  }

  /**
   * {@inheritDoc}.
   *
   * <p>Delegates to the wrapped store when it implements {@link ConfigStoreWithImportedBy}.</p>
   *
   * @throws UnsupportedOperationException when the wrapped store lacks that capability
   */
  @Override
  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey) {
    return getImportedBy(configKey, Optional.<Config>absent());
  }

  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
    if (!(this.store instanceof ConfigStoreWithImportedBy)) {
      throw new UnsupportedOperationException("Internal ConfigStore does not support this operation");
    }
    return ((ConfigStoreWithImportedBy) this.store).getImportedBy(configKey, this.version, runtimeConfig);
  }

  /**
   * {@inheritDoc}.
   *
   * <p>Delegates to the wrapped store when it implements {@link ConfigStoreWithResolution}.</p>
   *
   * @throws UnsupportedOperationException when the wrapped store lacks that capability
   */
  @Override
  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey) {
    return getImportsRecursively(configKey, Optional.<Config>absent());
  }

  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
    if (!(this.store instanceof ConfigStoreWithResolution)) {
      throw new UnsupportedOperationException("Internal ConfigStore does not support this operation");
    }
    return ((ConfigStoreWithResolution) this.store).getImportsRecursively(configKey, this.version, runtimeConfig);
  }

  /**
   * {@inheritDoc}.
   *
   * <p>Delegates to the wrapped store when it implements
   * {@link ConfigStoreWithImportedByRecursively}.</p>
   *
   * @throws UnsupportedOperationException when the wrapped store lacks that capability
   */
  @Override
  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey) {
    return getImportedByRecursively(configKey, Optional.<Config>absent());
  }

  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
    if (!(this.store instanceof ConfigStoreWithImportedByRecursively)) {
      throw new UnsupportedOperationException("Internal ConfigStore does not support this operation");
    }
    return ((ConfigStoreWithImportedByRecursively) this.store).getImportedByRecursively(configKey, this.version, runtimeConfig);
  }
}
| 2,723 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/InMemoryTopology.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.UncheckedExecutionException;
import com.typesafe.config.Config;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * A caching {@link ConfigStoreTopologyInspector} that memoizes topology queries (children,
 * imports, imported-by) in in-memory caches, falling back to a wrapped inspector on cache miss.
 *
 * <p>
 * InMemoryTopology will return stale data if the internal config store is Not
 * {@link org.apache.gobblin.config.store.api.ConfigStoreWithStableVersioning}.
 * </p>
 *
 * @author ibuenros
 *
 */
public class InMemoryTopology implements ConfigStoreTopologyInspector {
// Inspector consulted on every cache miss; its answers are memoized in the caches below.
private final ConfigStoreTopologyInspector fallback;
// can not use Guava {@link com.google.common.collect.MultiMap} as MultiMap does not store entry pair if the value is empty
private final Cache<ConfigKeyPath, Collection<ConfigKeyPath>> childrenMap = CacheBuilder.newBuilder().build();
private final Cache<ConfigKeyPath, List<ConfigKeyPath>> ownImportMap = CacheBuilder.newBuilder().build();
// Memoized recursive traversals, populated lazily by ImportTraverser.
private final Cache<ConfigKeyPath, LinkedList<ConfigKeyPath>> recursiveImportMap = CacheBuilder.newBuilder().build();
private final Cache<ConfigKeyPath, LinkedList<ConfigKeyPath>> recursiveImportedByMap = CacheBuilder.newBuilder().build();
private final Cache<ConfigKeyPath, Collection<ConfigKeyPath>> ownImportedByMap = CacheBuilder.newBuilder().build();
@SuppressFBWarnings(value = "IS2_INCONSISTENT_SYNC", justification = "Access is in fact thread safe.")
// Full reverse-import index of the whole store. Stays null until computeImportedByMap()
// builds it, which only happens when the fallback does not support getImportedBy itself.
private ImmutableMultimap<ConfigKeyPath, ConfigKeyPath> fullImportedByMap = null;
public InMemoryTopology(ConfigStoreTopologyInspector fallback) {
this.fallback = fallback;
}
// Builds fullImportedByMap by walking the entire topology breadth-first from ROOT and
// inverting each node's own-imports list. Synchronized and idempotent: the map is
// computed at most once; later calls return immediately.
private synchronized void computeImportedByMap(Optional<Config> runtimeConfig) {
if (this.fullImportedByMap != null) {
return;
}
ImmutableMultimap.Builder<ConfigKeyPath, ConfigKeyPath> importedByBuilder = ImmutableMultimap.builder();
// breath first search the whole topology to build ownImports map and ownImportedByMap
// calls to retrieve cache / set cache if not present
Collection<ConfigKeyPath> currentLevel = this.getChildren(SingleLinkedListConfigKeyPath.ROOT);
List<ConfigKeyPath> rootImports = this.getOwnImports(SingleLinkedListConfigKeyPath.ROOT, runtimeConfig);
Preconditions.checkArgument(rootImports == null || rootImports.size() == 0,
"Root can not import other nodes, otherwise circular dependency will happen");
while (!currentLevel.isEmpty()) {
Collection<ConfigKeyPath> nextLevel = new ArrayList<>();
for (ConfigKeyPath configKeyPath : currentLevel) {
// calls to retrieve cache / set cache if not present
List<ConfigKeyPath> ownImports = this.getOwnImports(configKeyPath, runtimeConfig);
this.ownImportMap.put(configKeyPath, ownImports);
// Invert the edge: each imported path is "imported by" this node.
for (ConfigKeyPath importedPath : ownImports) {
importedByBuilder.put(importedPath, configKeyPath);
}
// calls to retrieve cache / set cache if not present
Collection<ConfigKeyPath> tmp = this.getChildren(configKeyPath);
nextLevel.addAll(tmp);
}
currentLevel = nextLevel;
}
this.fullImportedByMap = importedByBuilder.build();
}
/**
 * {@inheritDoc}.
 *
 * <p>
 * If the result is already in cache, return the result.
 * Otherwise, delegate the functionality to the fallback object
 * </p>
 */
@Override
public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey) {
try {
return this.childrenMap.get(configKey, () -> this.fallback.getChildren(configKey));
} catch (ExecutionException ee) {
throw new RuntimeException(ee);
}
}
/**
 * {@inheritDoc}.
 *
 * <p>
 * If the result is already in cache, return the result.
 * Otherwise, delegate the functionality to the fallback object
 * </p>
 */
@Override
public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey) {
return getOwnImports(configKey, Optional.<Config>absent());
}
@Override
public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
try {
return this.ownImportMap.get(configKey, () -> this.fallback.getOwnImports(configKey, runtimeConfig));
} catch (ExecutionException ee) {
throw new RuntimeException(ee);
}
}
/**
 * {@inheritDoc}.
 *
 * <p>
 * If the result is already in cache, return the result.
 * Otherwise, delegate the functionality to the fallback object.
 *
 * If the fallback did not support this operation, will build the entire topology of the {@link org.apache.gobblin.config.store.api.ConfigStore}
 * using default breath first search.
 * </p>
 */
@Override
public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey) {
return getImportedBy(configKey, Optional.<Config>absent());
}
public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
// Prefer the fully materialized reverse index when a previous call already built it.
if (this.fullImportedByMap != null) {
return this.fullImportedByMap.get(configKey);
}
try {
return this.ownImportedByMap.get(configKey, () -> this.fallback.getImportedBy(configKey, runtimeConfig));
} catch (UncheckedExecutionException exc) {
// Fallback lacks native getImportedBy support: build the reverse index ourselves and retry.
if (exc.getCause() instanceof UnsupportedOperationException) {
computeImportedByMap(runtimeConfig);
return getImportedBy(configKey, runtimeConfig);
} else {
throw new RuntimeException(exc);
}
} catch (ExecutionException ee) {
throw new RuntimeException(ee);
}
}
/**
 * {@inheritDoc}.
 *
 * <p>
 * If the result is already in cache, return the result.
 * Otherwise, delegate the functionality to the fallback object.
 *
 * If the fallback did not support this operation, will build the entire topology of the {@link org.apache.gobblin.config.store.api.ConfigStore}
 * using default breath first search.
 * </p>
 */
@Override
public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey) {
return getImportsRecursively(configKey, Optional.<Config>absent());
}
public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
// DFS over own imports (in reverse declaration order) plus the implicit parent import;
// ROOT terminates the recursion with an empty neighbor list.
return new ImportTraverser<>(key -> {
if (key.isRootPath()) {
return new LinkedList<>();
}
List<ConfigKeyPath> imports = Lists.newArrayList();
imports.addAll(Lists.reverse(getOwnImports(key, runtimeConfig)));
imports.add(key.getParent());
return imports;
}, this.recursiveImportMap).traverseGraphRecursively(configKey);
}
/**
 * {@inheritDoc}.
 *
 * <p>
 * If the result is already in cache, return the result.
 * Otherwise, delegate the functionality to the fallback object.
 *
 * If the fallback did not support this operation, will build the entire topology of the {@link org.apache.gobblin.config.store.api.ConfigStore}
 * using default breath first search.
 * </p>
 */
@Override
public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey) {
return getImportedByRecursively(configKey, Optional.<Config>absent());
}
public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
// Same traversal machinery, following reverse-import (imported-by) edges instead.
return new ImportTraverser<>(key -> Lists.newLinkedList(getImportedBy(key, runtimeConfig)),
this.recursiveImportedByMap).traverseGraphRecursively(configKey);
}
}
| 2,724 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/ImportTraverser.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import com.google.common.base.Throwables;
import com.google.common.cache.Cache;
import com.google.common.util.concurrent.UncheckedExecutionException;
import lombok.RequiredArgsConstructor;
/**
* This class computes a traversal of a graph. Starting at the provided node, it uses the {@link #traversalFunction}
* to generate a DFS traversal of the graph. The traversal is guaranteed to contain each node at most once. If a cycle
* is detected during traversal, a {@link CircularDependencyException} will be thrown.
*
* Note: This class may dead-lock if used concurrently and there are cycles in the traversed graph.
*/
@RequiredArgsConstructor
class ImportTraverser<T> {
/** The function returning the ordered neighbors for the input node. */
private final Function<T, List<T>> traversalFunction;
/** A cache used for storing traversals at various nodes. */
private final Cache<T, LinkedList<T>> traversalCache;
/**
 * Traverse the graph starting at the provided node.
 * @param startingNode starting node.
 * @return a List containing the DFS traversal starting at the node.
 * @throws CircularDependencyException if there is a circular dependency in the loaded traversal.
 */
List<T> traverseGraphRecursively(T startingNode) {
return doTraverseGraphRecursively(startingNode, new NodePath<>(startingNode));
}
// Cache-aware recursion step: returns the memoized traversal for `node`, computing and
// caching it on first request. `nodePath` records the DFS path so cycles can be reported.
private List<T> doTraverseGraphRecursively(T node, NodePath<T> nodePath) {
try {
return this.traversalCache.get(node, () -> computeRecursiveTraversal(node, nodePath));
} catch (ExecutionException | UncheckedExecutionException ee) {
// Cache wraps loader failures; unwrap to the original exception (e.g. cycle detection).
throw unpackExecutionException(ee);
}
}
/**
 * Actually compute the traversal if it is not in the cache.
 */
private LinkedList<T> computeRecursiveTraversal(T node, NodePath<T> nodePath) {
try {
LinkedList<T> imports = new LinkedList<>();
Set<T> alreadyIncludedImports = new HashSet<>();
// Visit neighbors in order; each neighbor contributes itself plus its own traversal.
for (T neighbor : this.traversalFunction.apply(node)) {
nodePath.appendNode(neighbor);
addSubtraversal(neighbor, imports, alreadyIncludedImports, nodePath);
nodePath.popTail();
}
return imports;
} catch (ExecutionException ee) {
throw new RuntimeException(ee);
}
}
/**
 * Add a sub-traversal for a neighboring node.
 */
private void addSubtraversal(T node, LinkedList<T> imports, Set<T> alreadyIncludedImports, NodePath<T> nodePath)
throws ExecutionException {
// Only recurse into `node` if it was not already emitted; its descendants are then
// appended, also deduplicated against everything emitted so far.
if (addNodeIfNotAlreadyIncluded(node, imports, alreadyIncludedImports)) {
for (T inheritedFromParent : doTraverseGraphRecursively(node, nodePath)) {
addNodeIfNotAlreadyIncluded(inheritedFromParent, imports, alreadyIncludedImports);
}
}
}
/**
 * Only add node to traversal if it is not already included in it.
 * @return true if the node was newly added, false if it was a duplicate.
 */
private boolean addNodeIfNotAlreadyIncluded(T thisImport,
LinkedList<T> imports, Set<T> alreadyIncludedImports) {
if (alreadyIncludedImports.contains(thisImport)) {
return false;
}
imports.add(thisImport);
alreadyIncludedImports.add(thisImport);
return true;
}
/**
 * Due to recursive nature of algorithm, we may end up with multiple layers of exceptions. Unpack them.
 */
private RuntimeException unpackExecutionException(Throwable exc) {
while (exc instanceof ExecutionException || exc instanceof UncheckedExecutionException) {
exc = exc.getCause();
}
return Throwables.propagate(exc);
}
/**
 * Stores node path for giving appropriate exception when a cycle is found.
 * The set gives O(1) membership checks; the list preserves path order for error messages.
 * @param <T>
 */
private static class NodePath<T> {
private final Set<T> nodesSet = new HashSet<>();
private final LinkedList<T> nodesList = new LinkedList<T>();
public NodePath(T initialNode) {
this.nodesSet.add(initialNode);
this.nodesList.add(initialNode);
}
// Appends a node to the current DFS path; a repeated node means a cycle.
public void appendNode(T node) {
if (this.nodesSet.contains(node)) {
throw new CircularDependencyException("Found cycle in traversal: " + computePath(node));
}
this.nodesSet.add(node);
this.nodesList.add(node);
}
// Backtracks one step after finishing a neighbor's subtree.
public void popTail() {
T removed = this.nodesList.removeLast();
this.nodesSet.remove(removed);
}
// Renders the cycle as "a -> b -> ... -> a" starting from the repeated node.
private String computePath(T node) {
StringBuilder sb = new StringBuilder();
for (T t : this.nodesList.subList(this.nodesList.indexOf(node), this.nodesList.size())) {
sb.append(t).append(" -> ");
}
sb.append(node);
return sb.toString();
}
}
}
| 2,725 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/CircularDependencyException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
/**
 * Thrown when a circular import chain is detected while following the imports path in a
 * config store (see {@link ImportTraverser}).
 */
public class CircularDependencyException extends RuntimeException {
private static final long serialVersionUID = -164765448729513949L;
public CircularDependencyException(String message) {
super(message);
}
public CircularDependencyException(String message, Throwable cause) {
super(message, cause);
}
}
| 2,726 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/InMemoryValueInspector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import com.google.common.base.Optional;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.typesafe.config.Config;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
/**
 * InMemoryValueInspector provides a caching layer on top of another
 * {@link ConfigStoreValueInspector} for getting the {@link com.typesafe.config.Config} derived
 * from a {@link ConfigStore}.
 *
 * @author mitu
 */
public class InMemoryValueInspector implements ConfigStoreValueInspector {

  /** Fallback inspector consulted on every cache miss. */
  private final ConfigStoreValueInspector valueFallback;
  /** Cache of per-node (own) configs, keyed by config key path. */
  private final Cache<ConfigKeyPath, Config> ownConfigCache;
  /** Cache of fully resolved configs, keyed by config key path. */
  private final Cache<ConfigKeyPath, Config> recursiveConfigCache;

  /**
   * @param valueFallback the fall back {@link ConfigStoreValueInspector} used to fetch the raw
   *     {@link com.typesafe.config.Config} when a value is not cached
   * @param useStrongRef if true, cache values are held with strong references; otherwise soft
   *     references are used so the JVM may reclaim entries under memory pressure
   */
  public InMemoryValueInspector(ConfigStoreValueInspector valueFallback, boolean useStrongRef) {
    this.valueFallback = valueFallback;

    if (useStrongRef) {
      this.ownConfigCache = CacheBuilder.newBuilder().build();
      this.recursiveConfigCache = CacheBuilder.newBuilder().build();
    } else {
      // Soft values let the JVM evict cached configs instead of running out of memory.
      this.ownConfigCache = CacheBuilder.newBuilder().softValues().build();
      this.recursiveConfigCache = CacheBuilder.newBuilder().softValues().build();
    }
  }

  /**
   * {@inheritDoc}.
   *
   * <p>
   * If present in the cache, return the cached {@link com.typesafe.config.Config} for given input;
   * otherwise delegate to the internal {@link ConfigStoreValueInspector} and store the value into
   * the cache.
   * </p>
   */
  @Override
  public Config getOwnConfig(final ConfigKeyPath configKey) {
    try {
      return this.ownConfigCache.get(configKey, new Callable<Config>() {
        @Override
        public Config call() {
          return InMemoryValueInspector.this.valueFallback.getOwnConfig(configKey);
        }
      });
    } catch (ExecutionException e) {
      // The loader does not throw checked exceptions, so this is unexpected; preserve the cause
      // so the underlying failure remains diagnosable.
      throw new RuntimeException("Can not getOwnConfig for " + configKey, e);
    }
  }

  /**
   * {@inheritDoc}.
   *
   * <p>
   * If present in the cache, return the cached {@link com.typesafe.config.Config} for given input;
   * otherwise delegate to the internal {@link ConfigStoreValueInspector} and store the value into
   * the cache.
   * </p>
   */
  @Override
  public Config getResolvedConfig(final ConfigKeyPath configKey) {
    return getResolvedConfig(configKey, Optional.<Config>absent());
  }

  @Override
  public Config getResolvedConfig(final ConfigKeyPath configKey, final Optional<Config> runtimeConfig) {
    try {
      return this.recursiveConfigCache.get(configKey, new Callable<Config>() {
        @Override
        public Config call() {
          return InMemoryValueInspector.this.valueFallback.getResolvedConfig(configKey, runtimeConfig);
        }
      });
    } catch (ExecutionException e) {
      // Unexpected (loader throws no checked exceptions); keep the cause and report the correct
      // operation name.
      throw new RuntimeException("Can not getResolvedConfig for " + configKey, e);
    }
  }

  /**
   * {@inheritDoc}.
   *
   * <p>
   * Serves what it can from the cache, batch-fetches only the missing keys from the fallback, and
   * caches the newly fetched values.
   * </p>
   */
  @Override
  public Map<ConfigKeyPath, Config> getOwnConfigs(Collection<ConfigKeyPath> configKeys) {
    Collection<ConfigKeyPath> configKeysNotInCache = new ArrayList<>();
    Map<ConfigKeyPath, Config> result = new HashMap<>();
    for (ConfigKeyPath configKey : configKeys) {
      Config cachedValue = this.ownConfigCache.getIfPresent(configKey);
      if (cachedValue == null) {
        configKeysNotInCache.add(configKey);
      } else {
        result.put(configKey, cachedValue);
      }
    }

    // Batch-fetch the ConfigKeyPaths which are not in cache and remember them for next time.
    if (!configKeysNotInCache.isEmpty()) {
      Map<ConfigKeyPath, Config> configsFromFallBack = this.valueFallback.getOwnConfigs(configKeysNotInCache);
      this.ownConfigCache.putAll(configsFromFallBack);
      result.putAll(configsFromFallBack);
    }
    return result;
  }

  /**
   * {@inheritDoc}.
   *
   * <p>
   * Serves what it can from the cache, batch-fetches only the missing keys from the fallback, and
   * caches the newly fetched values.
   * </p>
   */
  @Override
  public Map<ConfigKeyPath, Config> getResolvedConfigs(Collection<ConfigKeyPath> configKeys) {
    Collection<ConfigKeyPath> configKeysNotInCache = new ArrayList<>();
    Map<ConfigKeyPath, Config> result = new HashMap<>();
    for (ConfigKeyPath configKey : configKeys) {
      Config cachedValue = this.recursiveConfigCache.getIfPresent(configKey);
      if (cachedValue == null) {
        configKeysNotInCache.add(configKey);
      } else {
        result.put(configKey, cachedValue);
      }
    }

    // Batch-fetch the ConfigKeyPaths which are not in cache and remember them for next time.
    if (!configKeysNotInCache.isEmpty()) {
      Map<ConfigKeyPath, Config> configsFromFallBack = this.valueFallback.getResolvedConfigs(configKeysNotInCache);
      this.recursiveConfigCache.putAll(configsFromFallBack);
      result.putAll(configsFromFallBack);
    }
    return result;
  }
}
| 2,727 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/ConfigStoreBackedValueInspector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.base.Optional;
import com.google.common.collect.Sets;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreWithBatchFetches;
import org.apache.gobblin.config.store.api.ConfigStoreWithResolution;
/**
* ConfigStoreBackedValueInspector always query the underline {@link ConfigStore} to get the freshest
* {@link com.typesafe.config.Config}
* @author mitu
*
*/
public class ConfigStoreBackedValueInspector implements ConfigStoreValueInspector {
private final ConfigStore cs;
private final String version;
private final ConfigStoreTopologyInspector topology;
/**
* @param cs - internal {@link ConfigStore} to retrieve configuration
* @param version - version of the {@link ConfigStore}
* @param topology - corresponding {@link ConfigStoreTopologyInspector} for the input {@link ConfigStore}
*/
public ConfigStoreBackedValueInspector(ConfigStore cs, String version, ConfigStoreTopologyInspector topology) {
this.cs = cs;
this.version = version;
this.topology = topology;
}
public ConfigStore getConfigStore() {
return this.cs;
}
public String getVersion() {
return this.version;
}
/**
* {@inheritDoc}.
*
* <p>
* This implementation simply delegate the functionality to the internal {@link ConfigStore}/version
* </p>
*/
@Override
public Config getOwnConfig(ConfigKeyPath configKey) {
return this.cs.getOwnConfig(configKey, this.version);
}
/**
* {@inheritDoc}.
*
* <p>
* This implementation simply delegate the functionality to the internal {@link ConfigStore}/version
* if the internal {@link ConfigStore} is {@link ConfigStoreWithBatchFetches}, otherwise, will call
* configuration store for each config key path and put the result into {@link Map}
* </p>
*/
@Override
public Map<ConfigKeyPath, Config> getOwnConfigs(Collection<ConfigKeyPath> configKeys) {
if (this.cs instanceof ConfigStoreWithBatchFetches) {
ConfigStoreWithBatchFetches batchStore = (ConfigStoreWithBatchFetches) this.cs;
return batchStore.getOwnConfigs(configKeys, this.version);
}
Map<ConfigKeyPath, Config> result = new HashMap<>();
for (ConfigKeyPath configKey : configKeys) {
result.put(configKey, this.cs.getOwnConfig(configKey, this.version));
}
return result;
}
@SuppressWarnings
private Config getResolvedConfigRecursive(ConfigKeyPath configKey, Set<String> alreadyLoadedPaths) {
return getResolvedConfigRecursive(configKey, alreadyLoadedPaths, Optional.<Config>absent());
}
private Config getResolvedConfigRecursive(ConfigKeyPath configKey, Set<String> alreadyLoadedPaths,
Optional<Config> runtimeConfig) {
if (this.cs instanceof ConfigStoreWithResolution) {
return ((ConfigStoreWithResolution) this.cs).getResolvedConfig(configKey, this.version);
}
if (!alreadyLoadedPaths.add(configKey.getAbsolutePathString())) {
return ConfigFactory.empty();
}
Config initialConfig = this.getOwnConfig(configKey);
if (configKey.isRootPath()) {
return initialConfig;
}
List<ConfigKeyPath> ownImports = this.topology.getOwnImports(configKey, runtimeConfig);
// merge with other configs from imports
if (ownImports != null) {
for (ConfigKeyPath p : ownImports) {
initialConfig =
initialConfig.withFallback(this.getResolvedConfigRecursive(p, alreadyLoadedPaths, runtimeConfig));
}
}
// merge with configs from parent for Non root
initialConfig = initialConfig
.withFallback(this.getResolvedConfigRecursive(configKey.getParent(), alreadyLoadedPaths, runtimeConfig));
return initialConfig;
}
/**
* {@inheritDoc}.
*
* <p>
* This implementation simply delegate the functionality to the internal {@link ConfigStore}/version if
* the internal {@link ConfigStore} is {@link ConfigStoreWithResolution}, otherwise based on {@link ConfigStoreTopologyInspector}
*
* 1. find out all the imports recursively
* 2. resolved the config on the fly
* </p>
*/
public Config getResolvedConfig(ConfigKeyPath configKey, Optional<Config> runtimeConfig) {
return getResolvedConfigRecursive(configKey, Sets.<String>newHashSet(), runtimeConfig)
.withFallback(ConfigFactory.defaultOverrides()).resolve();
}
@Override
public Config getResolvedConfig(ConfigKeyPath configKey) {
return getResolvedConfig(configKey, Optional.<Config>absent());
}
/**
* {@inheritDoc}.
*
* <p>
* This implementation simply delegate the functionality to the internal {@link ConfigStore}/version
* if the internal {@link ConfigStore} is {@link ConfigStoreWithBatchFetches}, otherwise, will call
* configuration store for each config key path and put the result into {@link Map}
* </p>
*/
@Override
public Map<ConfigKeyPath, Config> getResolvedConfigs(Collection<ConfigKeyPath> configKeys) {
if (this.cs instanceof ConfigStoreWithBatchFetches) {
ConfigStoreWithBatchFetches batchStore = (ConfigStoreWithBatchFetches) this.cs;
return batchStore.getResolvedConfigs(configKeys, this.version);
}
Map<ConfigKeyPath, Config> result = new HashMap<>();
for (ConfigKeyPath configKey : configKeys) {
result.put(configKey, this.getResolvedConfig(configKey));
}
return result;
}
}
| 2,728 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/SingleLinkedListConfigKeyPath.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
/**
 * A {@link ConfigKeyPath} implemented as a singly linked chain of path segments, each node
 * pointing at its parent. The shared {@link #ROOT} instance is the only entry point; descendants
 * are minted via {@link #createChild(String)}.
 */
public class SingleLinkedListConfigKeyPath implements ConfigKeyPath {

  public static final String PATH_DELIMETER = "/";
  public static final SingleLinkedListConfigKeyPath ROOT = new SingleLinkedListConfigKeyPath(null, "");

  private final ConfigKeyPath parent;
  private final String ownName;

  // Constructor is private: paths can only be created from ROOT via createChild.
  private SingleLinkedListConfigKeyPath(ConfigKeyPath parentPath, String name) {
    this.parent = parentPath;
    this.ownName = name;
  }

  @Override
  public ConfigKeyPath getParent() {
    if (isRootPath()) {
      throw new UnsupportedOperationException("Can not getParent from Root");
    }
    return this.parent;
  }

  @Override
  public String getOwnPathName() {
    return this.ownName;
  }

  @Override
  public ConfigKeyPath createChild(String childPathName) {
    // A segment must be non-empty and must not embed the path delimiter.
    boolean invalidName = childPathName == null || childPathName.length() == 0
        || childPathName.indexOf(PATH_DELIMETER) >= 0;
    if (invalidName) {
      throw new IllegalArgumentException(
          String.format("Name \"%s\" can not be null/empty string and can not contains the delimiter \"%s\"",
              childPathName, PATH_DELIMETER));
    }
    return new SingleLinkedListConfigKeyPath(this, childPathName);
  }

  @Override
  public String getAbsolutePathString() {
    if (isRootPath()) {
      return getOwnPathName() + PATH_DELIMETER;
    }
    String parentAbsolute = this.parent.getAbsolutePathString();
    // First-level children must not get an extra "/" since the root path already ends with one.
    return this.parent.isRootPath()
        ? parentAbsolute + this.ownName
        : parentAbsolute + PATH_DELIMETER + this.ownName;
  }

  @Override
  public boolean isRootPath() {
    return this == ROOT;
  }

  @Override
  public String toString() {
    return getAbsolutePathString();
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int hash = 1;
    hash = prime * hash + ((this.ownName == null) ? 0 : this.ownName.hashCode());
    hash = prime * hash + ((this.parent == null) ? 0 : this.parent.hashCode());
    return hash;
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    SingleLinkedListConfigKeyPath that = (SingleLinkedListConfigKeyPath) obj;
    boolean sameName = (this.ownName == null) ? (that.ownName == null) : this.ownName.equals(that.ownName);
    boolean sameParent = (this.parent == null) ? (that.parent == null) : this.parent.equals(that.parent);
    return sameName && sameParent;
  }
}
| 2,729 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/ConfigStoreValueInspector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.Collection;
import java.util.Map;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
/**
 * Inspects the {@link com.typesafe.config.Config} values associated with
 * {@link org.apache.gobblin.config.store.api.ConfigKeyPath}s in a {@code ConfigStore}.
 *
 * @author mitu
 */
public interface ConfigStoreValueInspector {

  /**
   * Obtains the configuration properties directly associated with a given config key. These
   * <b>will not</b> include any properties/values which can be obtained from the ancestors or
   * imported config keys.
   *
   * @param configKey the config key path whose properties are needed
   * @return the directly specified configuration in {@link Config} format for the input configKey
   */
  Config getOwnConfig(ConfigKeyPath configKey);

  /**
   * Obtains a {@link Config} object with all implicit and explicit imports resolved, i.e.
   * specified using the {@link Config#withFallback(com.typesafe.config.ConfigMergeable)} API.
   *
   * @param configKey the path of the configuration key to be resolved
   * @return the {@link Config} object associated with the specified config key with all direct
   *         and indirect imports resolved
   */
  Config getResolvedConfig(ConfigKeyPath configKey);

  /**
   * Same as {@link #getResolvedConfig(ConfigKeyPath)}, with an optional runtime {@link Config}
   * made available during resolution.
   */
  Config getResolvedConfig(ConfigKeyPath configKey, Optional<Config> runtimeConfig);

  /**
   * @param configKeys the config keys whose {@link Config} objects are to be fetched
   * @return the Map from each config key to its own {@link com.typesafe.config.Config} object
   */
  Map<ConfigKeyPath, Config> getOwnConfigs(Collection<ConfigKeyPath> configKeys);

  /**
   * @param configKeys the config keys whose {@link Config} objects are to be fetched
   * @return the Map from each config key to its resolved {@link com.typesafe.config.Config} object
   */
  Map<ConfigKeyPath, Config> getResolvedConfigs(Collection<ConfigKeyPath> configKeys);
}
| 2,730 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/common/impl/ConfigStoreTopologyInspector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.common.impl;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
/**
 * The ConfigStoreTopology interface used to describe the topology of a configuration store.
 *
 * Each node in the configuration store is represented as one {@link ConfigKeyPath}, so the topology of
 * any node includes:
 *
 * 1. The link to its parent ( already defined in {@link ConfigKeyPath} )
 * 2. All the direct children of the node.
 * 3. The node's directly outgoing imports links. ( defined as getOwnImports() )
 * 4. Other nodes which directly import the node. ( defined as getImportedBy() )
 * 5. The node's directly and indirectly imported links. ( defined as getImportsRecursively() )
 * 6. Other nodes which directly and indirectly import the node. ( defined as getImportedByRecursively() )
 *
 * @author mitu
 *
 */
public interface ConfigStoreTopologyInspector {

  /**
   * Obtains the direct children config keys for a given config key.
   *
   * @param configKey the config key path whose children are necessary.
   * @return the direct children config key paths
   */
  public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey);

  /**
   * @param configKey the config key path which to get own imports.
   * @return the paths of the directly imported config keys for the specified config key.
   * Note that order is significant: the earlier ConfigKeyPath in the List will have higher priority
   * when resolving configuration conflicts.
   */
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey);

  /**
   * Same as {@link #getOwnImports(ConfigKeyPath)} with an optional runtime config available.
   * NOTE(review): presumably runtimeConfig feeds dynamically computed imports -- confirm with implementations.
   */
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, Optional<Config> runtimeConfig);

  /**
   * Obtains the collection of config keys which import a given config key.
   *
   * @param configKey the config key path which is imported
   * @return The {@link Collection} of paths of the config keys which import the specified config key
   */
  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey);

  /** Same as {@link #getImportedBy(ConfigKeyPath)} with an optional runtime config available. */
  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey, Optional<Config> runtimeConfig);

  /**
   * Obtains the list of config keys which are directly and indirectly imported by the specified
   * config key. The import graph is traversed in depth-first manner. For a given config key,
   * explicit imports are listed before implicit imports from the ancestor keys.
   *
   * @param configKey the path of the config key whose imports are needed
   * @return the paths of the directly and indirectly imported keys, including config keys imported
   * by ancestors. The earlier config key in the list will have higher priority when resolving
   * configuration conflict.
   */
  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey);

  /** Same as {@link #getImportsRecursively(ConfigKeyPath)} with an optional runtime config available. */
  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig);

  /**
   * Obtains all config keys which directly or indirectly import a given config key.
   *
   * @param configKey the path of the config key being imported
   * @return The {@link Collection} of paths of the config keys that directly or indirectly import
   * the specified config key in the specified conf version.
   */
  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey);

  /** Same as {@link #getImportedByRecursively(ConfigKeyPath)} with an optional runtime config available. */
  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey, Optional<Config> runtimeConfig);
}
| 2,731 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/client | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/client/api/ConfigStoreFactoryDoesNotExistsException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client.api;
/**
 * Thrown when no config store factory can be found for a requested URI scheme.
 *
 * @author mitu
 */
public class ConfigStoreFactoryDoesNotExistsException extends Exception {

  private static final long serialVersionUID = -131707505927389860L;

  // Template shared by both constructors; first %s is the scheme, second the reason.
  private static final String MESSAGE_FORMAT =
      "Failed to find the config store factory with scheme: %s, because of: %s ";

  /** The URI scheme for which no factory could be located. */
  private final String scheme;

  public ConfigStoreFactoryDoesNotExistsException(String scheme, String message) {
    super(String.format(MESSAGE_FORMAT, scheme, message));
    this.scheme = scheme;
  }

  public ConfigStoreFactoryDoesNotExistsException(String scheme, Exception e) {
    // Keep the original exception as the cause so the stack trace is preserved.
    super(String.format(MESSAGE_FORMAT, scheme, e.toString()), e);
    this.scheme = scheme;
  }

  /** Returns the URI scheme that had no registered factory. */
  public String getScheme() {
    return this.scheme;
  }
}
| 2,732 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/client | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/client/api/VersionStabilityPolicy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.client.api;
import org.apache.gobblin.annotation.Alpha;
import org.apache.gobblin.config.store.api.ConfigStoreWithStableVersioning;
/**
* This policy specifies the behavior expected by the client application when making repeated
* calls to fetch the configuration object for the same config key and version. This interface
* is closely associated with the {@link ConfigStoreWithStableVersioning} API.
*
* <p>The semantic of each policy is documented with each constant.
*
* <p> Here is the table that summarizes the expected client library behavior depending on the
* VersionStabilityPolicy and ConfigStoreWithStableVersioning support from a store.
* <table>
* <tr><th>VersionStabilityPolicy/ConfigStoreWithStableVersioning</th>
* <th>No</th> <th>Yes</th></tr>
* <tr><th>{@link #CROSS_JVM_STABILITY}</th> <td>ERROR</td> <td>WeakCache</td></tr>
* <tr><th>{@link #STRONG_LOCAL_STABILITY}</th> <td>StrongCache</td> <td>WeakCache</td></tr>
* <tr><th>{@link #WEAK_LOCAL_STABILITY}</th> <td>WeakCache</td> <td>WeakCache</td></tr>
* <tr><th>{@link #READ_FRESHEST}</th> <td>NoCache</td> <td>WeakCache</td></tr>
* </table>
*
* <ul>
* <li>ERROR means that the client library should throw an exception because the requested
* VersionStabilityPolicy cannot be supported</li>
* <li>WeakCache means that the client library may cache in memory configs that have been already
* read for performance reasons and if memory allows it.</li>
* <li>StrongCache means that the client library should always cache in memory the read configs to
* guarantee the requested VersionStabilityPolicy</li>
* <li>NoCache means that the client library should never cache the read configs.</li>
* </ul>
*/
@Alpha
public enum VersionStabilityPolicy {
  /** Reading the same config key and version from different JVMs must return the same result. */
  CROSS_JVM_STABILITY,
  /** Reading the same config key and version from within the same JVM must return the same result. */
  STRONG_LOCAL_STABILITY,
  /**
   * The application does not depend on getting the same config for the same key and version, but
   * the client library may use caching to improve performance. This means that the application
   * may read a stale config if the underlying store does not support stable versioning. */
  WEAK_LOCAL_STABILITY,
  /**
   * The application needs to read the most recent config if the underlying store does not support
   * stable versioning.
   */
  READ_FRESHEST
}
| 2,733 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/DeployableConfigSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import java.io.IOException;
import java.io.InputStream;
import java.util.Set;
import org.apache.gobblin.config.store.api.ConfigStore;
/**
 * An abstraction for accessing configs to be deployed by the {@link ConfigStore}. Implementations
 * locate every config that needs to be deployed and expose each one as an {@link InputStream}.
 */
public interface DeployableConfigSource {

  /**
   * Opens an {@link InputStream} for every config resource that should be deployed.
   *
   * @return a {@link Set} of {@link ConfigStream}s, one per resource to be deployed
   * @throws IOException if the underlying resources can not be read
   */
  Set<ConfigStream> getConfigStreams() throws IOException;
}
| 2,734 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/DeploymentConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.ToString;
/**
 * Holds deployment configuration to be passed to {@link Deployable#deploy(DeploymentConfig)}.
 * Lombok generates the all-args constructor, getters, and {@code toString} for this class.
 */
@AllArgsConstructor
@Getter
@ToString
public class DeploymentConfig {
  /**
   * The {@link DeployableConfigSource} supplying the configs for this deployment.
   */
  private final DeployableConfigSource deployableConfigSource;
  /**
   * Version string under which the configs will be deployed.
   */
  private final String newVersion;
}
| 2,735 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/ConfigStream.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import java.io.InputStream;
import com.google.common.base.Optional;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * A wrapper pairing an optional {@link InputStream} with the path of the resource it was opened
 * from. Lombok generates the all-args constructor and getters.
 */
@AllArgsConstructor
@Getter
public class ConfigStream {
  // Absent when there is no byte content for this path -- TODO confirm (e.g. for directory entries).
  private final Optional<InputStream> inputStream;
  // Path of the config resource this stream was opened from.
  private final String configPath;
}
| 2,736 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/ClasspathConfigSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.reflections.Reflections;
import org.reflections.scanners.ResourcesScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.google.common.collect.Sets;
/**
 * A {@link DeployableConfigSource} that reads the configs to be deployed from the classpath.
 *
 * <p>
 * Callers can set {@link #CONFIG_STORE_CLASSPATH_RESOURCE_NAME_KEY} to the name of the classpath
 * resource under which deployable configs are available. If the property is not set, the default
 * resource name {@code _CONFIG_STORE} is used to search the classpath. Every config file found
 * under <code>classpathRootName</code> will be deployed to the target store with a new version.
 * </p>
 */
public class ClasspathConfigSource implements DeployableConfigSource {

  private static final String DEFAULT_CONFIG_STORE_CLASSPATH_RESOURCE_NAME = "_CONFIG_STORE";

  public static final String CONFIG_STORE_CLASSPATH_RESOURCE_NAME_KEY =
      "gobblin.config.management.store.deploy.classpathresource";

  /** Name of the classpath resource directory that is scanned for deployable configs. */
  private final String classpathRootName;

  /**
   * @param props deployment properties; {@link #CONFIG_STORE_CLASSPATH_RESOURCE_NAME_KEY} may be
   *        set to override the default classpath resource name
   */
  public ClasspathConfigSource(Properties props) {
    this.classpathRootName =
        props.getProperty(CONFIG_STORE_CLASSPATH_RESOURCE_NAME_KEY, DEFAULT_CONFIG_STORE_CLASSPATH_RESOURCE_NAME);
  }

  /**
   * Scans the classpath for {@link #classpathRootName} and returns the paths of all resources
   * found under it.
   */
  private Set<String> getDeployableConfigPaths() {
    ConfigurationBuilder cb =
        new ConfigurationBuilder().setUrls(ClasspathHelper.forClassLoader()).setScanners(new ResourcesScanner())
            .filterInputsBy(new FilterBuilder().include(String.format(".*%s.*", this.classpathRootName)));
    Reflections reflections = new Reflections(cb);
    Pattern pattern = Pattern.compile(".*");
    return reflections.getResources(pattern);
  }

  /**
   * Opens an {@link InputStream} for <code>configPath</code> on the classpath.
   *
   * @return the stream, or {@code null} if the resource cannot be found or opened
   */
  private static InputStream getConfigStream(String configPath) {
    return ClasspathConfigSource.class.getClassLoader().getResourceAsStream(configPath);
  }

  /**
   * Scans the classpath for {@link #classpathRootName} and opens an {@link InputStream} for each
   * resource found under it.
   *
   * {@inheritDoc}
   * @see org.apache.gobblin.config.store.deploy.DeployableConfigSource#getConfigStreams()
   */
  @Override
  public Set<ConfigStream> getConfigStreams() throws IOException {
    Set<ConfigStream> configStreams = Sets.newHashSet();
    for (String configPath : getDeployableConfigPaths()) {
      // fromNullable: getResourceAsStream() returns null for an unreadable resource; Optional.of
      // would throw a NullPointerException here instead of modeling the stream as absent, which
      // is what the Optional in ConfigStream is for.
      configStreams.add(new ConfigStream(Optional.fromNullable(getConfigStream(configPath)),
          StringUtils.substringAfter(Strings.nullToEmpty(configPath), this.classpathRootName + "/")));
    }
    return configStreams;
  }
}
| 2,737 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/Deployable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import java.io.IOException;
import org.apache.gobblin.config.store.api.ConfigStore;
/**
 * Implemented by {@link ConfigStore}s that can be deployed (and rolled back) through
 * {@link StoreDeployer}. Stores that do not implement this interface cannot be deployed to.
 *
 * @param <D> the {@link DeploymentConfig} subtype carrying the settings for a deployment
 */
public interface Deployable<D extends DeploymentConfig> {

  /**
   * Deploys the configs supplied by {@link DeploymentConfig#getDeployableConfigSource()} to this
   * store under the version {@link DeploymentConfig#getNewVersion()}.
   *
   * @param deploymentConfig the settings to use for this deployment
   * @throws IOException if the deployment fails
   */
  void deploy(D deploymentConfig) throws IOException;
}
| 2,738 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/StoreDeployer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import java.net.URI;
import java.util.ServiceLoader;
import lombok.extern.slf4j.Slf4j;
/**
* A tool to deploy configs provided by a {@link DeployableConfigSource} to a {@link ConfigStore}. The deployment
* semantics are defined the {@link ConfigStore} themselves. A {@link ConfigStore} must implement {@link Deployable} for
* the {@link StoreDeployer} to deploy on it. If the {@link ConfigStore} for <code>storeUri</code> does not implement
* {@link Deployable}, the deployment will be a no-op
*/
@Slf4j
public class StoreDeployer {
/**
* Deploy configs in <code>classpathStoreRoot</code> to <code>storeUri</code>
*
* @param storeUri to which confgs are deployed
* @param confgSource The source that provides deployable configs.
* @param version to be used for this deployment
*
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void deploy(URI storeUri, DeployableConfigSource confgSource, String version) throws Exception {
ServiceLoader<ConfigStoreFactory> loader = ServiceLoader.load(ConfigStoreFactory.class);
for (ConfigStoreFactory storeFactory : loader) {
log.info("Found ConfigStore with scheme : " + storeFactory.getScheme());
if (storeUri.getScheme().equals(storeFactory.getScheme())) {
log.info("Using ConfigStore with scheme : " + storeFactory.getScheme());
ConfigStore configStore = storeFactory.createConfigStore(storeUri);
if (configStore instanceof Deployable<?>) {
((Deployable) configStore).deploy(new FsDeploymentConfig(confgSource, version));
} else {
log.error(String.format("Deployment failed. The store %s does not implement %s", storeFactory.getClass(),
Deployable.class.getName()));
}
}
}
}
}
| 2,739 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/deploy/FsDeploymentConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.deploy;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
/**
 * A {@link DeploymentConfig} for stores backed by a Hadoop {@link FileSystem}, adding the
 * permissions to apply to the deployed configs.
 */
@Getter
@ToString
public class FsDeploymentConfig extends DeploymentConfig {

  /**
   * Default permissions for deployed configs: read-execute for everyone, since the config store
   * needs to be accessible by all users.
   */
  public static final FsPermission DEFAULT_STORE_PERMISSIONS = new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE,
      FsAction.READ_EXECUTE);

  /** Permission to be set on the configs being deployed. */
  private final FsPermission storePermissions;

  /**
   * Builds a new {@link FsDeploymentConfig}.
   *
   * @param deployableConfigSource source that provides the deployable configs
   * @param version version to be used for this deployment
   * @param storePermissions permission to apply to the configs being deployed
   */
  public FsDeploymentConfig(@NonNull final DeployableConfigSource deployableConfigSource, @NonNull final String version,
      @NonNull final FsPermission storePermissions) {
    super(deployableConfigSource, version);
    this.storePermissions = storePermissions;
  }

  /**
   * Builds a new {@link FsDeploymentConfig} with the default store permissions
   * {@link #DEFAULT_STORE_PERMISSIONS}.
   *
   * @param deployableConfigSource source that provides the deployable configs
   * @param version version to be used for this deployment
   */
  public FsDeploymentConfig(final DeployableConfigSource deployableConfigSource, final String version) {
    this(deployableConfigSource, version, DEFAULT_STORE_PERMISSIONS);
  }
}
| 2,740 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/zip/SimpleLocalIvyConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.zip;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.util.Properties;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import org.apache.hadoop.fs.Path;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import com.google.common.collect.ImmutableMap;
import com.sun.nio.zipfs.ZipFileSystem;
/**
 * An implementation of {@link ConfigStoreFactory} that takes a locally-existing zip file as the
 * backend of a config store and creates a {@link ZipFileConfigStore} with it.
 *
 * <p>
 * {@link ZipFileConfigStore} has an advantage in encapsulating the config store itself compared to
 * {@link org.apache.gobblin.config.store.hdfs.SimpleHadoopFilesystemConfigStore}, where the latter
 * could, for example, cause a small-file problem on HDFS as the size of the config store grows.
 * </p>
 */
public class SimpleLocalIvyConfigStoreFactory implements ConfigStoreFactory<ZipFileConfigStore> {

  // Version reported by stores created by this factory.
  private String currentVersion;

  // One zip FileSystem per thread: FileSystems.newFileSystem throws
  // FileSystemAlreadyExistsException if the same zip URI is opened twice in a JVM.
  private static ThreadLocal<FileSystem> THREADLOCAL_FS = new ThreadLocal<>();

  static final String STORE_PREFIX_KEY = "storePrefix";
  static final String IVY_SCHEME_PREFIX = "ivy-";

  /**
   * If no version is specified, a default version is assigned, since the primary usage of this
   * class is for testing.
   */
  public SimpleLocalIvyConfigStoreFactory() {
    this.currentVersion = "v1.0";
  }

  public SimpleLocalIvyConfigStoreFactory(String configStoreVersion) {
    this.currentVersion = configStoreVersion;
  }

  @Override
  public String getScheme() {
    return getSchemePrefix() + "file";
  }

  protected String getSchemePrefix() {
    return IVY_SCHEME_PREFIX;
  }

  /**
   * Creates a {@link ZipFileConfigStore} from the zip file named in the URI query.
   *
   * @param configKey config key URI whose query contains {@code storePath}, the physical path to
   *        the zip file, and optionally {@link #STORE_PREFIX_KEY}
   * @throws ConfigStoreCreationException if the zip file cannot be opened as a filesystem
   */
  @Override
  public ZipFileConfigStore createConfigStore(URI configKey)
      throws ConfigStoreCreationException {
    Properties factoryProps = parseUriIntoParameterSet(configKey);
    try {
      // Construct URI as jar for zip file, as "jar" is the scheme for ZipFs.
      URI uri = new URI("jar:file", null, new Path(factoryProps.getProperty("storePath")).toString(), null);
      /** Using threadLocal to avoid {@link java.nio.file.FileSystemAlreadyExistsException} */
      if (THREADLOCAL_FS.get() == null) {
        FileSystem zipFs = FileSystems.newFileSystem(uri, ImmutableMap.of());
        THREADLOCAL_FS.set(zipFs);
      }
      return new ZipFileConfigStore((ZipFileSystem) THREADLOCAL_FS.get(), getBaseURI(configKey), currentVersion,
          factoryProps.getProperty(STORE_PREFIX_KEY, ""));
    } catch (URISyntaxException | IOException e) {
      // Throw the exception this method declares (consistent with IvyConfigStoreFactory) instead
      // of an undeclared RuntimeException with a misleading "classpath" message.
      throw new ConfigStoreCreationException(configKey, e);
    }
  }

  /**
   * Parses the query part of <code>configKey</code> into a {@link Properties} set.
   */
  Properties parseUriIntoParameterSet(URI configKey) {
    Properties factoryProps = new Properties();
    for (NameValuePair param : URLEncodedUtils.parse(configKey, "UTF-8")) {
      factoryProps.setProperty(param.getName(), param.getValue());
    }
    return factoryProps;
  }

  /**
   * Base URI for a config store should be the root of the zip file, so the path part of the URI is
   * replaced with "/".
   */
  URI getBaseURI(URI configKey) throws URISyntaxException {
    return new URI(configKey.getScheme(), configKey.getAuthority(), "/", configKey.getQuery(), configKey.getFragment());
  }
}
| 2,741 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/zip/ZipFileConfigStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.zip;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Stream;
import org.apache.commons.lang.StringUtils;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.PhysicalPathNotExistException;
import org.apache.gobblin.config.store.api.VersionDoesNotExistException;
import org.apache.gobblin.config.store.hdfs.SimpleHadoopFilesystemConfigStore;
import com.google.common.base.Charsets;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.sun.nio.zipfs.ZipFileSystem;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.extern.slf4j.Slf4j;
/**
 * {@link ConfigStore} that uses a zipped file containing all the config store paths.
 *
 * Similar to {@link SimpleHadoopFilesystemConfigStore} but using java APIs instead of Hadoop APIs
 * for the Filesystem to allow reading the file without unzipping.
 *
 * It is assumed that the version passed in the constructor will be the only version used.
 */
@Slf4j
public class ZipFileConfigStore implements ConfigStore {

  private final FileSystem fs;
  private final URI logicalStoreRoot;
  private String version;
  private String storePrefix;

  /**
   * Construct a ZipFileConfigStore
   *
   * @param fs A {@link ZipFileSystem} created using the zip file or jar containing the config store
   * @param logicalStoreRoot URI of this config store's root
   * @param version Config store version to use (only version allowed for lookups is the version passed here)
   * @param storePrefix Prefix to use if all paths in config store are under a parent directory
   */
  public ZipFileConfigStore(ZipFileSystem fs, URI logicalStoreRoot, String version, String storePrefix) {
    Preconditions.checkNotNull(fs);
    Preconditions.checkNotNull(logicalStoreRoot);
    Preconditions.checkNotNull(version);
    this.fs = fs;
    this.logicalStoreRoot = logicalStoreRoot;
    this.version = version;
    this.storePrefix = storePrefix;
  }

  @Override
  public String getCurrentVersion() {
    return this.version;
  }

  @Override
  public URI getStoreURI() {
    return this.logicalStoreRoot;
  }

  /**
   * Retrieves all the children of the given {@link ConfigKeyPath} using {@link Files#walk} to list
   * files (depth 1, i.e. direct children only).
   *
   * @throws PhysicalPathNotExistException if no physical path exists for <code>configKey</code>
   */
  @Override
  public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException {
    Preconditions.checkNotNull(configKey, "configKey cannot be null!");
    Preconditions.checkArgument(version.equals(getCurrentVersion()));
    List<ConfigKeyPath> children = new ArrayList<>();
    Path datasetDir = getDatasetDirForKey(configKey);
    try {
      if (!Files.exists(this.fs.getPath(datasetDir.toString()))) {
        throw new PhysicalPathNotExistException(this.logicalStoreRoot,
            "Cannot find physical location:" + this.fs.getPath(datasetDir.toString()));
      }
      // Files.walk returns a lazily-populated stream backed by open directory resources that
      // must be closed; use try-with-resources to avoid leaking handles into the zip filesystem.
      try (Stream<Path> files = Files.walk(datasetDir, 1)) {
        for (Iterator<Path> it = files.iterator(); it.hasNext();) {
          Path path = it.next();
          if (Files.isDirectory(path) && !path.equals(datasetDir)) {
            children.add(configKey.createChild(StringUtils.removeEnd(path.getName(path.getNameCount() - 1).toString(),
                SingleLinkedListConfigKeyPath.PATH_DELIMETER)));
          }
        }
      }
      return children;
    } catch (IOException e) {
      throw new RuntimeException(String.format("Error while getting children for configKey: \"%s\"", configKey), e);
    }
  }

  /**
   * Retrieves all the {@link ConfigKeyPath}s that are imported by the given {@link ConfigKeyPath}.
   * Similar to {@link SimpleHadoopFilesystemConfigStore#getOwnImports}
   */
  @Override
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version) {
    return getOwnImports(configKey, version, Optional.<Config>absent());
  }

  /**
   * Same as {@link #getOwnImports(ConfigKeyPath, String)}, resolving the includes file against an
   * optional runtime {@link Config}. Returns an empty list when no includes file exists.
   */
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
      throws VersionDoesNotExistException {
    Preconditions.checkNotNull(configKey, "configKey cannot be null!");
    Preconditions.checkArgument(version.equals(getCurrentVersion()));
    List<ConfigKeyPath> configKeyPaths = new ArrayList<>();
    Path datasetDir = getDatasetDirForKey(configKey);
    Path includesFile = this.fs.getPath(datasetDir.toString(), SimpleHadoopFilesystemConfigStore.INCLUDES_CONF_FILE_NAME);
    try {
      if (!Files.exists(includesFile)) {
        return configKeyPaths;
      }
      if (!Files.isDirectory(includesFile)) {
        try (InputStream includesConfInStream = Files.newInputStream(includesFile)) {
          configKeyPaths = SimpleHadoopFilesystemConfigStore.getResolvedConfigKeyPaths(includesConfInStream, runtimeConfig);
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(String.format("Error while getting config for configKey: \"%s\"", configKey), e);
    }
    return configKeyPaths;
  }

  /**
   * Retrieves the {@link Config} for the given {@link ConfigKeyPath}. Similar to
   * {@link SimpleHadoopFilesystemConfigStore#getOwnConfig}. Returns an empty {@link Config} when
   * no main conf file exists for the key.
   */
  @Override
  public Config getOwnConfig(ConfigKeyPath configKey, String version) throws VersionDoesNotExistException {
    Preconditions.checkNotNull(configKey, "configKey cannot be null!");
    Preconditions.checkArgument(version.equals(getCurrentVersion()));
    Path datasetDir = getDatasetDirForKey(configKey);
    Path mainConfFile = this.fs.getPath(datasetDir.toString(), SimpleHadoopFilesystemConfigStore.MAIN_CONF_FILE_NAME);
    try {
      if (!Files.exists(mainConfFile)) {
        return ConfigFactory.empty();
      }
      if (!Files.isDirectory(mainConfFile)) {
        try (InputStream mainConfInputStream = Files.newInputStream(mainConfFile)) {
          return ConfigFactory.parseReader(new InputStreamReader(mainConfInputStream, Charsets.UTF_8));
        }
      }
      return ConfigFactory.empty();
    } catch (IOException e) {
      throw new RuntimeException(String.format("Error while getting config for configKey: \"%s\"", configKey), e);
    }
  }

  /** Resolves the dataset directory for a key inside the zip, honoring the store prefix. */
  private Path getDatasetDirForKey(ConfigKeyPath configKey) throws VersionDoesNotExistException {
    return this.fs.getPath(this.storePrefix, configKey.getAbsolutePathString());
  }
}
| 2,742 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/zip/IvyConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.zip;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Paths;
import java.util.Properties;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import org.apache.gobblin.config.store.hdfs.SimpleHDFSConfigStoreFactory;
import org.apache.gobblin.config.store.hdfs.SimpleHDFSStoreMetadata;
import org.apache.gobblin.config.store.hdfs.SimpleHadoopFilesystemConfigStore;
import org.apache.gobblin.util.DownloadUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import com.sun.nio.zipfs.ZipFileSystem;
/**
 * {@link ConfigStoreFactory} that downloads a jar file containing the config store paths through
 * ivy and creates a {@link ZipFileConfigStore} with it. May be useful to avoid making many HDFS
 * calls for large config stores.
 *
 * An ivy settings file must be present on the classpath named
 * {@link DownloadUtils#IVY_SETTINGS_FILE_NAME}
 */
public class IvyConfigStoreFactory extends SimpleLocalIvyConfigStoreFactory {

  /**
   * Ivy coordinates required for downloading the jar file.
   */
  protected static final String ORG_KEY = "org";
  protected static final String MODULE_KEY = "module";
  protected static final String STORE_PATH_KEY = "storePath";

  @Override
  public String getScheme() {
    return getSchemePrefix() + SimpleHDFSConfigStoreFactory.HDFS_SCHEME_NAME;
  }

  /**
   * Example configKey URI (configuration is passed as part of the query)
   *
   * ivy-hdfs:/<relativePath>?org=<jarOrg>&module=<jarModule>&storePath=/path/to/hdfs/store&storePrefix=_CONFIG_STORE
   *
   * ivy-hdfs: scheme for this factory
   * relativePath: config key path within the jar
   * org/module: org and module of jar containing config store
   * storePath: location of HDFS config store (used for getting current version)
   * storePrefix: prefix to paths in config store
   *
   * @throws ConfigStoreCreationException if the URI is malformed, the download fails, or the
   *         downloaded artifact is not a zip/jar file
   */
  @Override
  public ZipFileConfigStore createConfigStore(URI configKey) throws ConfigStoreCreationException {
    if (!configKey.getScheme().equals(getScheme())) {
      throw new ConfigStoreCreationException(configKey, "Config key URI must have scheme " + getScheme());
    }
    Properties factoryProps = parseUriIntoParameterSet(configKey);
    String jarOrg = factoryProps.getProperty(ORG_KEY);
    String jarModule = factoryProps.getProperty(MODULE_KEY);
    String storePath = factoryProps.getProperty(STORE_PATH_KEY);
    if (jarOrg == null || jarModule == null || storePath == null) {
      throw new ConfigStoreCreationException(configKey, "Config key URI must contain org, module, and storePath");
    }
    try {
      // The current store version is read from the HDFS store metadata and used as the ivy
      // revision of the jar to download.
      SimpleHDFSStoreMetadata metadata = new SimpleHDFSStoreMetadata(
          org.apache.hadoop.fs.FileSystem.get(new Configuration()), new Path(storePath,
          SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME));
      String currentVersion = metadata.getCurrentVersion();
      URI[] uris = DownloadUtils.downloadJar(jarOrg, jarModule, currentVersion, false);
      if (uris.length != 1) {
        throw new ConfigStoreCreationException(configKey, "Expected one jar file from URI");
      }
      // Explicit ClassLoader cast: a bare null is ambiguous against newFileSystem(Path, Map)
      // overloads introduced in newer JDKs.
      FileSystem zipFs = FileSystems.newFileSystem(Paths.get(uris[0].getPath()), (ClassLoader) null);
      if (!(zipFs instanceof ZipFileSystem)) {
        try {
          // Best-effort cleanup: don't leak the filesystem handle for an unusable download.
          zipFs.close();
        } catch (IOException ignored) {
          // The filesystem is unusable anyway; the exception below carries the real failure.
        }
        throw new ConfigStoreCreationException(configKey, "Downloaded file must be a zip or jar file");
      }
      return new ZipFileConfigStore((ZipFileSystem) zipFs, getBaseURI(configKey), currentVersion, factoryProps.getProperty(STORE_PREFIX_KEY, ""));
    } catch (IOException | URISyntaxException e) {
      throw new ConfigStoreCreationException(configKey, e);
    }
  }
}
| 2,743 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/DefaultCapableLocalConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.fs.FileSystem;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
/**
 * A {@link SimpleLocalHDFSConfigStoreFactory} whose default config store location is the current
 * user directory (JVM working directory). Uses the scheme "default-file".
 */
public class DefaultCapableLocalConfigStoreFactory extends SimpleLocalHDFSConfigStoreFactory {

  public static final String SCHEME_PREFIX = "default-";

  public DefaultCapableLocalConfigStoreFactory() {
  }

  public DefaultCapableLocalConfigStoreFactory(Config factoryConfig) {
    super(factoryConfig);
  }

  @Override
  protected URI getDefaultRootDir(Config factoryConfig, FileSystem defaultFileSystem,
      Optional<URI> configDefinedDefaultURI) {
    // A config-defined default wins; otherwise fall back to the JVM working directory.
    if (configDefinedDefaultURI.isPresent()) {
      return configDefinedDefaultURI.get();
    }
    try {
      return new URI(System.getProperty("user.dir"));
    } catch (URISyntaxException use) {
      throw new RuntimeException(use);
    }
  }

  @Override
  protected String getSchemePrefix() {
    return SCHEME_PREFIX;
  }
}
| 2,744 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/SimpleHadoopFilesystemConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Strings;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.config.store.api.ConfigStoreCreationException;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import org.apache.gobblin.util.ConfigUtils;
/**
* An abstract base class for {@link ConfigStoreFactory}s based on {@link FileSystem}.
* Subclasses should implement {@link #getPhysicalScheme()}, {@link #getDefaultStoreFs(Config, Optional)} and
* {@link #getDefaultRootDir(Config, FileSystem, Optional)}.
*/
public abstract class SimpleHadoopFilesystemConfigStoreFactory
    implements ConfigStoreFactory<SimpleHadoopFilesystemConfigStore> {

  protected static final String SIMPLE_HDFS_SCHEME_PREFIX = "simple-";

  /** Global namespace for properties if no scope is used */
  public static final String DEFAULT_CONFIG_NAMESPACE = SimpleHDFSConfigStoreFactory.class.getName();

  /** Scoped configuration property holding the URI of the default store. */
  public static final String DEFAULT_STORE_URI_KEY = "default_store_uri";

  private final String physicalScheme;
  private final Config factoryConfig;
  private final URI defaultURI;
  // Lazily initialized: null means "not yet computed"; Optional.absent() means "computed, none available".
  private Optional<FileSystem> defaultFileSystem;
  private Optional<URI> defaultRootDir;

  /** Instantiates a new instance using standard typesafe config defaults:
   * {@link ConfigFactory#load()} */
  public SimpleHadoopFilesystemConfigStoreFactory() {
    this(ConfigUtils.getConfigOrEmpty(ConfigFactory.load(), DEFAULT_CONFIG_NAMESPACE));
  }

  /**
   * Instantiates a new instance of the factory with the specified config. The configuration is
   * expected to be scoped, i.e. the properties should not be prefixed.
   */
  public SimpleHadoopFilesystemConfigStoreFactory(Config factoryConfig) {
    this.physicalScheme = getPhysicalScheme();
    this.factoryConfig = factoryConfig;
    this.defaultURI = computeDefaultURI(this.factoryConfig);
  }

  /**
   * Reads {@link #DEFAULT_STORE_URI_KEY} from the factory config, if present, and parses it as a {@link URI}.
   * Returns null when the key is absent or the URI's scheme does not match this factory's physical scheme.
   *
   * @throws IllegalArgumentException if the configured value is empty or cannot be parsed as a {@link URI}.
   */
  private URI computeDefaultURI(Config factoryConfig) {
    if (factoryConfig.hasPath(DEFAULT_STORE_URI_KEY)) {
      String uriString = factoryConfig.getString(DEFAULT_STORE_URI_KEY);
      if (Strings.isNullOrEmpty(uriString)) {
        throw new IllegalArgumentException("Default store URI should be non-empty");
      }
      try {
        URI uri = new URI(uriString);
        if (uri.getScheme() == null || this.physicalScheme.equals(uri.getScheme())) {
          return uri;
        }
      } catch (URISyntaxException use) {
        // Chain the cause so the underlying parse failure is not lost.
        throw new IllegalArgumentException("Could not use default uri " + uriString, use);
      }
    }
    return null;
  }

  /**
   * Returns the physical scheme this {@link ConfigStoreFactory} is responsible for. To support new HDFS
   * {@link FileSystem} implementations, subclasses should override this method.
   */
  protected abstract String getPhysicalScheme();

  /**
   * Returns the default {@link FileSystem} used for {@link org.apache.gobblin.config.store.api.ConfigStore}s generated by this
   * factory.
   * @param factoryConfig the user supplied factory configuration.
   * @param configDefinedDefaultURI if the user specified a default uri, that uri.
   */
  protected abstract FileSystem getDefaultStoreFs(Config factoryConfig, Optional<URI> configDefinedDefaultURI);

  /**
   * Returns the {@link URI} for the default store created by this factory.
   * @param factoryConfig the user supplied factory configuration.
   * @param defaultFileSystem the default {@link FileSystem} obtained from {@link #getDefaultStoreFs(Config, Optional)}.
   * @param configDefinedDefaultURI if the user specified a default uri, that uri.
   */
  protected abstract URI getDefaultRootDir(Config factoryConfig, FileSystem defaultFileSystem,
      Optional<URI> configDefinedDefaultURI);

  /** Lazily resolves and caches the default store {@link FileSystem}; returns null if none is available. */
  private synchronized FileSystem getDefaultStoreFsLazy() {
    if (this.defaultFileSystem == null) {
      this.defaultFileSystem =
          Optional.fromNullable(getDefaultStoreFs(this.factoryConfig, Optional.fromNullable(this.defaultURI)));
    }
    return this.defaultFileSystem.orNull();
  }

  /** Lazily resolves and caches the default store root {@link URI}; returns null if none is available. */
  private synchronized URI getDefaultStoreURILazy() {
    if (this.defaultRootDir == null) {
      this.defaultRootDir = Optional.fromNullable(computeDefaultStoreURI());
    }
    return this.defaultRootDir.orNull();
  }

  /**
   * Computes the fully-qualified default store URI and validates that it actually points at a config store
   * (i.e. that it contains a {@link SimpleHadoopFilesystemConfigStore#CONFIG_STORE_NAME} directory).
   */
  private URI computeDefaultStoreURI() {
    try {
      if (getDefaultStoreFsLazy() == null) {
        return null;
      }
      URI defaultRoot =
          getDefaultRootDir(this.factoryConfig, getDefaultStoreFsLazy(), Optional.fromNullable(this.defaultURI));
      if (defaultRoot == null) {
        return null;
      }
      Path path = getDefaultStoreFsLazy().makeQualified(new Path(defaultRoot));
      if (!isValidStoreRootPath(getDefaultStoreFsLazy(), path)) {
        throw new IllegalArgumentException(path + " is not a config store.");
      }
      return path.toUri();
    } catch (IOException ioe) {
      throw new RuntimeException("Could not create a default uri for scheme " + getScheme(), ioe);
    }
  }

  /** A path is a valid store root iff it contains a {@link SimpleHadoopFilesystemConfigStore#CONFIG_STORE_NAME} child. */
  private static boolean isValidStoreRootPath(FileSystem fs, Path storeRootPath) throws IOException {
    Path storeRoot = new Path(storeRootPath, SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME);
    return fs.exists(storeRoot);
  }

  @Override
  public String getScheme() {
    return getSchemePrefix() + getPhysicalScheme();
  }

  /**
   * Creates a {@link SimpleHadoopFilesystemConfigStore} for the given {@link URI}. The {@link URI} specified should be the fully
   * qualified path to the dataset in question. For example,
   * {@code simple-hdfs://[authority]:[port][path-to-config-store][path-to-dataset]}. It is important to note that the
   * path to the config store on HDFS must also be specified. The combination
   * {@code [path-to-config-store][path-to-dataset]} need not specify an actual {@link Path} on HDFS.
   *
   * <p>
   * If the {@link URI} does not contain an authority, a default authority and root directory are provided. The
   * default authority is taken from the NameNode {@link URI} the current process is co-located with. The default path
   * is "/user/[current-user]/".
   * </p>
   *
   * @param configKey The URI of the config key that needs to be accessed.
   *
   * @return a {@link SimpleHadoopFilesystemConfigStore} configured with the the given {@link URI}.
   *
   * @throws ConfigStoreCreationException if the {@link SimpleHadoopFilesystemConfigStore} could not be created.
   */
  @Override
  public SimpleHadoopFilesystemConfigStore createConfigStore(URI configKey) throws ConfigStoreCreationException {
    FileSystem fs = createFileSystem(configKey);
    URI physicalStoreRoot = getStoreRoot(fs, configKey);
    URI logicalStoreRoot = URI.create(getSchemePrefix() + physicalStoreRoot);
    return new SimpleHadoopFilesystemConfigStore(fs, physicalStoreRoot, logicalStoreRoot);
  }

  protected String getSchemePrefix() {
    return SIMPLE_HDFS_SCHEME_PREFIX;
  }

  /**
   * Creates a {@link FileSystem} given a user specified configKey.
   */
  private FileSystem createFileSystem(URI configKey) throws ConfigStoreCreationException {
    try {
      return FileSystem.get(createFileSystemURI(configKey), new Configuration());
    } catch (IOException | URISyntaxException e) {
      throw new ConfigStoreCreationException(configKey, e);
    }
  }

  /**
   * Creates a Hadoop FS {@link URI} given a user-specified configKey. If the given configKey does not have an authority,
   * a default one is used instead, provided by the default root path.
   */
  private URI createFileSystemURI(URI configKey) throws URISyntaxException, IOException {
    // Validate the scheme
    String configKeyScheme = configKey.getScheme();
    if (!configKeyScheme.startsWith(getSchemePrefix())) {
      throw new IllegalArgumentException(
          String.format("Scheme for configKey \"%s\" must begin with \"%s\"!", configKey, getSchemePrefix()));
    }
    if (Strings.isNullOrEmpty(configKey.getAuthority())) {
      return new URI(getPhysicalScheme(), getDefaultStoreFsLazy().getUri().getAuthority(), "", "", "");
    }
    // Strip the logical prefix (e.g. "simple-") to recover the physical scheme.
    String uriPhysicalScheme = configKeyScheme.substring(getSchemePrefix().length());
    return new URI(uriPhysicalScheme, configKey.getAuthority(), "", "", "");
  }

  /**
   * This method determines the physical location of the {@link SimpleHadoopFilesystemConfigStore} root directory on HDFS. It does
   * this by taking the {@link URI} given by the user and back-tracing the path. It checks if each parent directory
   * contains the folder {@link SimpleHadoopFilesystemConfigStore#CONFIG_STORE_NAME}. It then assumes this {@link Path} is the root
   * directory.
   *
   * <p>
   * If the given configKey does not have an authority, then this method assumes the given {@link URI#getPath()} does
   * not contain the dataset root. In which case it uses the default root dir (see
   * {@link #getDefaultRootDir(Config, FileSystem, Optional)}) as the root directory. If the default root dir does not
   * contain the {@link SimpleHadoopFilesystemConfigStore#CONFIG_STORE_NAME} then a
   * {@link ConfigStoreCreationException} is thrown.
   * </p>
   */
  private URI getStoreRoot(FileSystem fs, URI configKey) throws ConfigStoreCreationException {
    if (Strings.isNullOrEmpty(configKey.getAuthority())) {
      if (getDefaultStoreURILazy() != null) {
        return getDefaultStoreURILazy();
      } else if (isAuthorityRequired()) {
        throw new ConfigStoreCreationException(configKey, "No default store has been configured.");
      }
    }
    Path path = new Path(configKey.getPath());
    while (path != null) {
      try {
        // the abs URI may point to an unexist path for
        // 1. phantom node
        // 2. as URI did not specify the version
        if (fs.exists(path)) {
          for (FileStatus fileStatus : fs.listStatus(path)) {
            if (fileStatus.isDirectory()
                && fileStatus.getPath().getName().equals(SimpleHadoopFilesystemConfigStore.CONFIG_STORE_NAME)) {
              return fs.getUri().resolve(fileStatus.getPath().getParent().toUri());
            }
          }
        }
      } catch (IOException e) {
        throw new ConfigStoreCreationException(configKey, e);
      }
      path = path.getParent();
    }
    throw new ConfigStoreCreationException(configKey, "Cannot find the store root!");
  }

  protected boolean isAuthorityRequired() {
    return true;
  }

  @VisibleForTesting
  URI getDefaultStoreURI() {
    // The previous null-check ternary was redundant: both branches evaluated to the same value.
    return getDefaultStoreURILazy();
  }
}
| 2,745 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/SimpleLocalHDFSConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
/**
 * Extension of {@link SimpleHadoopFilesystemConfigStoreFactory} that creates a {@link SimpleHadoopFilesystemConfigStore}
 * backed by the local file system.
*/
public class SimpleLocalHDFSConfigStoreFactory extends SimpleHadoopFilesystemConfigStoreFactory {

  private static final String LOCAL_HDFS_SCHEME_NAME = "file";

  public SimpleLocalHDFSConfigStoreFactory() {
  }

  public SimpleLocalHDFSConfigStoreFactory(Config factoryConfig) {
    super(factoryConfig);
  }

  /** Physical scheme handled by this factory: the local filesystem scheme "file". */
  @Override
  protected String getPhysicalScheme() {
    return LOCAL_HDFS_SCHEME_NAME;
  }

  /** Returns the local {@link FileSystem}; the config-defined default URI is not consulted for local stores. */
  @Override
  protected FileSystem getDefaultStoreFs(Config factoryConfig, Optional<URI> configDefinedDefaultURI) {
    try {
      return FileSystem.getLocal(new Configuration());
    } catch (IOException ioe) {
      // Add a context message instead of a bare wrapper so failures are debuggable.
      throw new RuntimeException("Could not obtain the local filesystem for scheme " + LOCAL_HDFS_SCHEME_NAME, ioe);
    }
  }

  @Override
  protected URI getDefaultRootDir(Config factoryConfig, FileSystem defaultFileSystem,
      Optional<URI> configDefinedDefaultURI) {
    // Return null because lack of authority does not indicate that a default root directory should be used
    return null;
  }

  /** Local-filesystem URIs carry no authority component, so one is never required. */
  @Override
  protected boolean isAuthorityRequired() {
    return false;
  }
}
| 2,746 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/SimpleHDFSConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.gobblin.config.store.api.ConfigStoreFactory;
import org.apache.gobblin.util.ConfigUtils;
/**
* An implementation of {@link ConfigStoreFactory} for creating {@link SimpleHadoopFilesystemConfigStore}s. This class only works
* the physical scheme {@link #HDFS_SCHEME_NAME}.
*
* @see SimpleHadoopFilesystemConfigStore
*/
public class SimpleHDFSConfigStoreFactory extends SimpleHadoopFilesystemConfigStoreFactory {

  public static final String HDFS_SCHEME_NAME = "hdfs";

  /** Instantiates a new instance using standard typesafe config defaults:
   * {@link ConfigFactory#load()} */
  public SimpleHDFSConfigStoreFactory() {
    this(ConfigUtils.getConfigOrEmpty(ConfigFactory.load(), DEFAULT_CONFIG_NAMESPACE));
  }

  /**
   * Instantiates a new instance of the factory with the specified config. The configuration is
   * expected to be scoped, i.e. the properties should not be prefixed.
   */
  public SimpleHDFSConfigStoreFactory(Config factoryConfig) {
    super(factoryConfig);
  }

  /**
   * Resolves the default store {@link FileSystem}: the filesystem of the config-defined default URI when one with
   * an authority is given, otherwise the process's default filesystem (only if it is actually HDFS).
   */
  @Override
  protected FileSystem getDefaultStoreFs(Config factoryConfig, Optional<URI> configDefinedDefaultURI) {
    try {
      if (configDefinedDefaultURI.isPresent() && configDefinedDefaultURI.get().getAuthority() != null) {
        return FileSystem.get(configDefinedDefaultURI.get(), new Configuration());
      } else {
        FileSystem fs = FileSystem.get(new Configuration());
        // A non-HDFS default filesystem means there is no usable default store for this factory.
        return HDFS_SCHEME_NAME.equals(fs.getScheme()) ? fs : null;
      }
    } catch (IOException ioe) {
      // Chain the cause; previously it was silently dropped.
      throw new RuntimeException("Could not create default store fs for scheme " + getScheme(), ioe);
    }
  }

  /** Default root is the config-defined URI if present, otherwise the current user's home directory. */
  @Override
  protected URI getDefaultRootDir(Config factoryConfig, FileSystem defaultFileSystem,
      Optional<URI> configDefinedDefaultURI) {
    return configDefinedDefaultURI.or(defaultFileSystem.getHomeDirectory().toUri());
  }

  @Override
  protected String getPhysicalScheme() {
    return HDFS_SCHEME_NAME;
  }
}
| 2,747 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/SimpleHadoopFilesystemConfigStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.gobblin.config.common.impl.SingleLinkedListConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigKeyPath;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.api.ConfigStoreWithStableVersioning;
import org.apache.gobblin.config.store.api.VersionDoesNotExistException;
import org.apache.gobblin.config.store.deploy.ConfigStream;
import org.apache.gobblin.config.store.deploy.Deployable;
import org.apache.gobblin.config.store.deploy.DeployableConfigSource;
import org.apache.gobblin.config.store.deploy.FsDeploymentConfig;
import org.apache.gobblin.util.FileListUtils;
import org.apache.gobblin.util.PathUtils;
import org.apache.gobblin.util.io.SeekableFSInputStream;
import org.apache.gobblin.util.io.StreamUtils;
/**
* An implementation of {@link ConfigStore} backed by HDFS. The class assumes a simple file and directory layout
* structure where each path under the root store directory corresponds to a dataset. The {@link #getStoreURI()} method
* gives an {@link URI} that identifies the HDFS cluster being used, as well as the root directory of the store. When
* querying this store, the scheme should be of the form {@code simple-[hdfs-scheme]} (a.k.a the logical scheme). For
* example, if the store is located on a the local filesystem the scheme should be {@code simple-file}, if the store
* is located on HDFS, the scheme should be {@code simple-hdfs}. This class can be constructed using a
* {@link SimpleHDFSConfigStoreFactory}.
*
* <p>
* The class assumes a directory called {@link #CONFIG_STORE_NAME} is under the root directory. This folder should
* contain a directory for each version deployed to the {@link ConfigStore}. An example directory structure could look
* like: <br>
* <blockquote>
* <code>
* /root<br>
*  /my-simple-store<br>
*   /_CONFIG_STORE<br>
*    /v1.0<br>
*     /dataset1<br>
*      /child-dataset<br>
*       /main.conf<br>
*       /includes.conf<br>
*     /dataset2<br>
*      /main.conf<br>
*      /child-dataset<br>
*       /main.conf<br>
* </code>
* </blockquote>
* </p>
*
* <p>
* In the above example, the root of the store is {@code /root/my-simple-store/}. The code automatically assumes that
* this folder contains a directory named {@link #CONFIG_STORE_NAME}. In order to access the dataset
* {@code dataset1/child-dataset} using ConfigClient#getConfig(URI), the specified {@link URI} should be
* {@code simple-hdfs://[authority]:[port]/root/my-simple-store/dataset1/child-dataset/}. Note this is the fully
* qualified path to the actual {@link #MAIN_CONF_FILE_NAME} file on HDFS, with the {@link #CONFIG_STORE_NAME} and the
* {@code version} directories removed.
* </p>
*
* <p>
* All the {@link Config}s for a dataset should be put in the associated {@link #MAIN_CONF_FILE_NAME} file, and all
* the imports should be put in the associated {@link #INCLUDES_CONF_FILE_NAME} file.
* </p>
*
* <p>
* This class is not responsible for deploying configurations from an external source to HDFS, only for reading them.
* </p>
*
* @see SimpleHDFSConfigStoreFactory
*/
@Slf4j
@ConfigStoreWithStableVersioning
public class SimpleHadoopFilesystemConfigStore implements ConfigStore, Deployable<FsDeploymentConfig> {
public static final String CONFIG_STORE_NAME = "_CONFIG_STORE";
public static final String MAIN_CONF_FILE_NAME = "main.conf";
public static final String INCLUDES_CONF_FILE_NAME = "includes.conf";
private static final String INCLUDES_KEY_NAME = "includes";
private final FileSystem fs;
private final URI physicalStoreRoot;
private final URI logicalStoreRoot;
private final Cache<String, Path> versions;
private final SimpleHDFSStoreMetadata storeMetadata;
/**
 * Constructs a {@link SimpleHadoopFilesystemConfigStore} using a given {@link FileSystem} and a {@link URI} that points to the
 * physical location of the store root.
 *
 * @param fs the {@link FileSystem} the {@link ConfigStore} is stored on.
 * @param physicalStoreRoot the fully qualified {@link URI} of the physical store root, the {@link URI#getScheme()} of the
 * {@link URI} should match the {@link FileSystem#getScheme()} of the given {@link FileSystem}.
 * @param logicalStoreRoot the fully qualified {@link URI} of the logical store root
 *
 * @throws NullPointerException if any argument is null.
 * @throws IllegalArgumentException if the physical root has no scheme, its scheme does not match the
 * filesystem's scheme, or it has no path component.
 */
protected SimpleHadoopFilesystemConfigStore(FileSystem fs, URI physicalStoreRoot, URI logicalStoreRoot) {
Preconditions.checkNotNull(fs, "fs cannot be null!");
Preconditions.checkNotNull(physicalStoreRoot, "physicalStoreRoot cannot be null!");
Preconditions.checkNotNull(logicalStoreRoot, "logicalStoreRoot cannot be null!");
this.fs = fs;
// The physical root must carry a scheme matching the supplied filesystem and a non-empty path.
Preconditions.checkArgument(!Strings.isNullOrEmpty(physicalStoreRoot.getScheme()),
"The physicalStoreRoot must have a valid scheme!");
Preconditions.checkArgument(physicalStoreRoot.getScheme().equals(fs.getUri().getScheme()),
"The scheme of the physicalStoreRoot and the filesystem must match!");
Preconditions.checkArgument(!Strings.isNullOrEmpty(physicalStoreRoot.getPath()),
"The path of the physicalStoreRoot must be valid as it is the root of the store!");
this.physicalStoreRoot = physicalStoreRoot;
this.logicalStoreRoot = logicalStoreRoot;
// Per-version cache of resolved version-root paths; populated lazily (see VersionRootLoader).
this.versions = CacheBuilder.newBuilder().build();
// Store metadata (e.g. the active version) lives under <physicalStoreRoot>/_CONFIG_STORE.
this.storeMetadata = new SimpleHDFSStoreMetadata(fs, new Path(new Path(this.physicalStoreRoot), CONFIG_STORE_NAME));
}
/**
 * Returns the currently-active version of this store, as recorded by the store metadata kept under
 * {@link #CONFIG_STORE_NAME}.
 *
 * @return a {@link String} representing the current active version of the {@link ConfigStore}.
 */
@Override
public String getCurrentVersion() {
  try {
    return this.storeMetadata.getCurrentVersion();
  } catch (IOException ioe) {
    Path configStoreDir = new Path(new Path(this.physicalStoreRoot), CONFIG_STORE_NAME);
    String message = String.format("Error while checking current version for configStoreDir: \"%s\"", configStoreDir);
    throw new RuntimeException(message, ioe);
  }
}
/**
 * Returns the logical store root {@link URI}, whose {@link URI#getPath()} is the path to the root of this
 * {@link ConfigStore} (e.g. simple-hdfs://[authority]:[port][path-to-root]).
 */
@Override
public URI getStoreURI() {
  return logicalStoreRoot;
}
/**
 * Lists the children of the given {@link ConfigKeyPath} by enumerating the sub-directories of the dataset
 * directory it maps to. A non-existent dataset directory yields an empty {@link Collection}.
 *
 * @param configKey the config key path whose children are necessary.
 * @param version specify the configuration version in the configuration store.
 *
 * @return a {@link Collection} of {@link ConfigKeyPath} where each entry is a child of the given configKey.
 *
 * @throws VersionDoesNotExistException if the version specified cannot be found in the {@link ConfigStore}.
 */
@Override
public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey, String version)
    throws VersionDoesNotExistException {
  Preconditions.checkNotNull(configKey, "configKey cannot be null!");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(version), "version cannot be null or empty!");
  Path datasetDir = getDatasetDirForKey(configKey, version);
  List<ConfigKeyPath> childKeys = new ArrayList<>();
  try {
    if (this.fs.exists(datasetDir)) {
      // Each sub-directory of the dataset directory is a child dataset.
      for (FileStatus status : this.fs.listStatus(datasetDir)) {
        if (status.isDirectory()) {
          childKeys.add(configKey.createChild(status.getPath().getName()));
        }
      }
    }
    return childKeys;
  } catch (IOException e) {
    throw new RuntimeException(String.format("Error while getting children for configKey: \"%s\"", configKey), e);
  }
}
/** Delegates to {@link #getOwnImports(ConfigKeyPath, String, Optional)} with no runtime config. */
@Override
public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version) {
  Optional<Config> noRuntimeConfig = Optional.absent();
  return getOwnImports(configKey, version, noRuntimeConfig);
}
/**
 * Retrieves the {@link ConfigKeyPath}s imported by the given {@link ConfigKeyPath}, read from the
 * {@link #INCLUDES_CONF_FILE_NAME} file of the dataset the key maps to. A missing includes file (or a key whose
 * {@link Path} does not exist) yields an empty {@link List}.
 *
 * @param configKey the config key path whose tags are needed
 * @param version the configuration version in the configuration store.
 * @param runtimeConfig optional runtime config used to resolve substitutions inside the includes file.
 *
 * @return a {@link List} of {@link ConfigKeyPath}s where each entry is imported by the dataset of the configKey.
 *
 * @throws VersionDoesNotExistException if the version specified cannot be found in the {@link ConfigStore}.
 */
public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
    throws VersionDoesNotExistException {
  Preconditions.checkNotNull(configKey, "configKey cannot be null!");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(version), "version cannot be null or empty!");
  Path includesFile = new Path(getDatasetDirForKey(configKey, version), INCLUDES_CONF_FILE_NAME);
  List<ConfigKeyPath> importedKeys = new ArrayList<>();
  try {
    if (this.fs.exists(includesFile)) {
      FileStatus includesStatus = this.fs.getFileStatus(includesFile);
      if (!includesStatus.isDirectory()) {
        try (InputStream includesStream = this.fs.open(includesStatus.getPath())) {
          importedKeys.addAll(getResolvedConfigKeyPaths(includesStream, runtimeConfig));
        }
      }
    }
  } catch (IOException e) {
    throw new RuntimeException(String.format("Error while getting config for configKey: \"%s\"", configKey), e);
  }
  return importedKeys;
}
/**
 * Resolves an includes file, supplied as an {@link InputStream}, into a list of {@link ConfigKeyPath}s.
 *
 * <p>The returned includes are used to build a fallback chain. With the natural order, a key found in the first
 * include would not be overridden by later includes; reversing the list lets the Typesafe fallbacks be
 * constructed bottom-up.</p>
 *
 * @param includesConfInStream includes.conf file as an {@link InputStream}
 * @param runtimeConfig optional runtime config used during substitution resolution.
 * @return a {@link List} of resolved ConfigKeyPaths
 */
public static List<ConfigKeyPath> getResolvedConfigKeyPaths(InputStream includesConfInStream, Optional<Config> runtimeConfig)
    throws IOException {
  List<String> resolvedIncludes =
      resolveIncludesList(IOUtils.readLines(includesConfInStream, Charsets.UTF_8), runtimeConfig);
  List<ConfigKeyPath> keyPaths = new ArrayList<>();
  IncludesToConfigKey toConfigKey = new IncludesToConfigKey();
  // Reverse so the fallback chain is assembled bottom-up (earlier includes win).
  for (String include : Lists.reverse(resolvedIncludes)) {
    keyPaths.add(toConfigKey.apply(include));
  }
  return keyPaths;
}
/**
 * Resolves System properties and environment variables in includes paths. The unresolved <code>includes</code>
 * are loaded into an in-memory {@link Config} and resolved with a fallback on
 * {@link ConfigFactory#defaultOverrides()} (and the runtime config, when supplied).
 *
 * @param includes list of unresolved includes
 * @param runtimeConfig optional runtime config consulted as an additional fallback during resolution.
 * @return a list of resolved includes
 */
@VisibleForTesting
public static List<String> resolveIncludesList(List<String> includes, Optional<Config> runtimeConfig) {
  // Render the includes as a HOCON array under the key INCLUDES_KEY_NAME, skipping blanks and '#' comments.
  StringBuilder hocon = new StringBuilder();
  for (String include : includes) {
    if (StringUtils.isNotBlank(include) && !StringUtils.startsWith(include, "#")) {
      hocon.append(INCLUDES_KEY_NAME).append("+=").append(include).append("\n");
    }
  }
  if (hocon.length() == 0) {
    return Collections.emptyList();
  }
  // Build the fallback chain once; the runtime config (when present) sits below defaultOverrides.
  Config unresolved = ConfigFactory.parseString(hocon.toString()).withFallback(ConfigFactory.defaultOverrides());
  if (runtimeConfig.isPresent()) {
    unresolved = unresolved.withFallback(runtimeConfig.get());
  }
  return unresolved.resolve().getStringList(INCLUDES_KEY_NAME);
}
/** Delegates to {@link #resolveIncludesList(List, Optional)} with no runtime config. */
public static List<String> resolveIncludesList(List<String> includes) {
  Optional<Config> noRuntimeConfig = Optional.absent();
  return resolveIncludesList(includes, noRuntimeConfig);
}
/**
 * Reads the {@link Config} for the given {@link ConfigKeyPath} from the {@link #MAIN_CONF_FILE_NAME} file of the
 * dataset the key maps to. A missing main.conf file (or a key whose {@link Path} does not exist) yields an empty
 * {@link Config}.
 *
 * @param configKey the config key path whose properties are needed.
 * @param version the configuration version in the configuration store.
 *
 * @return a {@link Config} for the given configKey.
 *
 * @throws VersionDoesNotExistException if the version specified cannot be found in the {@link ConfigStore}.
 */
@Override
public Config getOwnConfig(ConfigKeyPath configKey, String version) throws VersionDoesNotExistException {
  Preconditions.checkNotNull(configKey, "configKey cannot be null!");
  Preconditions.checkArgument(!Strings.isNullOrEmpty(version), "version cannot be null or empty!");
  Path mainConfFile = new Path(getDatasetDirForKey(configKey, version), MAIN_CONF_FILE_NAME);
  try {
    if (!this.fs.exists(mainConfFile)) {
      return ConfigFactory.empty();
    }
    FileStatus mainConfStatus = this.fs.getFileStatus(mainConfFile);
    if (mainConfStatus.isDirectory()) {
      // A directory named main.conf is not a config file.
      return ConfigFactory.empty();
    }
    try (InputStream mainConfStream = this.fs.open(mainConfStatus.getPath())) {
      return ConfigFactory.parseReader(new InputStreamReader(mainConfStream, Charsets.UTF_8));
    }
  } catch (IOException e) {
    throw new RuntimeException(String.format("Error while getting config for configKey: \"%s\"", configKey), e);
  }
}
/**
 * Maps a {@link ConfigKeyPath} and version to the dataset directory holding its {@link #MAIN_CONF_FILE_NAME},
 * {@link #INCLUDES_CONF_FILE_NAME} and any child datasets. A blank dataset name maps to the version root itself.
 */
private Path getDatasetDirForKey(ConfigKeyPath configKey, String version) throws VersionDoesNotExistException {
  String dataset = getDatasetFromConfigKey(configKey);
  Path versionRoot = getVersionRoot(version);
  return StringUtils.isBlank(dataset) ? versionRoot : new Path(versionRoot, dataset);
}
/**
 * Extracts the dataset name from a {@link ConfigKeyPath}, relative to the store root (i.e. with the leading
 * path delimiter stripped).
 */
private static String getDatasetFromConfigKey(ConfigKeyPath configKey) {
  String absolutePath = configKey.getAbsolutePathString();
  return StringUtils.removeStart(absolutePath, SingleLinkedListConfigKeyPath.PATH_DELIMETER);
}
/**
* Constructs a {@link Path} that points to the location of the given version of the {@link ConfigStore} on HDFS. If
* this {@link Path} does not exist, a {@link VersionDoesNotExistException} is thrown.
*/
private Path getVersionRoot(String version) throws VersionDoesNotExistException {
try {
return this.versions.get(version, new VersionRootLoader(version));
} catch (ExecutionException e) {
throw new RuntimeException(
String.format("Error while checking if version \"%s\" for store \"%s\" exists", version, getStoreURI()), e);
}
}
/**
* Implementation of {@link Callable} that finds the root {@link Path} of a specified version. To be used in
* conjunction with the {@link #versions} cache.
*/
@AllArgsConstructor
private class VersionRootLoader implements Callable<Path> {
private String version;
@Override
public Path call() throws IOException {
Path versionRootPath = PathUtils.combinePaths(SimpleHadoopFilesystemConfigStore.this.physicalStoreRoot.toString(),
CONFIG_STORE_NAME, this.version);
if (SimpleHadoopFilesystemConfigStore.this.fs.isDirectory(versionRootPath)) {
return versionRootPath;
}
throw new VersionDoesNotExistException(getStoreURI(), this.version,
String.format("Cannot find specified version under root %s", versionRootPath));
}
}
/**
* Implementation of {@link Function} that translates a {@link String} in an {@link #INCLUDES_CONF_FILE_NAME} file to
* a {@link ConfigKeyPath}.
*/
private static class IncludesToConfigKey implements Function<String, ConfigKeyPath> {
@Override
public ConfigKeyPath apply(String input) {
if (input == null) {
return null;
}
ConfigKeyPath configKey = SingleLinkedListConfigKeyPath.ROOT;
for (String file : Splitter.on(SingleLinkedListConfigKeyPath.PATH_DELIMETER).omitEmptyStrings().split(input)) {
configKey = configKey.createChild(file);
}
return configKey;
}
}
/**
* Deploy configs provided by {@link FsDeploymentConfig#getDeployableConfigSource()} to HDFS.
* For each {@link ConfigStream} returned by {@link DeployableConfigSource#getConfigStreams()}, creates a resource on HDFS.
* <br>
* <ul> Does the following:
* <li> Read {@link ConfigStream}s and write them to HDFS
* <li> Create parent directories of {@link ConfigStream#getConfigPath()} if required
* <li> Set {@link FsDeploymentConfig#getStorePermissions()} to all resourced created on HDFS
* <li> Update current active version in the store metadata file.
* </ul>
*
* <p>
* For example: If "test-root" is a resource in classpath and all resources under it needs to be deployed,
* <br>
* <br>
* <b>In Classpath:</b><br>
* <blockquote> <code>
* test-root<br>
*  /data<br>
*   /set1<br>
*    /main.conf<br>
*  /tag<br>
*   /tag1<br>
*    /main.conf<br>
* </code> </blockquote>
* </p>
*
* <p>
* A new version 2.0.0 {@link FsDeploymentConfig#getNewVersion()} is created on HDFS under <code>this.physicalStoreRoot/_CONFIG_STORE</code>
* <br>
* <br>
* <b>On HDFS after deploy:</b><br>
* <blockquote> <code>
* /_CONFIG_STORE<br>
*  /2.0.0<br>
*   /data<br>
*    /set1<br>
*     /main.conf<br>
*   /tag<br>
*    /tag1<br>
*     /main.conf<br>
* </code> </blockquote>
* </p>
*
*/
@Override
public void deploy(FsDeploymentConfig deploymentConfig) throws IOException {
log.info("Deploying with config : " + deploymentConfig);
Path hdfsconfigStoreRoot = new Path(this.physicalStoreRoot.getPath(), CONFIG_STORE_NAME);
if (!this.fs.exists(hdfsconfigStoreRoot)) {
throw new IOException("Config store root not present at " + this.physicalStoreRoot.getPath());
}
Path hdfsNewVersionPath = new Path(hdfsconfigStoreRoot, deploymentConfig.getNewVersion());
if (!this.fs.exists(hdfsNewVersionPath)) {
this.fs.mkdirs(hdfsNewVersionPath, deploymentConfig.getStorePermissions());
Set<ConfigStream> confStreams = deploymentConfig.getDeployableConfigSource().getConfigStreams();
for (ConfigStream confStream : confStreams) {
String confAtPath = confStream.getConfigPath();
log.info("Copying resource at : " + confAtPath);
Path hdsfConfPath = new Path(hdfsNewVersionPath, confAtPath);
if (!this.fs.exists(hdsfConfPath.getParent())) {
this.fs.mkdirs(hdsfConfPath.getParent());
}
// If an empty directory needs to created it may not have a stream.
if (confStream.getInputStream().isPresent()) {
// Read the resource as a stream from the classpath and write it to HDFS
try (SeekableFSInputStream inputStream = new SeekableFSInputStream(confStream.getInputStream().get());
FSDataOutputStream os = this.fs.create(hdsfConfPath, false)) {
StreamUtils.copy(inputStream, os);
}
}
}
// Set permission for newly copied files
for (FileStatus fileStatus : FileListUtils.listPathsRecursively(this.fs, hdfsNewVersionPath,
FileListUtils.NO_OP_PATH_FILTER)) {
this.fs.setPermission(fileStatus.getPath(), deploymentConfig.getStorePermissions());
}
} else {
log.warn(String.format(
"STORE WITH VERSION %s ALREADY EXISTS. NEW RESOURCES WILL NOT BE COPIED. ONLY STORE MEATADATA FILE WILL BE UPDATED TO %s",
deploymentConfig.getNewVersion(), deploymentConfig.getNewVersion()));
}
this.storeMetadata.setCurrentVersion(deploymentConfig.getNewVersion());
log.info(String.format("New version %s of config store deployed at %s", deploymentConfig.getNewVersion(),
hdfsconfigStoreRoot));
}
  /** Returns the physical root {@link URI} of this store on HDFS; exposed for tests only. */
  @VisibleForTesting
  URI getPhysicalStoreRoot() {
    return this.physicalStoreRoot;
  }
}
| 2,748 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/hdfs/SimpleHDFSStoreMetadata.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.hdfs;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.google.common.base.Charsets;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigRenderOptions;
import com.typesafe.config.ConfigValueFactory;
import org.apache.gobblin.config.store.api.ConfigStore;
import org.apache.gobblin.config.store.deploy.FsDeploymentConfig;
import org.apache.gobblin.util.HadoopUtils;
/**
 * A metadata accessor for an HDFS based {@link ConfigStore}. An HDFS based {@link ConfigStore} has a file named
 * {@link #CONFIG_STORE_METADATA_FILENAME} that contains store metadata as key/value pairs. This class helps adding more
 * key/value pairs to the store metadata file and helps reading key/value pairs from the store metadata file. For
 * instance the current active version of the store is stored at {@link #CONFIG_STORE_METADATA_CURRENT_VERSION_KEY}.
 */
public class SimpleHDFSStoreMetadata {

  private static final String CONFIG_STORE_METADATA_FILENAME = "store-metadata.conf";
  private static final String CONFIG_STORE_METADATA_CURRENT_VERSION_KEY = "config.hdfs.store.version.current";

  // Filesystem holding the metadata file.
  private final FileSystem fs;
  // Full path to the store-metadata.conf file inside the config store directory.
  private final Path storeMetadataFilePath;

  /**
   * Create a new {@link SimpleHDFSStoreMetadata} to read and write store metadata.
   *
   * @param fs filesystem where metadata is stored
   * @param configStoreDir path to {@link SimpleHadoopFilesystemConfigStore#CONFIG_STORE_NAME}
   */
  public SimpleHDFSStoreMetadata(final FileSystem fs, final Path configStoreDir) {
    this.storeMetadataFilePath = new Path(configStoreDir, CONFIG_STORE_METADATA_FILENAME);
    this.fs = fs;
  }

  /**
   * Writes the <code>config</code> to {@link #storeMetadataFilePath}. Creates a backup file at
   * <code>storeMetadataFilePath + ".bkp"</code> to recover old metadata in case of unexpected deployment failures.
   *
   * @param config to be serialized
   * @throws IOException if there was any problem writing the <code>config</code> to the store metadata file.
   */
  void writeMetadata(Config config) throws IOException {
    Path storeMetadataFileBkpPath =
        new Path(this.storeMetadataFilePath.getParent(), this.storeMetadataFilePath.getName() + ".bkp");

    // Delete any stale backup file left behind by an earlier run.
    HadoopUtils.deleteIfExists(this.fs, storeMetadataFileBkpPath, true);

    // Move the current metadata file (if one exists) aside so it can be restored on failure.
    // Track whether a backup was actually made: on the very first write there is nothing to back up,
    // and attempting to restore a non-existent backup below would mask the original failure.
    boolean backupCreated = false;
    if (this.fs.exists(this.storeMetadataFilePath)) {
      HadoopUtils.renamePath(this.fs, this.storeMetadataFilePath, storeMetadataFileBkpPath);
      backupCreated = true;
    }

    // Write the new metadata file.
    try (FSDataOutputStream outputStream =
        FileSystem.create(this.fs, this.storeMetadataFilePath, FsDeploymentConfig.DEFAULT_STORE_PERMISSIONS)) {
      outputStream.write(config.root().render(ConfigRenderOptions.concise()).getBytes(Charsets.UTF_8));
    } catch (Exception e) {
      // Restore from backup (only if one was created above).
      HadoopUtils.deleteIfExists(this.fs, this.storeMetadataFilePath, true);
      if (backupCreated) {
        HadoopUtils.renamePath(this.fs, storeMetadataFileBkpPath, this.storeMetadataFilePath);
      }
      throw new IOException(
          String.format("Failed to write store metadata at %s. Restored existing store metadata file from backup",
              this.storeMetadataFilePath),
          e);
    }
  }

  /**
   * Sets a single key/value pair in the metadata file, preserving all other existing entries.
   * Creates the metadata file if it does not exist yet.
   */
  private void addMetadata(String key, String value) throws IOException {
    Config newConfig;
    if (isStoreMetadataFilePresent()) {
      newConfig = readMetadata().withValue(key, ConfigValueFactory.fromAnyRef(value));
    } else {
      newConfig = ConfigFactory.empty().withValue(key, ConfigValueFactory.fromAnyRef(value));
    }
    writeMetadata(newConfig);
  }

  /**
   * Update the current version of the store in the {@link #CONFIG_STORE_METADATA_FILENAME} file at
   * {@link #storeMetadataFilePath}.
   *
   * @param version to be updated
   */
  void setCurrentVersion(String version) throws IOException {
    addMetadata(CONFIG_STORE_METADATA_CURRENT_VERSION_KEY, version);
  }

  /**
   * Get the current version from the {@link #CONFIG_STORE_METADATA_FILENAME} file at {@link #storeMetadataFilePath}.
   *
   * @throws IOException if the metadata file does not exist or cannot be read.
   */
  public String getCurrentVersion() throws IOException {
    return readMetadata().getString(CONFIG_STORE_METADATA_CURRENT_VERSION_KEY);
  }

  /**
   * Get all metadata from the {@link #CONFIG_STORE_METADATA_FILENAME} file at {@link #storeMetadataFilePath}.
   *
   * @throws IOException if the metadata file does not exist or cannot be read.
   */
  Config readMetadata() throws IOException {
    if (!isStoreMetadataFilePresent()) {
      throw new IOException("Store metadata file does not exist at " + this.storeMetadataFilePath);
    }
    try (InputStream storeMetadataInputStream = this.fs.open(this.storeMetadataFilePath)) {
      return ConfigFactory.parseReader(new InputStreamReader(storeMetadataInputStream, Charsets.UTF_8));
    }
  }

  /** Whether the store metadata file currently exists on the filesystem. */
  private boolean isStoreMetadataFilePresent() throws IOException {
    return this.fs.exists(this.storeMetadataFilePath);
  }
}
| 2,749 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/PhysicalPathNotExistException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.net.URI;
/**
 * Thrown when a physical location backing a config store cannot be found (e.g. when the store root
 * is not detected, which is usually caused by a configuration mistake on the user's side).
 */
public class PhysicalPathNotExistException extends RuntimeException {

  private static final long serialVersionUID = 1L;

  private static final String MESSAGE_FORMAT =
      "Failed to find the location %s in config store because of: %s ";

  // The URI whose physical path could not be located.
  private final URI uri;

  public PhysicalPathNotExistException(URI inspectedURI, String errorMessage) {
    super(String.format(MESSAGE_FORMAT, inspectedURI, errorMessage));
    this.uri = inspectedURI;
  }

  /** Returns the URI that was inspected and found missing. */
  public URI getUri() {
    return this.uri;
  }
}
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/VersionDoesNotExistException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.net.URI;
import org.apache.gobblin.annotation.Alpha;
@Alpha
/** Denotes that a requested config version is invalid or is no longer available. */
public class VersionDoesNotExistException extends RuntimeException {

  private static final long serialVersionUID = 1L;

  private static final String MESSAGE_FORMAT =
      "Failed to find the version %s in config store %s because of: %s ";

  // Root URI of the store in which the version was looked up.
  private final URI storeURI;
  // The version string that could not be found.
  private final String configVersion;

  public VersionDoesNotExistException(URI storeURI, String configVersion, String errorMessage) {
    super(String.format(MESSAGE_FORMAT, configVersion, storeURI, errorMessage));
    this.storeURI = storeURI;
    this.configVersion = configVersion;
  }

  /** Variant that preserves the triggering exception as the cause. */
  public VersionDoesNotExistException(URI storeURI, String configVersion, Exception e) {
    super(String.format(MESSAGE_FORMAT, configVersion, storeURI, e.getMessage()), e);
    this.storeURI = storeURI;
    this.configVersion = configVersion;
  }

  /** Returns the root URI of the store that was queried. */
  public URI getStoreURI() {
    return this.storeURI;
  }

  /** Returns the version that could not be found. */
  public String getConfigVersion() {
    return this.configVersion;
  }
}
| 2,751 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreWithResolution.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.util.List;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
/**
* The ConfigStoreWithResolution interface is used to indicate the {@link ConfigStore} implementation
* supports an efficient import configuration resolution for a given config key. The library will
* delegate the import resolution to this implementation instead of performing it.
*
* The resolution is performed by using the
* {@link Config#withFallback(com.typesafe.config.ConfigMergeable)} in the correct order. See the
* package documentation for more information on the order of import resolution.
*
* @author mitu
*
*/
@Alpha
public interface ConfigStoreWithResolution extends ConfigStore {
  /**
   * Obtains a {@link Config} object with all implicit and explicit imports resolved, i.e. specified
   * using the {@link Config#withFallback(com.typesafe.config.ConfigMergeable)} API.
   *
   * @param configKey the path of the configuration key to be resolved
   * @param version the configuration version for resolution
   * @return the {@link Config} object associated with the specified config key with all direct
   * and indirect imports resolved.
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Config getResolvedConfig(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Obtains the list of config keys which are directly and indirectly imported by the specified
   * config key. The import graph is traversed in depth-first manner. For a given config key,
   * explicit imports are listed before implicit imports from the ancestor keys.
   *
   * @param configKey the path of the config key whose imports are needed
   * @param version the configuration version to check
   * @return the paths of the directly and indirectly imported keys, including config keys imported
   * by ancestors. The earlier config key in the list will have higher priority when resolving
   * configuration conflict.
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Same as {@link #getImportsRecursively(ConfigKeyPath, String)} but additionally accepts an
   * optional runtime {@link Config}. NOTE(review): presumably the runtime config is used to resolve
   * dynamic import paths at call time — confirm against the concrete store implementations.
   */
  public List<ConfigKeyPath> getImportsRecursively(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
      throws VersionDoesNotExistException;
}
| 2,752 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.net.URI;
import org.apache.gobblin.annotation.Alpha;
/**
* ConfigStoreFactory is used to created {@link ConfigStore}s. Each ConfigStoreFactory is responsible for
* instantiating {@link ConfigStore}s that can handle specific config key URI scheme. Typically those
* {@link ConfigStore}s correspond to different physical instances and are differentiated by the
* authority in the URI. The ConfigStoreFactory will typically also define a default
* {@link ConfigStore} which is to be used if no authority is specified in the config key URI.
*
* @author mitu
*
* @param <T> The java class of the {@link ConfigStore} implementation(s) supported by this factory
*/
@Alpha
public interface ConfigStoreFactory<T extends ConfigStore> {
  /**
   * @return the URI scheme for which this configuration store factory is responsible.
   * All the configuration stores created by this configuration factory share the same scheme
   * name.
   */
  public String getScheme();
  /**
   * Obtains the {@link ConfigStore} to handle a specific config key.
   *
   * @param configKey The URI of the config key that needs to be accessed.
   * @return {@link ConfigStore} which can handle the specified config key. If the config key URI is
   * missing the authority part, the factory may choose a default store if available or throw
   * a ConfigStoreCreationException
   * @throws ConfigStoreCreationException if the URI cannot be mapped to a config store
   * @throws IllegalArgumentException if the scheme of the config key URI does not match
   * the value of {@link #getScheme()}.
   */
  public T createConfigStore(URI configKey) throws ConfigStoreCreationException;
}
| 2,753 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigKeyPath.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import org.apache.gobblin.annotation.Alpha;
/**
* The ConfigKeyPath is used to describe the relative path for a given configuration key URI to
* the root URI of a config store ({@link ConfigStore#getStoreURI()}). For example,
* for a configuration store with root URI hfs://namenode.grid.company.com:9000/configs/hfs_config_root/
* and a config key URI hfs://namenode.grid.company.com:9000/configs/hfs_config_root/data/tracking/,
* the ConfigKeyPath will be /data/tracking.
* @author mitu
*
*/
@Alpha
public interface ConfigKeyPath {
  /**
   * The path to the parent.
   * @return the {@link ConfigKeyPath} of this path's parent.
   * @throws UnsupportedOperationException if the current node is the root.
   */
  public ConfigKeyPath getParent();
  /**
   * The last component of this path. For example, for /a/b/c, it will return "c". If the current
   * path points to the root ("/"), the result is the empty string "".
   * @return the last path component, or "" for the root.
   */
  public String getOwnPathName();
  /**
   * Creates a path that is a child of the current path by appending one component to the path.
   * For example, if the current path points to "/a/b", createChild("c") will return a path that
   * points to "/a/b/c" .
   * @return a new {@link ConfigKeyPath} one level below this one.
   */
  public ConfigKeyPath createChild(String childPathName);
  /**
   * The absolute configuration key path. This is joining all path components from the root using
   * "/" as a separator.
   * @return the absolute path string, e.g. "/a/b/c".
   */
  public String getAbsolutePathString();
  /** Check if the current path is the root path ("/"). */
  public boolean isRootPath();
}
| 2,754 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
/**
* The ConfigStore interface used to describe a configuration store. A configuration store is a
* responsible for:
* <ul>
* <li>Storing and fetching the values for configuration keys
* <li>Storing and fetching the tags for configuration keys
* <li>Maintaining different versions of the above mappings
* </ul>
* This API defines the minimum functionality that a store has to implement. There are also
* a number of additional APIS (such as {@link ConfigStoreWithBatchFetches},
* {@link ConfigStoreWithImportedBy}, {@link ConfigStoreWithImportedByRecursively},
* {@link ConfigStoreWithResolution}, {@link ConfigStoreWithStableVersioning}) that denote that the
* store supports additional operations efficiently and the config client library should delegate
* those to the store rather than implementing those itself.
*
* @author mitu
*
*/
@Alpha
public interface ConfigStore {
  /**
   * @return the current version for that configuration store.
   */
  public String getCurrentVersion();
  /**
   * Obtains the config store root URI. This represents the logical location of the store.
   *
   * @return the configuration store root URI .
   */
  public URI getStoreURI();
  /**
   * Obtains the direct children config keys for a given config key. For example, the child
   * paths for /data/tracking may be /data/tracking/ImpressionEvent and /data/tracking/ClickEvent .
   *
   * <p>Note that this method should not be used for "service discovery", i.e. it need not return
   * all possible child paths but only those defined in the store. For example, the configuration
   * for /data/tracking/ConversionEvent may be implicitly inherited from /data/tracking and
   * /data/tracking/ConversionEvent may not be returned by this method.
   *
   * @param configKey the config key path whose children are necessary.
   * @param version specify the configuration version in the configuration store.
   * @return the direct children config key paths
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Collection<ConfigKeyPath> getChildren(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Obtains the list of all config keys with which a given config key is tagged/annotated
   * (i.e. its directly imported config keys).
   *
   * @param configKey the config key path whose tags are needed
   * @param version the configuration version in the configuration store.
   * @return the paths of the directly imported config keys for the specified config key and
   * version. Note that order is significant: the earlier ConfigKeyPath in the List will have higher
   * priority when resolving configuration conflicts.
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   *
   */
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Same as {@link #getOwnImports(ConfigKeyPath, String)} but additionally accepts an optional
   * runtime {@link Config}. NOTE(review): presumably used to resolve dynamic import paths at call
   * time — confirm against the concrete store implementations.
   */
  public List<ConfigKeyPath> getOwnImports(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
      throws VersionDoesNotExistException;
  /**
   * Obtains the configuration properties directly associated with a given config keys. These <b>
   * will not</b> include any properties/values which can be obtained from the ancestors or imported
   * config keys.
   *
   * @param configKey the config key path whose properties are needed.
   * @param version the configuration version in the configuration store.
   * @return the directly specified configuration in {@link Config} format for input uri
   * against input configuration version
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Config getOwnConfig(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
}
| 2,755 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreWithStableVersioning.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.apache.gobblin.annotation.Alpha;
import org.apache.gobblin.config.client.api.VersionStabilityPolicy;
/**
* The ConfigStoreWithStableVersioning annotation is used to indicate that the configuration store
* supports stable versioning. This means that:
*
* <ul>
* <li>Once published the version will remain available for at least a day even if it gets rolled
* back.</li>
* <li>The version and all its configuration objects are immutable.</li>
* </ul>
*
* Version stability defines the possible outcomes for repeated calls to
* {@link ConfigStore#getOwnConfig(ConfigKeyPath, String)}
* for the same config key and version from the same or different JVMs. This is used in conjunction
* with {@link VersionStabilityPolicy} to control client library behavior with respect to caching
* config values.
*
* @author mitu
*
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Alpha
public @interface ConfigStoreWithStableVersioning {
}
| 2,756 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreWithImportedBy.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.util.Collection;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
/**
 * ConfigStoreWithImportedBy indicates that this {@link ConfigStore} implementation supports an
 * efficient mapping from the config key to the config keys that directly import it (aka the
 * imported-by relationship).
 * @author mitu
 *
 */
@Alpha
public interface ConfigStoreWithImportedBy extends ConfigStore {
  /**
   * Obtains the collection of config keys which import a given config key.
   *
   * @param configKey the config key path which is imported
   * @param version the configuration version to run the query against
   * @return The {@link Collection} of paths of the config keys which import the specified config key
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Same as {@link #getImportedBy(ConfigKeyPath, String)}, additionally accepting runtime
   * configuration supplied by the caller.
   *
   * @param configKey the config key path which is imported
   * @param version the configuration version to run the query against
   * @param runtimeConfig optional runtime {@link Config} that the store implementation may consult
   *        when answering the query; exact semantics are implementation-specific
   * @return The {@link Collection} of paths of the config keys which import the specified config key
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Collection<ConfigKeyPath> getImportedBy(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
      throws VersionDoesNotExistException;
}
| 2,757 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreWithImportedByRecursively.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.util.Collection;
import com.google.common.base.Optional;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
/**
 * This is an extension of the {@link ConfigStoreWithImportedBy} interface which marks that
 * this {@link ConfigStore} implementation supports not only efficiently obtaining the
 * config keys that directly import a given config key but also the full transitive closure of such
 * keys.
 *
 * Note that when calculating the transitive closure, implicit imports coming from ancestor config
 * keys are also considered.
 */
@Alpha
public interface ConfigStoreWithImportedByRecursively extends ConfigStoreWithImportedBy {
  /**
   * Obtains all config keys which directly or indirectly import a given config key
   * @param configKey the path of the config key being imported
   * @param version the configuration version to check against
   * @return The {@link Collection} of paths of the config keys that directly or indirectly import
   *         the specified config key in the specified conf version.
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey, String version)
      throws VersionDoesNotExistException;
  /**
   * Same as {@link #getImportedByRecursively(ConfigKeyPath, String)}, additionally accepting
   * runtime configuration supplied by the caller.
   *
   * @param configKey the path of the config key being imported
   * @param version the configuration version to check against
   * @param runtimeConfig optional runtime {@link Config} that the store implementation may consult
   *        when answering the query; exact semantics are implementation-specific
   * @return The {@link Collection} of paths of the config keys that directly or indirectly import
   *         the specified config key in the specified conf version.
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Collection<ConfigKeyPath> getImportedByRecursively(ConfigKeyPath configKey, String version, Optional<Config> runtimeConfig)
      throws VersionDoesNotExistException;
}
| 2,758 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreWithBatchFetches.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.util.Collection;
import java.util.Map;
import com.typesafe.config.Config;
import org.apache.gobblin.annotation.Alpha;
/**
 * ConfigStoreWithBatchFetches indicates that this {@link ConfigStore} supports (efficient) fetching
 * of batches of config keys with the same config version. For {@link ConfigStore} implementations
 * that implement this interface, the config client library will delegate the batch fetches to the
 * store instead of doing that itself. A typical use case for this interface is if the {@link ConfigStore}
 * supports an RPC call which can fetch multiple config objects with a single call.
 *
 * @author mitu
 *
 */
@Alpha
public interface ConfigStoreWithBatchFetches extends ConfigStore {
  /**
   * Fetches the own (unresolved) {@link Config} objects for a batch of config keys in one call.
   *
   * @param configKeys the config keys whose {@link Config} objects are to be fetched
   * @param version the configuration version of the config keys
   * @return the Map from the config key to its own {@link com.typesafe.config.Config} object
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Map<ConfigKeyPath, Config> getOwnConfigs(Collection<ConfigKeyPath> configKeys, String version)
      throws VersionDoesNotExistException;
  /**
   * Fetches the fully resolved {@link Config} objects for a batch of config keys in one call.
   *
   * @param configKeys the config keys whose {@link Config} objects are to be fetched
   * @param version the configuration version of the config keys
   * @return the Map from the config key to its resolved {@link com.typesafe.config.Config} object
   * @throws VersionDoesNotExistException if the requested config version does not exist (any longer)
   */
  public Map<ConfigKeyPath, Config> getResolvedConfigs(Collection<ConfigKeyPath> configKeys, String version)
      throws VersionDoesNotExistException;
}
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/ConfigStoreCreationException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.config.store.api;
import java.net.URI;
import org.apache.gobblin.annotation.Alpha;
@Alpha
public class ConfigStoreCreationException extends Exception {
  private static final long serialVersionUID = 1L;
  private static final String MESSAGE_FORMAT = "Failed to create config store %s because of: %s";

  /** The URI of the config store whose creation failed. */
  private final URI storeURI;

  /**
   * Creates an exception describing a store-creation failure.
   *
   * @param storeURI the URI of the config store that could not be created
   * @param message a human-readable description of the failure reason
   */
  public ConfigStoreCreationException(URI storeURI, String message) {
    super(formatMessage(storeURI, message));
    this.storeURI = storeURI;
  }

  /**
   * Creates an exception describing a store-creation failure triggered by another exception.
   * The triggering exception is preserved as the cause.
   *
   * @param storeURI the URI of the config store that could not be created
   * @param e the underlying exception
   */
  public ConfigStoreCreationException(URI storeURI, Exception e) {
    super(formatMessage(storeURI, e.getMessage()), e);
    this.storeURI = storeURI;
  }

  /** @return the URI of the config store whose creation failed */
  public URI getStoreURI(){
    return this.storeURI;
  }

  /** Renders the standard failure message for the given store URI and reason. */
  private static String formatMessage(URI storeURI, String reason) {
    return String.format(MESSAGE_FORMAT, storeURI, reason);
  }
}
| 2,760 |
0 | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store | Create_ds/gobblin/gobblin-config-management/gobblin-config-core/src/main/java/org/apache/gobblin/config/store/api/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package contains the config stores API for gobblin configuration management.
*
* Configuration stores provide a low-level API for reading {@link com.typesafe.config.Config} objects. These
* are generally accessed through the config client API.
*/
package org.apache.gobblin.config.store.api;
| 2,761 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/SkipWorkUnitsIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.Properties;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
/**
 * Integration tests covering how skipped work units are handled by the local job launcher.
 *
 * Created by adsharma on 11/22/16.
 */
public class SkipWorkUnitsIntegrationTest {
  /** Recreates and empties the temporary job directories before and after the test run. */
  @BeforeTest
  @AfterTest
  public void cleanDir()
      throws IOException {
    GobblinLocalJobLauncherUtils.cleanDir();
  }

  /** Loads the job configuration shared by every test in this class. */
  private static Properties loadJobProperties()
      throws IOException {
    return GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/skip_workunits_test.properties");
  }

  /**
   * Validates that skipped work units are not passed to the publisher.
   * @throws Exception
   */
  @Test
  public void testSkippedWorkUnitsAvoidPublisher()
      throws Exception {
    Properties props = loadJobProperties();
    props.setProperty("data.publisher.type", "org.apache.gobblin.TestSkipWorkUnitsPublisher");
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
  }

  /**
   * Validates that the job succeeds under the "commit on full success" policy even when some of
   * the work units are skipped.
   * @throws Exception
   */
  @Test
  public void testJobSuccessOnFullCommit()
      throws Exception {
    Properties props = loadJobProperties();
    props.setProperty("job.commit.policy", "full");
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
  }

  /**
   * Validates that skipped work units are persisted in the state store and can be read again:
   * the first launch produces skipped work units, the second launch verifies their persistence.
   * @throws Exception
   */
  @Test
  public void testSkippedWorkUnitsPersistenceInStateStore()
      throws Exception {
    Properties props = loadJobProperties();
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
    props.setProperty("test.workunit.persistence", "true");
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
  }
}
| 2,762 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TestSkipWorkUnitsSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.testng.Assert;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.Source;
import org.apache.gobblin.source.extractor.DummyExtractor;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
* Created by adsharma on 11/22/16.
*/
public class TestSkipWorkUnitsSource implements Source {
private final String TEST_WORKUNIT_PERSISTENCE = "test.workunit.persistence";
private final int NUMBER_OF_SKIP_WORKUNITS = 3;
private final int NUMBER_OF_WORKUNITS = 4;
public List<WorkUnit> getWorkunits(SourceState state) {
List<WorkUnit> workUnits = new ArrayList<>();
if (state.contains(TEST_WORKUNIT_PERSISTENCE)) {
testSkipWorkUnitPersistence(state);
return workUnits;
}
for (int i = 0; i < NUMBER_OF_WORKUNITS; i++) {
WorkUnit workUnit = WorkUnit.createEmpty();
if (i < NUMBER_OF_SKIP_WORKUNITS) {
workUnit.skip();
}
workUnits.add(workUnit);
}
return workUnits;
}
public Extractor getExtractor(WorkUnitState state)
throws IOException {
return new DummyExtractor(state);
}
public void testSkipWorkUnitPersistence(SourceState state) {
if (!state.getPropAsBoolean(TEST_WORKUNIT_PERSISTENCE)) {
return;
}
int skipCount = 0;
for (WorkUnitState workUnitState : state.getPreviousWorkUnitStates()) {
if (workUnitState.getWorkingState() == WorkUnitState.WorkingState.SKIPPED) {
skipCount++;
}
}
Assert.assertEquals(skipCount, NUMBER_OF_SKIP_WORKUNITS,
"All skipped work units are not persisted in the state store");
}
public void shutdown(SourceState state) {
}
}
| 2,763 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TestSkipWorkUnitsPublisher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.Collection;
import org.testng.Assert;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.publisher.DataPublisher;
/**
* Created by adsharma on 11/22/16.
*/
public class TestSkipWorkUnitsPublisher extends DataPublisher {
public TestSkipWorkUnitsPublisher(State state)
throws IOException {
super(state);
}
public void initialize() {
}
public void publishData(Collection<? extends WorkUnitState> states)
throws IOException {
for (WorkUnitState state : states) {
Assert.assertTrue(state.getWorkingState() != WorkUnitState.WorkingState.SKIPPED,
"Skipped WorkUnit shouldn't be passed to publisher");
if (state.getWorkingState() == WorkUnitState.WorkingState.SUCCESSFUL) {
state.setWorkingState(WorkUnitState.WorkingState.COMMITTED);
} else {
state.setWorkingState(WorkUnitState.WorkingState.FAILED);
}
}
}
public void publishMetadata(Collection<? extends WorkUnitState> states) {
}
public void close() {
}
}
| 2,764 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TestAvroSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.Source;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * A test {@link Source} that emits a single work unit carrying all of the source state, and
 * hands extraction off to {@link TestAvroExtractor}.
 */
public class TestAvroSource implements Source<Schema, GenericRecord> {
  @Override
  public List<WorkUnit> getWorkunits(SourceState state) {
    final WorkUnit singleUnit = WorkUnit.createEmpty();
    // Copy every property of the source state into the lone work unit.
    singleUnit.addAll(state);
    return Collections.singletonList(singleUnit);
  }

  @Override
  public Extractor<Schema, GenericRecord> getExtractor(WorkUnitState state)
      throws IOException {
    return new TestAvroExtractor(state);
  }

  @Override
  public void shutdown(SourceState state) {
    // No resources to release.
  }
}
| 2,765 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/GobblinLocalJobLauncherUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.UUID;

import org.apache.commons.io.FileUtils;

import com.google.common.io.Files;

import org.apache.gobblin.runtime.app.ApplicationLauncher;
import org.apache.gobblin.runtime.app.ServiceBasedAppLauncher;
import org.apache.gobblin.runtime.local.LocalJobLauncher;
/**
 * Static helpers for launching Gobblin jobs locally in tests: running a {@link LocalJobLauncher}
 * inside a {@link ServiceBasedAppLauncher}, managing the temporary resource directories, and
 * loading job properties from classpath resources.
 */
public class GobblinLocalJobLauncherUtils {
  public static final String RESOURCE_DIR = Files.createTempDir().getAbsolutePath()+"/";
  public static final String SAMPLE_DIR = "test_data/daily/2016/10/01/";
  public static final String DATA_PURGER_COMMIT_DATA = "data.purger.commit.data";
  public static final String STATE_STORE = "state_store";
  public static final String WRITER_STAGING = "writer_staging";
  public static final String WRITER_OUTPUT = "writer_output";
  public static final String TMP = "tmp";
  public static final String METRICS = "metrics";
  public static final String FINAL_DIR = "final_dir";

  /** The sub-directories of {@link #RESOURCE_DIR} managed by {@link #cleanDir()}. */
  private static final String[] MANAGED_DIRS =
      {STATE_STORE, WRITER_STAGING, WRITER_OUTPUT, TMP, METRICS, SAMPLE_DIR, FINAL_DIR};

  /** Utility class; not meant to be instantiated. */
  private GobblinLocalJobLauncherUtils() {
  }

  /**
   * Runs a job synchronously through {@link LocalJobLauncher}, wrapped in a
   * {@link ServiceBasedAppLauncher} so supporting services are started and stopped around it.
   *
   * @param properties the fully-populated job configuration
   * @throws Exception if the application or job launcher fails
   */
  public static void invokeLocalJobLauncher(Properties properties)
      throws Exception {
    try (ApplicationLauncher applicationLauncher = new ServiceBasedAppLauncher(properties,
        properties.getProperty(ServiceBasedAppLauncher.APP_NAME, "CliLocalJob-" + UUID.randomUUID()))) {
      applicationLauncher.start();
      try (LocalJobLauncher localJobLauncher = new LocalJobLauncher(properties)) {
        localJobLauncher.launchJob(null);
      }
      applicationLauncher.stop();
    }
  }

  /**
   * Ensures every managed sub-directory under {@link #RESOURCE_DIR} exists and is empty.
   *
   * @throws IOException if a directory cannot be created or cleaned
   */
  public static void cleanDir()
      throws IOException {
    // Previously this duplicated a forceMkdir + cleanDirectory pair per directory; iterate instead.
    for (String subDir : MANAGED_DIRS) {
      File dir = new File(RESOURCE_DIR + subDir);
      FileUtils.forceMkdir(dir);
      FileUtils.cleanDirectory(dir);
    }
  }

  /**
   * Loads job properties from a classpath resource into the supplied {@link Properties}.
   *
   * @param jobProperties the properties object to populate (existing entries are kept)
   * @param fileProperties classpath-relative path of the properties resource
   * @return the populated {@code jobProperties} instance
   * @throws IOException if the resource is missing or cannot be read
   */
  public static Properties getJobProperties(Properties jobProperties, String fileProperties)
      throws IOException {
    // Close the resource stream (previously leaked) and fail with a clear message instead of
    // the NPE Properties.load would raise on a missing resource.
    try (InputStream propsStream =
        GobblinLocalJobLauncherUtils.class.getClassLoader().getResourceAsStream(fileProperties)) {
      if (propsStream == null) {
        throw new IOException("Properties resource not found on classpath: " + fileProperties);
      }
      jobProperties.load(propsStream);
    }
    return jobProperties;
  }

  /**
   * Loads job properties from a classpath resource into a fresh {@link Properties} object.
   *
   * @param fileProperties classpath-relative path of the properties resource
   * @return a new {@link Properties} populated from the resource
   * @throws IOException if the resource is missing or cannot be read
   */
  public static Properties getJobProperties(String fileProperties)
      throws IOException {
    return getJobProperties(new Properties(), fileProperties);
  }
}
| 2,766 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TaskSkipErrRecordsIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.JobException;
/**
 * Integration tests for the {@code task.skip.error.records} setting: the converter configured in
 * the job fails on the first record, so the job should only fail when no error records may be
 * skipped.
 */
@Test
public class TaskSkipErrRecordsIntegrationTest {
  private static final String SAMPLE_FILE = "test.avro";
  public static final String TASK_SKIP_ERROR_RECORDS = "task.skip.error.records";
  public static final String ONE = "1";
  public static final String ZERO = "0";

  /** Recreates and empties the temporary job directories before and after the test run. */
  @BeforeTest
  @AfterTest
  public void cleanDir()
      throws IOException {
    GobblinLocalJobLauncherUtils.cleanDir();
  }

  /**
   * Converter will throw DataConversionException while trying to convert the first record.
   * Since task.skip.error.records is set to 0, this job should fail.
   */
  @Test(expectedExceptions = JobException.class)
  public void skipZeroErrorRecordTest()
      throws Exception {
    Properties props = getProperties();
    props.setProperty(TASK_SKIP_ERROR_RECORDS, ZERO);
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
  }

  /**
   * Converter will throw DataConversionException while trying to convert the first record.
   * Since task.skip.error.records is set to 1, this job should succeed.
   */
  @Test
  public void skipOneErrorRecordTest()
      throws Exception {
    Properties props = getProperties();
    props.setProperty(TASK_SKIP_ERROR_RECORDS, ONE);
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(props);
  }

  /** Builds job properties pointing at a copy of the sample Avro file in the resource dir. */
  private Properties getProperties()
      throws IOException {
    Properties props =
        GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/task_skip_err_records.properties");
    URL sampleResource = getClass().getClassLoader().getResource("runtime_test/" + SAMPLE_FILE);
    Assert.assertNotNull(sampleResource, "Sample file not found");
    String targetPath =
        GobblinLocalJobLauncherUtils.RESOURCE_DIR + GobblinLocalJobLauncherUtils.SAMPLE_DIR + SAMPLE_FILE;
    FileUtils.copyFile(new File(sampleResource.getFile()), new File(targetPath));
    props.setProperty(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL, targetPath);
    return props;
  }
}
| 2,767 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TestAvroConverter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.converter.Converter;
import org.apache.gobblin.converter.DataConversionException;
import org.apache.gobblin.converter.SchemaConversionException;
import org.apache.gobblin.converter.SingleRecordIterable;
/**
 * Test converter which throws DataConversionException while converting the first record; every
 * subsequent record is passed through unchanged with an identity schema conversion.
 */
public class TestAvroConverter extends Converter<Schema, Schema, GenericRecord, GenericRecord> {
  // Set once the first record has been attempted; that first attempt always fails.
  private boolean firstRecordSeen = false;

  @Override
  public Schema convertSchema(Schema inputSchema, WorkUnitState workUnit)
      throws SchemaConversionException {
    // Identity conversion: the output schema is the input schema.
    return inputSchema;
  }

  @Override
  public Iterable<GenericRecord> convertRecord(Schema outputSchema, GenericRecord inputRecord, WorkUnitState workUnit)
      throws DataConversionException {
    if (!this.firstRecordSeen) {
      this.firstRecordSeen = true;
      throw new DataConversionException("Unable to convert record");
    }
    return new SingleRecordIterable<GenericRecord>(inputRecord);
  }
}
| 2,768 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TaskErrorIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.jboss.byteman.contrib.bmunit.BMNGRunner;
import org.jboss.byteman.contrib.bmunit.BMRule;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.SourceState;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.instrumented.extractor.InstrumentedExtractor;
import org.apache.gobblin.publisher.DataPublisher;
import org.apache.gobblin.publisher.NoopPublisher;
import org.apache.gobblin.runtime.GobblinMultiTaskAttempt;
import org.apache.gobblin.runtime.JobState;
import org.apache.gobblin.runtime.TaskContext;
import org.apache.gobblin.runtime.task.BaseAbstractTask;
import org.apache.gobblin.runtime.task.TaskFactory;
import org.apache.gobblin.runtime.task.TaskIFace;
import org.apache.gobblin.runtime.task.TaskUtils;
import org.apache.gobblin.source.Source;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
import org.apache.gobblin.source.workunit.WorkUnit;
import org.apache.gobblin.util.retry.RetryerFactory;
import static org.apache.gobblin.util.retry.RetryerFactory.RETRY_TIMES;
import static org.apache.gobblin.util.retry.RetryerFactory.RETRY_TYPE;
@Test (singleThreaded = true)
public class TaskErrorIntegrationTest extends BMNGRunner {
private static String EXCEPTION_MESSAGE = "test exception";
  /** Recreates and empties the temporary job directories before and after the test run. */
  @BeforeTest
  @AfterTest
  public void cleanDir()
      throws IOException {
    GobblinLocalJobLauncherUtils.cleanDir();
  }
  /**
   * Test that an extractor that raises an error on creation results in a log message from {@link GobblinMultiTaskAttempt}
   * and does not hang.
   * @throws Exception
   */
  @Test
  public void extractorCreationError()
      throws Exception {
    // Capture log output so we can assert on the task-creation failure message below.
    TestAppender testAppender = new TestAppender();
    // NOTE(review): the "-noattempt" suffix must match the logger name GobblinMultiTaskAttempt
    // constructs when no attempt id is set — confirm if the logger naming scheme changes.
    Logger logger = LogManager.getLogger(GobblinMultiTaskAttempt.class.getName() + "-noattempt");
    logger.addAppender(testAppender);
    Properties jobProperties =
        GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/skip_workunits_test.properties");
    jobProperties.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, BaseTestSource.class.getName());
    // Make extractor creation fail on every attempt.
    jobProperties.setProperty(TestExtractor.RAISE_ERROR, "true");
    jobProperties.setProperty(RETRY_TYPE, RetryerFactory.RetryType.FIXED_ATTEMPT.name());
    // Disable retry
    jobProperties.setProperty(RETRY_TIMES, "1");
    try {
      GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(jobProperties);
    } catch (Exception e){
      // Expecting to get exception, do nothing
    }
    // The failure must be logged rather than silently hanging the job.
    Assert.assertTrue(testAppender.events.stream().anyMatch(e -> e.getRenderedMessage()
        .startsWith("Could not create task for workunit")));
    logger.removeAppender(testAppender);
  }
/**
* Test when extractor failure happens as above, retry kicked in and heal the extractor itself.
*/
@Test
public void extractorCreationErrorWithRetry() throws Exception {
Properties jobProperties =
GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/skip_workunits_test.properties");
jobProperties.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, BaseTestSource.class.getName());
jobProperties.setProperty(TestExtractor.RAISE_ERROR, "true");
jobProperties.setProperty(RETRY_TYPE, RetryerFactory.RetryType.FIXED_ATTEMPT.name());
jobProperties.setProperty(TestExtractor.ENABLE_RETRY_FLIP, "true");
// Enable retry and should work for the second time.
jobProperties.setProperty(RETRY_TIMES, "2");
// Any failure should fail the test.
try {
GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(jobProperties);
} catch (Throwable t) {
Assert.fail();
}
Assert.assertTrue(true);
}
/**
* Test that a task submission error results in a log message from {@link GobblinMultiTaskAttempt}
* and does not hang
* @throws Exception
*/
@Test (enabled = false)
@BMRule(name = "testErrorDuringSubmission", targetClass = "org.apache.gobblin.runtime.TaskExecutor",
targetMethod = "submit(Task)", targetLocation = "AT ENTRY", condition = "true",
action = "throw new RuntimeException(\"Exception for testErrorDuringSubmission\")")
public void testErrorDuringSubmission()
throws Exception {
TestAppender testAppender = new TestAppender();
Logger logger = LogManager.getLogger(GobblinMultiTaskAttempt.class.getName() + "-noattempt");
logger.addAppender(testAppender);
Properties jobProperties =
GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/skip_workunits_test.properties");
jobProperties.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, BaseTestSource.class.getName());
jobProperties.setProperty(TestExtractor.RAISE_ERROR, "false");
try {
GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(jobProperties);
} catch (Exception e){
// Expect to get exception, do nothing
}
Assert.assertTrue(testAppender.events.stream().anyMatch(e -> e.getRenderedMessage()
.startsWith("Could not submit task for workunit")));
logger.removeAppender(testAppender);
}
@Test
public void testCustomizedTaskFrameworkFailureInTaskCreation() throws Exception {
TestAppender testAppender = new TestAppender();
Logger logger = LogManager.getLogger(GobblinMultiTaskAttempt.class.getName() + "-noattempt");
logger.addAppender(testAppender);
Properties jobProperties =
GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/skip_workunits_test.properties");
jobProperties.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, CustomizedTaskTestSource.class.getName());
// To demonstrate failure caught in task creation in test setting, disabled retry in task creation.
jobProperties.setProperty(RETRY_TIMES, "1");
jobProperties.setProperty(RETRY_TYPE, RetryerFactory.RetryType.FIXED_ATTEMPT.name());
try {
GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(jobProperties);
} catch (Throwable t){
// Expect to get exception, do nothing
}
Assert.assertTrue(testAppender.events.stream().anyMatch(e -> e.getRenderedMessage().contains("Could not create task for workunit")));
logger.removeAppender(testAppender);
}
/**
* Test extractor that can be configured to raise an exception on construction,
* or heal it self after even times of retry (constructor-attempt)
*/
public static class TestExtractor<S, D> extends InstrumentedExtractor<S, D> {
private static final String RAISE_ERROR = "raiseError";
private static int RETRY_COUNT = 1;
private static final String ENABLE_RETRY_FLIP = "enableRetry";
public TestExtractor(WorkUnitState workUnitState) {
super(workUnitState);
try {
if (workUnitState.getPropAsBoolean(ENABLE_RETRY_FLIP, false) && RETRY_COUNT % 2 == 0) {
return;
}
if (workUnitState.getPropAsBoolean(RAISE_ERROR, false)) {
throw new RuntimeException(EXCEPTION_MESSAGE);
}
} finally {
// Need to make sure retryCount increment at the end of each constructor.
RETRY_COUNT += 1;
}
}
@Override
public S getSchema() throws IOException {
return null;
}
@Override
public long getExpectedRecordCount() {
return 0;
}
@Override
public long getHighWatermark() {
return 0;
}
@Override
public D readRecordImpl(D reuse) throws DataRecordException, IOException {
return null;
}
}
/**
* Testing task and factory implementation for Customized Task implementation.
*/
public static class TestCustomizedTask extends BaseAbstractTask {
public TestCustomizedTask(TaskContext taskContext) {
super(taskContext);
// trigger OutOfMemoryError on purpose during creation phase.
throw new OutOfMemoryError();
}
}
public static class TestTaskFactory implements TaskFactory {
@Override
public TaskIFace createTask(TaskContext taskContext) {
return new TestCustomizedTask(taskContext);
}
@Override
public DataPublisher createDataPublisher(JobState.DatasetState datasetState) {
return new NoopPublisher(datasetState);
}
}
public static class CustomizedTaskTestSource extends BaseTestSource {
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
WorkUnit workUnit = new WorkUnit();
TaskUtils.setTaskFactoryClass(workUnit, TestTaskFactory.class);
workUnit.addAll(state);
return Collections.singletonList(workUnit);
}
}
/**
* Test source that creates a {@link TestExtractor}
*/
public static class BaseTestSource implements Source<Schema, GenericRecord> {
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
WorkUnit workUnit = WorkUnit.createEmpty();
workUnit.addAll(state);
return Collections.singletonList(workUnit);
}
@Override
public Extractor<Schema, GenericRecord> getExtractor(WorkUnitState state)
throws IOException {
return new TestExtractor(state);
}
@Override
public void shutdown(SourceState state) {
}
}
private class TestAppender extends AppenderSkeleton {
List<LoggingEvent> events = new ArrayList<LoggingEvent>();
public void close() {}
public boolean requiresLayout() {return false;}
@Override
protected void append(LoggingEvent event) {
events.add(event);
}
}
}
| 2,769 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/WriterOutputFormatIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Properties;
import org.apache.commons.io.FileUtils;
import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import com.google.common.io.Files;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.hive.HiveSerDeWrapper;
/**
 * Integration tests that run a local Gobblin job once per writer output format
 * (PARQUET, ORC, TEXTFILE) against a sample Avro input file.
 */
public class WriterOutputFormatIntegrationTest {
  private static final String SAMPLE_FILE = "test.avro";

  /** Cleans the shared working directory before and after the suite. */
  @BeforeTest
  @AfterTest
  public void cleanDir()
      throws IOException {
    GobblinLocalJobLauncherUtils.cleanDir();
  }

  //TODO: Disabling test until this issue is fixed -> https://issues.apache.org/jira/browse/GOBBLIN-1318
  @Test( enabled=false )
  public void parquetOutputFormatTest()
      throws Exception {
    runJobWithOutputFormat("PARQUET");
  }

  @Test
  public void orcOutputFormatTest()
      throws Exception {
    runJobWithOutputFormat("ORC");
  }

  @Test
  public void textfileOutputFormatTest()
      throws Exception {
    runJobWithOutputFormat("TEXTFILE");
  }

  /** Launches a local job with both the serde serializer and the writer output format set to {@code format}. */
  private void runJobWithOutputFormat(String format)
      throws Exception {
    Properties jobProperties = getProperties();
    jobProperties.setProperty(HiveSerDeWrapper.SERDE_SERIALIZER_TYPE, format);
    jobProperties.setProperty(ConfigurationKeys.WRITER_OUTPUT_FORMAT_KEY, format);
    GobblinLocalJobLauncherUtils.invokeLocalJobLauncher(jobProperties);
  }

  /**
   * Builds job properties pointing at a fresh temp copy of the sample Avro file,
   * with all output directories rooted under a new temp directory.
   */
  private Properties getProperties()
      throws IOException {
    Properties jobProperties =
        GobblinLocalJobLauncherUtils.getJobProperties("runtime_test/writer_output_format_test.properties");

    // Copy the sample input to a temp file so each test run works on its own copy.
    URL sampleResource = getClass().getClassLoader().getResource("runtime_test/" + SAMPLE_FILE);
    Assert.assertNotNull(sampleResource, "Sample file should be present");
    File inputCopy = File.createTempFile("writerTest", ".avro");
    FileUtils.copyFile(new File(sampleResource.getFile()), inputCopy);
    jobProperties.setProperty(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL,
        inputCopy.getAbsolutePath());

    String outputRoot = Files.createTempDir().getAbsolutePath() + "/";
    jobProperties.setProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, outputRoot + "state_store");
    jobProperties.setProperty(ConfigurationKeys.WRITER_STAGING_DIR, outputRoot + "writer_staging");
    jobProperties.setProperty(ConfigurationKeys.WRITER_OUTPUT_DIR, outputRoot + "writer_output");
    jobProperties.setProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR, outputRoot + "final_dir");
    return jobProperties;
  }
}
| 2,770 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/TestAvroExtractor.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.FileReader;
import org.apache.avro.file.SeekableInput;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.mapred.FsInput;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.configuration.WorkUnitState;
import org.apache.gobblin.source.extractor.DataRecordException;
import org.apache.gobblin.source.extractor.Extractor;
/**
 * Test {@link Extractor} that eagerly reads all {@link GenericRecord}s from the Avro file
 * named by {@link ConfigurationKeys#SOURCE_FILEBASED_FILES_TO_PULL} and replays them.
 */
public class TestAvroExtractor implements Extractor<Schema, GenericRecord> {
  private WorkUnitState state;
  // All records read eagerly at construction time; replayed one by one via recordIterator.
  private List<GenericRecord> recordList;
  private Iterator<GenericRecord> recordIterator;

  public TestAvroExtractor(WorkUnitState workUnitState)
      throws IOException {
    this.state = workUnitState;
    this.recordList = getRecordFromFile(workUnitState.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL));
    this.recordIterator = this.recordList.iterator();
  }

  /**
   * Reads every record from the Avro data file at {@code path}.
   *
   * @param path path to the Avro file, resolved against the default Hadoop configuration
   * @return the records in file order
   * @throws IOException if the file cannot be opened or read
   */
  public static List<GenericRecord> getRecordFromFile(String path)
      throws IOException {
    Configuration config = new Configuration();
    SeekableInput input = new FsInput(new Path(path), config);
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
    List<GenericRecord> records = new ArrayList<>();
    // try-with-resources: the previous version leaked the reader if iteration threw.
    try (FileReader<GenericRecord> fileReader = DataFileReader.openReader(input, datumReader)) {
      for (GenericRecord datum : fileReader) {
        records.add(datum);
      }
    }
    return records;
  }

  @Override
  public Schema getSchema()
      throws IOException {
    // No records means no schema can be derived.
    if (recordList == null || recordList.isEmpty()) {
      return null;
    }
    return recordList.get(0).getSchema();
  }

  @Override
  public GenericRecord readRecord(@Deprecated GenericRecord reuse)
      throws DataRecordException, IOException {
    // Returns null once all records have been replayed, signalling end of data.
    if (this.recordIterator.hasNext()) {
      return this.recordIterator.next();
    } else {
      return null;
    }
  }

  @Override
  public long getExpectedRecordCount() {
    return recordList.size();
  }

  @Override
  public long getHighWatermark() {
    return recordList.size();
  }

  @Override
  public void close()
      throws IOException {
    // Nothing to release; the file reader is closed in getRecordFromFile.
  }
}
| 2,771 |
0 | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/test/integration/data | Create_ds/gobblin/gobblin-test-harness/src/test/java/org/apache/gobblin/test/integration/data/management/CopyIntegrationTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.integration.data.management;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.runtime.JobLauncher;
import org.apache.gobblin.runtime.JobLauncherFactory;
import java.io.FileReader;
import java.util.Properties;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
/**
 * A class to test the copy job in standalone mode.
 */
public class CopyIntegrationTest {
  private Properties gobblinProps;
  private Properties jobProps;

  /** Loads the framework and job property files; readers are closed via try-with-resources. */
  @BeforeClass
  public void setup() throws Exception {
    this.gobblinProps = new Properties();
    // try-with-resources: the previous version leaked these FileReader handles.
    try (FileReader reader =
        new FileReader("gobblin-test-harness/resource/dataManagement/copy/job-props/copy.properties")) {
      gobblinProps.load(reader);
    }
    this.jobProps = new Properties();
    try (FileReader reader =
        new FileReader("gobblin-test-harness/resource/dataManagement/copy/job-props/copy.pull")) {
      jobProps.load(reader);
    }
  }

  /** Runs the copy job and verifies that both extracted text files arrive with the expected contents. */
  @Test
  public void testTarGzCopy() throws Exception {
    Closer closer = Closer.create();
    try {
      JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(gobblinProps, jobProps));
      jobLauncher.launchJob(null);
      String file1Path =
          gobblinProps.getProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR) + "/LogData/sub1/sub2/text1.txt";
      String file2Path =
          gobblinProps.getProperty(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR) + "/LogData/sub1/sub2/text2.txt";
      FileSystem fs = FileSystem.getLocal(new Configuration());
      // Pass the charset explicitly: IOUtils.toString(InputStream) is deprecated and platform-dependent.
      Assert.assertEquals(
          IOUtils.toString(closer.register(fs.open(new Path(file1Path))), java.nio.charset.StandardCharsets.UTF_8),
          "text1");
      Assert.assertEquals(
          IOUtils.toString(closer.register(fs.open(new Path(file2Path))), java.nio.charset.StandardCharsets.UTF_8),
          "text2");
    } finally {
      closer.close();
    }
  }

  /** Removes the test output directory before and after the class runs. */
  @AfterClass
  @BeforeClass
  public void cleanup() throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    fs.delete(new Path("gobblin-test-harness/testOutput"), true);
  }
}
| 2,772 |
0 | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup/config/ConfigStepsGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.setup.config;
import java.util.Collection;
/**
 * Generates the execution steps associated with a test configuration. The steps may
 * include copying of data or validating the outcome of the test.
 *
 * @author sveerama
 */
public interface ConfigStepsGenerator {

  /**
   * Generates the list of execution steps associated with the config.
   *
   * @return the config steps as a collection of {@link Step}
   */
  Collection<Step> generateExecutionSteps();
}
| 2,773 |
0 | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup/config/TestHarnessLauncher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.setup.config;
/**
 * A launcher for test-harness runs. Implementations parse and validate the config
 * settings and then launch the test described by them.
 *
 * @author sveerama
 */
public interface TestHarnessLauncher extends ConfigStepsGenerator {

  /**
   * Launches the test for the config entry after validation.
   *
   * @throws Exception if the test cannot be launched or fails
   */
  void launchTest() throws Exception;
}
| 2,774 |
0 | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/setup/config/Step.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.setup.config;
/**
 * An individual step associated with a config entry. One step can be composed of many
 * operators, each of which has its own execution.
 *
 * @author sveerama
 */
public interface Step {

  /**
   * Executes the current step, which in turn executes its list of operators.
   *
   * @return true if the step executed successfully
   * @throws Exception if execution fails
   */
  boolean execute() throws Exception;

  /**
   * Gets the name of this step within the series of steps, for reporting purposes.
   *
   * @return the step name
   */
  String getStepName();
}
| 2,775 |
0 | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/execution | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/execution/validator/ValidationRule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.execution.validator;
/**
 * A validation rule for a test run: implementations compare the test input
 * (e.g. a set of input files) against the produced output.
 *
 * @author sveerama
 */
public interface ValidationRule {

  /**
   * Runs this rule's validation.
   *
   * @return true if the validation succeeded
   */
  boolean isValid();
}
| 2,776 |
0 | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/execution | Create_ds/gobblin/gobblin-test-harness/src/main/java/org/apache/gobblin/test/execution/operator/SetupOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.test.execution.operator;
/**
 * An operator used during the setup phase of a test, e.g. copying a file or
 * converting a file from one format to another.
 *
 * @author sveerama
 */
public interface SetupOperator {

  /**
   * Executes this operator's associated process.
   *
   * @return true if the operator executed successfully
   * @throws Exception if execution fails
   */
  boolean executeOperator() throws Exception;
}
| 2,777 |
0 | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin/tunnel/TalkPastServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import org.apache.commons.lang.StringUtils;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.Socket;
import java.security.MessageDigest;
import java.util.Map;
/**
* A TCP server that simultaneously reads and writes from a socket. This is to test the Tunnel's buffer management and
* ensure client and server responses don't get mixed up. This server also keeps a MD5 hash of all messages received per
* client so that tests can verify the server received what the client transmitted.
*
* @author kkandekar@linkedin.com
*/
class TalkPastServer extends MockServer {
private final int nMsgs;
private final Map<String, MessageDigest> digestMsgsRecvdAtServer;
public TalkPastServer(int nMsgs, Map<String, MessageDigest> digestMsgsRecvdAtServer) {
this.nMsgs = nMsgs;
this.digestMsgsRecvdAtServer = digestMsgsRecvdAtServer;
}
static String generateMsgFromServer(int i) {
return i + " " + StringUtils.repeat("Babble babble ", 10000) + "\n";
}
@Override
void handleClientSocket(Socket clientSocket) throws IOException {
LOG.info("Writing to client");
try {
final BufferedOutputStream serverOut = new BufferedOutputStream(clientSocket.getOutputStream());
EasyThread clientWriterThread = new EasyThread() {
@Override
void runQuietly() throws Exception {
long t = System.currentTimeMillis();
try {
for (int i = 0; i < nMsgs; i++) {
serverOut.write(generateMsgFromServer(i).getBytes());
sleepQuietly(2);
}
} catch (IOException e) {
e.printStackTrace();
}
LOG.info("Server done writing in " + (System.currentTimeMillis() - t) + " ms");
}
}.startThread();
_threads.add(clientWriterThread);
BufferedReader serverIn = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
String line = serverIn.readLine();
while (line != null && !line.equals("Goodbye")) {
String[] tokens = line.split(":", 2);
String client = tokens[0];
digestMsgsRecvdAtServer.get(client).update(line.getBytes());
digestMsgsRecvdAtServer.get(client).update("\n".getBytes());
line = serverIn.readLine();
}
LOG.info("Server done reading");
try {
clientWriterThread.join();
} catch (InterruptedException e) {
}
serverOut.write("Goodbye\n".getBytes());
serverOut.flush();
clientSocket.close();
} catch (IOException e) {
e.printStackTrace();
throw e;
}
}
}
| 2,778 |
0 | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin/tunnel/DoubleEchoServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
/**
 * A TCP server that reads a line from a socket and echoes it back twice on the same line,
 * separated by a space, optionally after a configurable delay.
 *
 * @author kkandekar@linkedin.com
 */
class DoubleEchoServer extends MockServer {
  private final long delay;

  public DoubleEchoServer(long delay) {
    this.delay = delay;
  }

  @Override
  void handleClientSocket(Socket clientSocket) throws IOException {
    try {
      BufferedReader reader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
      PrintWriter writer = new PrintWriter(clientSocket.getOutputStream());
      // Echo each request back doubled until the client disconnects or the server stops.
      for (String request = reader.readLine();
           request != null && isServerRunning();
           request = reader.readLine()) {
        if (this.delay > 0) {
          sleepQuietly(this.delay);
        }
        writer.println(request + " " + request);
        writer.flush();
      }
    } catch (IOException ignored) {
      // This gets thrown when the proxy abruptly closes a connection
    } finally {
      clientSocket.close();
    }
  }
}
| 2,779 |
0 | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin/tunnel/EasyThread.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A convenient, trackable, easy-to-cleanup wrapper around threads: every started
 * instance is a daemon and is registered in a global set so tests can clean them up,
 * and any exception escaping the work body is logged instead of killing the JVM.
 *
 * @author kkandekar@linkedin.com
 */
abstract class EasyThread extends Thread {
  protected static final Logger LOG = LoggerFactory.getLogger(EasyThread.class);
  // Global registry of every started EasyThread, for bulk cleanup by tests.
  final static Set<EasyThread> ALL_THREADS = Collections.synchronizedSet(new HashSet<EasyThread>());

  /**
   * Marks this thread as a daemon, starts it, and registers it in {@link #ALL_THREADS}.
   *
   * @return this thread, for call chaining
   */
  EasyThread startThread() {
    setDaemon(true);
    start();
    ALL_THREADS.add(this);
    return this;
  }

  @Override
  public void run() {
    try {
      runQuietly();
    } catch (Exception e) {
      // Log and swallow so a failing helper thread does not abort the test process.
      LOG.info("Exception in EasyThread#run", e);
    }
  }

  /** Subclasses put their work here; any exception thrown is caught and logged by {@link #run()}. */
  abstract void runQuietly() throws Exception;
}
| 2,780 |
0 | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/test/java/org/apache/gobblin/tunnel/ConnectProxyServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import org.apache.commons.io.IOUtils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Due to the lack of a suitable embeddable proxy server (the Jetty version here is too old and MockServer's Proxy
* expects SSL traffic and breaks for arbitrary bytes) we had to write our own mini CONNECT proxy. This simply gets
* an HTTP CONNECT request over a socket, opens another socket to the specified remote server and relays bytes between
* the two connections.
*
* @author kkandekar@linkedin.com
*/
/**
 * Due to the lack of a suitable embeddable proxy server (the Jetty version here is too old and MockServer's Proxy
 * expects SSL traffic and breaks for arbitrary bytes) we had to write our own mini CONNECT proxy. This simply gets
 * an HTTP CONNECT request over a socket, opens another socket to the specified remote server and relays bytes between
 * the two connections.
 *
 * @author kkandekar@linkedin.com
 */
class ConnectProxyServer extends MockServer {
  // If true, the 200 OK and the server's first bytes are delivered to the client in one write.
  private final boolean mixServerAndProxyResponse;
  // If true, the CONNECT response is padded so the tunnel needs several reads to drain it.
  private final boolean largeResponse;
  // Extracts target host and port from the "Host:" header of the CONNECT request.
  Pattern hostPortPattern;
  // When > 0, the proxy abruptly stops relaying server->client traffic after this many bytes.
  int nBytesToCloseSocketAfter;

  /**
   * @param mixServerAndProxyResponse Force proxy to send 200 OK and server response in single write such that both
   *                                  responses reach the tunnel in the same read. This can happen for a multitude of
   *                                  reasons, e.g. the proxy GC's, or the network hiccups, or the tunnel GC's.
   * @param largeResponse Force proxy to send a large response
   * @param nBytesToCloseSocketAfter number of relayed bytes after which the connection is cut; <= 0 disables
   */
  public ConnectProxyServer(boolean mixServerAndProxyResponse, boolean largeResponse, int nBytesToCloseSocketAfter) {
    this.mixServerAndProxyResponse = mixServerAndProxyResponse;
    this.largeResponse = largeResponse;
    this.nBytesToCloseSocketAfter = nBytesToCloseSocketAfter;
    hostPortPattern = Pattern.compile("Host: (.*):([0-9]+)");
  }

  /**
   * Parses the HTTP CONNECT request, opens a socket to the requested host:port and relays bytes in
   * both directions until either end closes or the configured byte budget is exhausted.
   */
  @Override
  void handleClientSocket(Socket clientSocket) throws IOException {
    final InputStream clientToProxyIn = clientSocket.getInputStream();
    BufferedReader clientToProxyReader = new BufferedReader(new InputStreamReader(clientToProxyIn));
    final OutputStream clientToProxyOut = clientSocket.getOutputStream();
    // Accumulate header lines until the blank line ("\r\n\r\n") terminating the CONNECT request.
    // A StringBuilder avoids the O(n^2) cost of repeated String concatenation in the loop.
    StringBuilder connectRequest = new StringBuilder();
    String line = clientToProxyReader.readLine();
    while (line != null && isServerRunning()) {
      connectRequest.append(line).append("\r\n");
      int len = connectRequest.length();
      if (len >= 4 && connectRequest.lastIndexOf("\r\n\r\n") == len - 4) {
        break;
      }
      line = clientToProxyReader.readLine();
    }
    // connect to given host:port
    Matcher matcher = hostPortPattern.matcher(connectRequest);
    if (!matcher.find()) {
      try {
        sendConnectResponse("400 Bad Request", clientToProxyOut, null, 0);
      } finally {
        clientSocket.close();
        stopServer();
      }
      return;
    }
    String host = matcher.group(1);
    int port = Integer.decode(matcher.group(2));
    // connect to server
    Socket serverSocket = new Socket();
    try {
      serverSocket.connect(new InetSocketAddress(host, port));
      addSocket(serverSocket);
      byte[] initialServerResponse = null;
      int nbytes = 0;
      if (mixServerAndProxyResponse) {
        // we want to mix the initial server response with the 200 OK
        initialServerResponse = new byte[64];
        nbytes = serverSocket.getInputStream().read(initialServerResponse);
      }
      sendConnectResponse("200 OK", clientToProxyOut, initialServerResponse, nbytes);
    } catch (IOException e) {
      try {
        sendConnectResponse("404 Not Found", clientToProxyOut, null, 0);
      } finally {
        // Fix: release the server-side socket too; it was previously leaked on this path.
        IOUtils.closeQuietly(serverSocket);
        clientSocket.close();
        stopServer();
      }
      return;
    }
    final InputStream proxyToServerIn = serverSocket.getInputStream();
    final OutputStream proxyToServerOut = serverSocket.getOutputStream();
    // Relay client -> server on a helper thread; server -> client is relayed below on this thread.
    _threads.add(new EasyThread() {
      @Override
      void runQuietly() throws Exception {
        try {
          IOUtils.copy(clientToProxyIn, proxyToServerOut);
        } catch (IOException e) {
          LOG.warn("Exception " + e.getMessage() + " on " + getServerSocketPort());
        }
      }
    }.startThread());
    try {
      if (nBytesToCloseSocketAfter > 0) {
        // Simulate proxy abruptly closing connection
        int leftToRead = nBytesToCloseSocketAfter;
        byte[] buffer = new byte[leftToRead + 256];
        while (true) {
          int numRead = proxyToServerIn.read(buffer, 0, leftToRead);
          if (numRead < 0) {
            break;
          }
          clientToProxyOut.write(buffer, 0, numRead);
          clientToProxyOut.flush();
          leftToRead -= numRead;
          if (leftToRead <= 0) {
            LOG.warn("Cutting connection after " + nBytesToCloseSocketAfter + " bytes");
            break;
          }
        }
      } else {
        IOUtils.copy(proxyToServerIn, clientToProxyOut);
      }
    } catch (IOException e) {
      LOG.warn("Exception " + e.getMessage() + " on " + getServerSocketPort());
    }
    clientSocket.close();
    serverSocket.close();
  }

  /**
   * Writes the HTTP status line for the CONNECT request, optionally padded ({@code largeResponse})
   * and/or followed in the same write by the first bytes already read from the server.
   */
  private void sendConnectResponse(String statusMessage, OutputStream out,
                                   byte[] initialServerResponse, int initialServerResponseSize)
      throws IOException {
    String extraHeader = "";
    if (largeResponse) {
      // this is to force multiple reads while draining the proxy CONNECT response in Tunnel. Normal proxy responses
      // won't be this big (well, unless you annoy squid proxy, which happens sometimes), but a select() call
      // waking up for multiple reads before a buffer is full is normal
      for (int i = 0; i < 260; i++) {
        extraHeader += "a";
      }
    }
    byte[] httpResponse =
        ("HTTP/1.1 " + statusMessage + "\r\nContent-Length: 0\r\nServer: MockProxy" + extraHeader + "\r\n\r\n")
            .getBytes();
    if (initialServerResponse != null) {
      byte[] mixedResponse = new byte[httpResponse.length + initialServerResponseSize];
      System.arraycopy(httpResponse, 0, mixedResponse, 0, httpResponse.length);
      System.arraycopy(initialServerResponse, 0, mixedResponse, httpResponse.length, initialServerResponseSize);
      out.write(mixedResponse);
    } else {
      out.write(httpResponse);
    }
    out.flush();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.Socket;
import java.net.SocketException;
/**
* A double-echo TCP server that writes to a socket as soon as it accepts one. This is to simulate the behavior of
* certain protocols like MySql.
*
* @author kkandekar@linkedin.com
*/
/**
 * A double-echo TCP server that writes to a socket as soon as it accepts one. This is to simulate the behavior of
 * certain protocols like MySql.
 *
 * @author kkandekar@linkedin.com
 */
class TalkFirstDoubleEchoServer extends MockServer {
  /**
   * Greets the client with "Hello" immediately, then echoes every incoming line back twice
   * until the client disconnects or the server is stopped.
   */
  @Override
  void handleClientSocket(Socket clientSocket) throws IOException {
    LOG.info("Writing to client");
    BufferedReader reader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream()));
    PrintWriter writer = new PrintWriter(clientSocket.getOutputStream());
    try {
      // Speak first, before reading anything from the client.
      writer.println("Hello");
      writer.flush();
      for (String request = reader.readLine();
           request != null && isServerRunning();
           request = reader.readLine()) {
        writer.println(request + " " + request);
        writer.flush();
      }
    } catch (SocketException se) {
      // don't bring down server when client disconnected abruptly
      if (!se.getMessage().contains("Connection reset")) {
        throw se;
      }
    } finally {
      clientSocket.close();
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
/**
* A convenient-to-use TCP server class to implement mock test servers.
*
* @author kkandekar@linkedin.com
*/
/**
 * A convenient-to-use TCP server class to implement mock test servers.
 *
 * <p>Binds an ephemeral localhost port, accepts connections on a background thread, and hands each
 * client socket to the subclass-provided {@link #handleClientSocket(Socket)} on its own thread.
 * {@link #stopServer()} closes every tracked socket and interrupts every spawned thread.</p>
 *
 * @author kkandekar@linkedin.com
 */
abstract class MockServer {
  protected static final Logger LOG = LoggerFactory.getLogger(MockServer.class);
  // Checked by the accept loop and handlers; flipped by stopServer().
  volatile boolean _serverRunning = true;
  ServerSocket _server;
  // Threads spawned by this server, kept so stopServer() can interrupt them.
  Set<EasyThread> _threads = Collections.synchronizedSet(new HashSet<EasyThread>());
  // Sockets handed out, kept because interrupting a thread blocked on socket I/O is not enough.
  List<Socket> _sockets = new Vector<Socket>();
  int _serverSocketPort;
  int numConnects = 0; //only counted for proxy connects

  /** Sleeps for the given duration; on interruption, restores the thread's interrupt flag. */
  static void sleepQuietly(long millis) {
    try {
      Thread.sleep(millis);
    } catch (InterruptedException e) {
      // Fix: don't swallow the interrupt — re-assert it so callers can observe it.
      Thread.currentThread().interrupt();
    }
  }

  /**
   * Binds an ephemeral localhost port and starts the accept loop on a background thread.
   *
   * @return this server, for call chaining
   */
  public MockServer start() throws IOException {
    _server = new ServerSocket();
    // 5s accept timeout so the loop periodically re-checks _serverRunning.
    _server.setSoTimeout(5000);
    _server.bind(new InetSocketAddress("localhost", 0));
    _serverSocketPort = _server.getLocalPort();
    _threads.add(new EasyThread() {
      @Override
      void runQuietly() throws Exception {
        runServer();
      }
    }.startThread());
    return this;
  }

  // accept thread
  public void runServer() {
    while (_serverRunning) {
      try {
        final Socket clientSocket = _server.accept();
        numConnects++;
        LOG.info("Accepted connection on " + getServerSocketPort());
        // client handler thread
        _threads.add(new EasyThread() {
          @Override
          void runQuietly() throws Exception {
            try {
              addSocket(clientSocket);
              handleClientSocket(clientSocket);
            } catch (IOException e) {
              LOG.info("Exception in handleClientSocket, stopping server", e);
              stopServer();
            }
          }
        }.startThread());
      } catch (IOException e) {
        // Includes the periodic SocketTimeoutException from the 5s accept timeout.
        LOG.info("Exception in runServer", e);
      }
    }
    try {
      _server.close();
    } catch (IOException e) {
      LOG.info("Exception while closing server", e);
    }
  }

  /** @return number of connections accepted so far (used by proxy tests). */
  int getNumConnects() {
    return numConnects;
  }

  boolean isServerRunning() {
    return _serverRunning;
  }

  int getServerSocketPort() {
    return _serverSocketPort;
  }

  // need to keep track of socket because interrupting thread is not working
  void addSocket(Socket socket) {
    _sockets.add(socket);
  }

  /** Subclass hook: serve one client connection; may block until the client is done. */
  abstract void handleClientSocket(Socket socket) throws IOException;

  /** Stops the accept loop, closes all tracked sockets and interrupts all spawned threads. */
  public void stopServer() {
    _serverRunning = false;
    IOUtils.closeQuietly(_server);
    for (Socket socket : _sockets) {
      IOUtils.closeQuietly(socket);
    }
    for (EasyThread thread : _threads) {
      thread.interrupt();
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.IOUtils;
import org.mockserver.integration.ClientAndServer;
import org.mockserver.model.HttpForward;
import org.mockserver.model.HttpRequest;
import org.mockserver.model.HttpResponse;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
/**
* Tests for HTTP traffic through Tunnel via an HTTP proxy.
*
* @author navteniev@linkedin.com
*/
@Test(enabled=false, groups = { "gobblin.tunnel", "disabledOnCI" })
public class TunnelTest {
  // Embedded HTTP proxy serving as the CONNECT endpoint for every test.
  private ClientAndServer mockServer;
  // Ephemeral port the mock proxy listens on; assigned in startProxy().
  int PORT = 0;

  /** Starts the mock proxy on an ephemeral port before any test runs. */
  @BeforeClass
  public void startProxy()
      throws IOException {
    mockServer = ClientAndServer.startClientAndServer(0);
    PORT = mockServer.getPort();
  }

  /** Shuts the mock proxy down after all tests complete. */
  @AfterClass
  public void stopProxy() {
    mockServer.stop();
  }

  /** Clears recorded expectations so they don't leak between tests. */
  @AfterMethod
  public void reset(){
    mockServer.reset();
  }

  /** Building a tunnel should immediately yield a usable listening port. */
  @Test (enabled=false)
  public void mustBuildTunnelAndStartAcceptingConnections()
      throws Exception {
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    try {
      int tunnelPort = tunnel.getPort();
      assertTrue(tunnelPort > 0);
    } finally {
      tunnel.close();
    }
  }

  /** An abrupt client disconnect must not bring the tunnel down for later clients. */
  @Test (enabled=false)
  public void mustHandleClientDisconnectingWithoutClosingTunnel()
      throws Exception {
    mockExample();
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    try {
      int tunnelPort = tunnel.getPort();
      SocketChannel client = SocketChannel.open();
      client.connect(new InetSocketAddress("localhost", tunnelPort));
      // NOTE(review): '%n' is sent literally (the string is never passed through String.format);
      // confirm whether "\r\n" was intended — the test only needs the tunnel to survive the close.
      client.write(ByteBuffer.wrap("GET / HTTP/1.1%nUser-Agent: GobblinTunnel%nConnection:keep - alive %n%n".getBytes()));
      client.close();
      assertNotNull(fetchContent(tunnelPort));
    } finally {
      tunnel.close();
    }
  }

  /** A plain GET through the tunnel must reach the (mocked) external resource. */
  @Test (enabled=false)
  public void mustHandleConnectionToExternalResource()
      throws Exception {
    mockExample();
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    try {
      String content = fetchContent(tunnel.getPort());
      assertNotNull(content);
    } finally {
      tunnel.close();
    }
  }

  /** Several concurrent clients must all get a response through the same tunnel. */
  @Test (enabled=false)
  public void mustHandleMultipleConnections()
      throws Exception {
    mockExample();
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    int clients = 5;
    final CountDownLatch startSignal = new CountDownLatch(1);
    final CountDownLatch doneSignal = new CountDownLatch(clients);
    ExecutorService executor = Executors.newFixedThreadPool(clients);
    try {
      final int tunnelPort = tunnel.getPort();
      List<Future<String>> results = new ArrayList<Future<String>>();
      for (int i = 0; i < clients; i++) {
        Future<String> result = executor.submit(new Callable<String>() {
          @Override
          public String call()
              throws Exception {
            // All clients wait on the latch so the fetches start simultaneously.
            startSignal.await();
            try {
              return fetchContent(tunnelPort);
            } finally {
              doneSignal.countDown();
            }
          }
        });
        results.add(result);
      }
      startSignal.countDown();
      doneSignal.await();
      for (Future<String> result : results) {
        assertNotNull(result.get());
      }
    } finally {
      tunnel.close();
    }
  }

  /** Port 1 is never the proxy, so connecting through the tunnel must fail fast. */
  @Test(enabled=false, expectedExceptions = SocketException.class)
  public void mustRefuseConnectionWhenProxyIsUnreachable()
      throws Exception {
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", 1);
    try {
      int tunnelPort = tunnel.getPort();
      fetchContent(tunnelPort);
    } finally {
      tunnel.close();
    }
  }

  /** A non-200 CONNECT response from the proxy must surface as a refused connection. */
  @Test(enabled=false, expectedExceptions = SocketException.class)
  public void mustRefuseConnectionWhenProxyRefuses() throws Exception{
    mockServer.when(HttpRequest.request().withMethod("CONNECT").withPath("www.us.apache.org:80"))
        .respond(HttpResponse.response().withStatusCode(403));
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    try {
      int tunnelPort = tunnel.getPort();
      fetchContent(tunnelPort);
    } finally {
      tunnel.close();
    }
  }

  /** A CONNECT response delayed past the tunnel's timeout must also refuse the connection. */
  @Test(enabled=false, expectedExceptions = SocketException.class)
  public void mustRefuseConnectionWhenProxyTimesOut() throws Exception{
    mockServer.when(HttpRequest.request().withMethod("CONNECT").withPath("www.us.apache.org:80"))
        .respond(HttpResponse.response().withDelay(TimeUnit.SECONDS,2).withStatusCode(200));
    Tunnel tunnel = Tunnel.build("example.org", 80, "localhost", PORT);
    try {
      int tunnelPort = tunnel.getPort();
      fetchContent(tunnelPort);
    } finally {
      tunnel.close();
    }
  }

  /** Streams a large archive through the tunnel into a temp file (exercises bulk transfer). */
  @Test(enabled = false)
  public void mustDownloadLargeFiles()
      throws Exception {
    mockServer.when(HttpRequest.request().withMethod("CONNECT").withPath("www.us.apache.org:80"))
        .respond(HttpResponse.response().withStatusCode(200));
    mockServer.when(HttpRequest.request().withMethod("GET")
        .withPath("/dist//httpcomponents/httpclient/binary/httpcomponents-client-4.5.1-bin.tar.gz"))
        .forward(HttpForward.forward().withHost("www.us.apache.org").withPort(80));
    Tunnel tunnel = Tunnel.build("www.us.apache.org", 80, "localhost", PORT);
    try {
      IOUtils.copyLarge((InputStream) new URL("http://localhost:" + tunnel.getPort()
              + "/dist//httpcomponents/httpclient/binary/httpcomponents-client-4.5.1-bin.tar.gz")
              .getContent(new Class[]{InputStream.class}),
          new FileOutputStream(File.createTempFile("httpcomponents-client-4.5.1-bin", "tar.gz")));
    } finally {
      tunnel.close();
    }
  }

  /** Fetches "/" through the tunnel's local port and returns the body as a string. */
  private String fetchContent(int tunnelPort)
      throws IOException {
    InputStream content = (InputStream) new URL(String.format("http://localhost:%s/", tunnelPort)).openConnection()
        .getContent(new Class[]{InputStream.class});
    return IOUtils.toString(content);
  }

  /** Registers the standard expectations: CONNECT to example.org:80 and a canned GET / body. */
  private void mockExample()
      throws IOException {
    mockServer.when(HttpRequest.request().withMethod("CONNECT").withPath("example.org:80"))
        .respond(HttpResponse.response().withStatusCode(200));
    mockServer.when(HttpRequest.request().withMethod("GET").withPath("/"))
        .respond(HttpResponse.response(IOUtils.toString(getClass().getResourceAsStream("/example.org.html"))));
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertTrue;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
* Tests for Tunnel with arbitrary TCP traffic.
*
* @author kkandekar@linkedin.com
*/
@Test(enabled=false, singleThreaded = true, groups = { "gobblin.tunnel", "disabledOnCI" })
public class TestTunnelWithArbitraryTCPTraffic {
private static final Logger LOG = LoggerFactory.getLogger(TestTunnelWithArbitraryTCPTraffic.class);
MockServer doubleEchoServer;
MockServer delayedDoubleEchoServer;
MockServer talkFirstEchoServer;
  /** Starts the shared echo servers used by every test in this class. */
  @BeforeClass
  void setUp() throws IOException {
    doubleEchoServer = startDoubleEchoServer();
    LOG.info("Double Echo Server on " + doubleEchoServer.getServerSocketPort());
    delayedDoubleEchoServer = startDoubleEchoServer(1000);
    LOG.info("Delayed DoubleEchoServer on " + delayedDoubleEchoServer.getServerSocketPort());
    talkFirstEchoServer = startTalkFirstEchoServer();
    LOG.info("TalkFirstEchoServer on " + talkFirstEchoServer.getServerSocketPort());
  }
  /** Stops the shared servers and logs any EasyThreads still alive (thread-leak diagnostics). */
  @AfterClass
  void cleanUp() {
    doubleEchoServer.stopServer();
    delayedDoubleEchoServer.stopServer();
    talkFirstEchoServer.stopServer();
    // Give handler threads a moment to notice the shutdown before counting survivors.
    MockServer.sleepQuietly(100);
    int nAlive = 0;
    for (EasyThread t : EasyThread.ALL_THREADS) {
      if (t.isAlive()) {
        LOG.warn(t + " IS ALIVE!");
        nAlive++;
      }
    }
    LOG.warn("Threads left alive " + nAlive);
  }
private MockServer startConnectProxyServer(final boolean largeResponse,
final boolean mixServerAndProxyResponse,
final int nBytesToCloseSocketAfter) throws IOException {
return new ConnectProxyServer(mixServerAndProxyResponse, largeResponse, nBytesToCloseSocketAfter).start();
}
private MockServer startConnectProxyServer(final boolean largeResponse,
final boolean mixServerAndProxyResponse) throws IOException {
return startConnectProxyServer(largeResponse, mixServerAndProxyResponse, -1);
}
private MockServer startConnectProxyServer() throws IOException {
return startConnectProxyServer(false, false);
}
private MockServer startDoubleEchoServer() throws IOException {
return startDoubleEchoServer(0);
}
private MockServer startDoubleEchoServer(final long delay) throws IOException {
return new DoubleEchoServer(delay).start();
}
  /**
   * Reads up to 256 bytes from the channel in a single read and decodes them as US-ASCII.
   * NOTE(review): one read() may return a partial message; the tests rely on small payloads
   * arriving in one chunk.
   */
  private static String readFromSocket(SocketChannel client) throws IOException {
    ByteBuffer readBuf = ByteBuffer.allocate(256);
    LOG.info("Reading from client");
    client.read(readBuf);
    readBuf.flip();
    return StandardCharsets.US_ASCII.decode(readBuf).toString();
  }
  /** Writes the bytes to the channel and flushes the underlying socket stream. */
  private static void writeToSocket(SocketChannel client, byte [] bytes) throws IOException {
    client.write(ByteBuffer.wrap(bytes));
    client.socket().getOutputStream().flush();
  }
  // Baseline test to ensure clients work without tunnel
  @Test(enabled=false, timeOut = 15000)
  public void testDirectConnectionToEchoServer() throws IOException {
    SocketChannel client = SocketChannel.open();
    try {
      client.connect(new InetSocketAddress("localhost", doubleEchoServer.getServerSocketPort()));
      writeToSocket(client, "Knock\n".getBytes());
      String response = readFromSocket(client);
      // Double close (here and in finally) is harmless: SocketChannel.close() is idempotent.
      client.close();
      assertEquals(response, "Knock Knock\n");
    } finally {
      client.close();
    }
  }
  /** One request/response round trip to the echo server through the tunnel + CONNECT proxy. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServer() throws IOException {
    MockServer proxyServer = startConnectProxyServer();
    Tunnel tunnel = Tunnel.build("localhost", doubleEchoServer.getServerSocketPort(), "localhost",
        proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      SocketChannel client = SocketChannel.open();
      client.connect(new InetSocketAddress("localhost", tunnelPort));
      client.write(ByteBuffer.wrap("Knock\n".getBytes()));
      String response = readFromSocket(client);
      client.close();
      assertEquals(response, "Knock Knock\n");
      // Exactly one CONNECT should have reached the proxy for this single client.
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /** Same as testTunnelToEchoServer but against the server that delays each reply by 1s. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToDelayedEchoServer() throws IOException {
    MockServer proxyServer = startConnectProxyServer();
    Tunnel tunnel = Tunnel.build("localhost", delayedDoubleEchoServer.getServerSocketPort(), "localhost",
        proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      SocketChannel client = SocketChannel.open();
      client.connect(new InetSocketAddress("localhost", tunnelPort));
      client.write(ByteBuffer.wrap("Knock\n".getBytes()));
      String response = readFromSocket(client);
      client.close();
      assertEquals(response, "Knock Knock\n");
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /** Two sequential requests over the same tunneled connection must both be echoed correctly. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServerMultiRequest() throws IOException {
    MockServer proxyServer = startConnectProxyServer();
    Tunnel tunnel = Tunnel.build("localhost", doubleEchoServer.getServerSocketPort(),
        "localhost", proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      SocketChannel client = SocketChannel.open();
      client.connect(new InetSocketAddress("localhost", tunnelPort));
      client.write(ByteBuffer.wrap("Knock\n".getBytes()));
      String response1 = readFromSocket(client);
      client.write(ByteBuffer.wrap("Hello\n".getBytes()));
      String response2 = readFromSocket(client);
      client.close();
      assertEquals(response1, "Knock Knock\n");
      assertEquals(response2, "Hello Hello\n");
      // Both requests share one connection, hence a single CONNECT at the proxy.
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
private MockServer startTalkFirstEchoServer() throws IOException {
return new TalkFirstDoubleEchoServer().start();
}
  /**
   * Shared client script against the talk-first server: expect the unsolicited greeting,
   * then verify two double-echo round trips through the given tunnel port.
   */
  private void runClientToTalkFirstServer(int tunnelPort) throws IOException {
    SocketChannel client = SocketChannel.open();
    client.connect(new InetSocketAddress("localhost", tunnelPort));
    // Server speaks first; read its greeting before sending anything.
    String response0 = readFromSocket(client);
    LOG.info(response0);
    client.write(ByteBuffer.wrap("Knock\n".getBytes()));
    String response1 = readFromSocket(client);
    LOG.info(response1);
    client.write(ByteBuffer.wrap("Hello\n".getBytes()));
    String response2 = readFromSocket(client);
    LOG.info(response2);
    client.close();
    assertEquals(response0, "Hello\n");
    assertEquals(response1, "Knock Knock\n");
    assertEquals(response2, "Hello Hello\n");
  }
  /** Tunnel must deliver a server message that arrives before the client has sent anything. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServerThatRespondsFirst() throws IOException {
    MockServer proxyServer = startConnectProxyServer();
    Tunnel tunnel = Tunnel.build("localhost", talkFirstEchoServer.getServerSocketPort(),
        "localhost", proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      runClientToTalkFirstServer(tunnelPort);
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /** Tunnel must cope when the proxy's 200 OK and the server's first bytes arrive in one read. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServerThatRespondsFirstWithMixedProxyAndServerResponseInBuffer() throws IOException {
    MockServer proxyServer = startConnectProxyServer(false, true);
    Tunnel tunnel = Tunnel.build("localhost", talkFirstEchoServer.getServerSocketPort(),
        "localhost", proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      runClientToTalkFirstServer(tunnelPort);
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /** Tunnel must drain a padded (large) CONNECT response that spans multiple reads. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServerThatRespondsFirstAcrossMultipleDrainReads() throws IOException {
    MockServer proxyServer = startConnectProxyServer(true, true);
    Tunnel tunnel = Tunnel.build("localhost", talkFirstEchoServer.getServerSocketPort(),
        "localhost", proxyServer.getServerSocketPort());
    try {
      int tunnelPort = tunnel.getPort();
      runClientToTalkFirstServer(tunnelPort);
      assertEquals(proxyServer.getNumConnects(), 1);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /** Same as the multiple-drain-reads case, but with five clients running concurrently. */
  @Test(enabled=false, timeOut = 15000)
  public void testTunnelToEchoServerThatRespondsFirstAcrossMultipleDrainReadsWithMultipleClients()
      throws IOException, InterruptedException {
    MockServer proxyServer = startConnectProxyServer(true, true);
    Tunnel tunnel = Tunnel.build("localhost", talkFirstEchoServer.getServerSocketPort(),
        "localhost", proxyServer.getServerSocketPort());
    try {
      final int tunnelPort = tunnel.getPort();
      List<EasyThread> threads = new ArrayList<EasyThread>();
      for (int i = 0; i < 5; i++) {
        threads.add(new EasyThread() {
          @Override
          void runQuietly() throws Exception {
            try {
              runClientToTalkFirstServer(tunnelPort);
            } catch (IOException e) {
              e.printStackTrace();
            }
          }
        }.startThread());
      }
      for (Thread t : threads) {
        t.join();
      }
      // One CONNECT per client.
      assertEquals(proxyServer.getNumConnects(), 5);
    } finally {
      proxyServer.stopServer();
      tunnel.close();
      assertFalse(tunnel.isTunnelThreadAlive());
    }
  }
  /**
   * Drives {@code nclients} clients that write and read simultaneously against a "talk past"
   * server (both sides stream without waiting for each other), optionally through a tunnel +
   * CONNECT proxy. Data integrity is verified by comparing MD5 digests of everything each side
   * sent and received.
   */
  private void runSimultaneousDataExchange(boolean useTunnel, int nclients)
      throws IOException, InterruptedException, NoSuchAlgorithmException {
    long t0 = System.currentTimeMillis();
    final int nMsgs = 50;
    // Per-client digests, keyed by client id as a string.
    final Map<String, MessageDigest> digestMsgsRecvdAtServer = new HashMap<String, MessageDigest>();
    final Map<String, MessageDigest> digestMsgsSentByClients = new HashMap<String, MessageDigest>();
    final Map<String, MessageDigest> digestMsgsRecvdAtClients = new HashMap<String, MessageDigest>();
    for (int c = 0; c < nclients ; c++) {
      digestMsgsRecvdAtServer.put(Integer.toString(c), MessageDigest.getInstance("MD5"));
      digestMsgsSentByClients.put(Integer.toString(c), MessageDigest.getInstance("MD5"));
      digestMsgsRecvdAtClients.put(Integer.toString(c), MessageDigest.getInstance("MD5"));
    }
    // Pre-compute the digest of everything the server will send (deterministic message stream).
    final MessageDigest digestMsgsSentByServer = MessageDigest.getInstance("MD5");
    for (int i = 0; i < nMsgs; i++) {
      digestMsgsSentByServer.update(TalkPastServer.generateMsgFromServer(i).getBytes());
    }
    String hashOfMsgsSentByServer = Hex.encodeHexString(digestMsgsSentByServer.digest());
    MockServer talkPastServer = startTalkPastServer(nMsgs, digestMsgsRecvdAtServer);
    int targetPort = talkPastServer.getServerSocketPort();
    Tunnel tunnel = null;
    MockServer proxyServer = null;
    if (useTunnel) {
      // Route client traffic through the tunnel instead of connecting to the server directly.
      proxyServer = startConnectProxyServer();
      tunnel = Tunnel.build("localhost", talkPastServer.getServerSocketPort(),
          "localhost", proxyServer.getServerSocketPort());
      targetPort = tunnel.getPort();
    }
    try {
      List<EasyThread> clientThreads = new ArrayList<EasyThread>();
      final int portToUse = targetPort;
      for (int c = 0; c < nclients; c++) {
        final int clientId = c;
        clientThreads.add(new EasyThread() {
          @Override
          void runQuietly() throws Exception {
            long t = System.currentTimeMillis();
            LOG.info("\t" + clientId + ": Client starting");
            final MessageDigest digestMsgsRecvdAtClient = digestMsgsRecvdAtClients.get(Integer.toString(clientId));
            //final SocketChannel client = SocketChannel.open(); // tunnel test hangs for some reason with SocketChannel
            final Socket client = new Socket();
            client.connect(new InetSocketAddress("localhost", portToUse));
            // Separate reader thread so this thread can keep writing while the server talks past us.
            EasyThread serverReaderThread = new EasyThread() {
              @Override
              public void runQuietly() {
                try {
                  BufferedReader clientIn = new BufferedReader(new InputStreamReader(client.getInputStream()));
                  String line = clientIn.readLine();
                  while (line != null && !line.equals("Goodbye")) {
                    //LOG.info("\t" + clientId + ": Server said [" + line.substring(0, 32) + "... ]");
                    digestMsgsRecvdAtClient.update(line.getBytes());
                    digestMsgsRecvdAtClient.update("\n".getBytes());
                    line = clientIn.readLine();
                  }
                } catch (IOException e) {
                  e.printStackTrace();
                }
                LOG.info("\t" + clientId + ": Client done reading");
              }
            }.startThread();
            MessageDigest hashMsgsFromClient = digestMsgsSentByClients.get(Integer.toString(clientId));
            BufferedOutputStream clientOut = new BufferedOutputStream(client.getOutputStream());
            for (int i = 0; i < nMsgs; i++) {
              // ~140KB per message so the traffic spans many socket buffers.
              String msg = clientId + ":" + i + " " + StringUtils.repeat("Blahhh Blahhh ", 10000) +"\n";
              //LOG.info(clientId + " sending " + msg.length() + " bytes");
              byte [] bytes = msg.getBytes();
              hashMsgsFromClient.update(bytes);
              clientOut.write(bytes);
              MockServer.sleepQuietly(2);
            }
            clientOut.write(("Goodbye\n".getBytes()));
            clientOut.flush();
            LOG.info("\t" + clientId + ": Client done writing in " + (System.currentTimeMillis() - t) + " ms");
            serverReaderThread.join();
            LOG.info("\t" + clientId + ": Client done in " + (System.currentTimeMillis() - t) + " ms");
            client.close();
          }
        }.startThread());
      }
      for (Thread clientThread : clientThreads) {
        clientThread.join();
      }
      LOG.info("All data transfer done in " + (System.currentTimeMillis() - t0) + " ms");
    } finally {
      talkPastServer.stopServer();
      if (tunnel != null) {
        proxyServer.stopServer();
        tunnel.close();
        assertFalse(tunnel.isTunnelThreadAlive());
        assertEquals(proxyServer.getNumConnects(), nclients);
      }
      // Finalize all digests and compare: client-sent vs server-received, server-sent vs client-received.
      Map<String, String> hashOfMsgsRecvdAtServer = new HashMap<String, String>();
      Map<String, String> hashOfMsgsSentByClients = new HashMap<String, String>();
      Map<String, String> hashOfMsgsRecvdAtClients = new HashMap<String, String>();
      for (int c = 0; c < nclients; c++) {
        String client = Integer.toString(c);
        hashOfMsgsRecvdAtServer.put(client, Hex.encodeHexString(digestMsgsRecvdAtServer.get(client).digest()));
        hashOfMsgsSentByClients.put(client, Hex.encodeHexString(digestMsgsSentByClients.get(client).digest()));
        hashOfMsgsRecvdAtClients.put(client, Hex.encodeHexString(digestMsgsRecvdAtClients.get(client).digest()));
      }
      LOG.info("\tComparing client sent to server received");
      assertEquals(hashOfMsgsSentByClients, hashOfMsgsRecvdAtServer);
      LOG.info("\tComparing server sent to client received");
      for (String hashOfMsgsRecvdAtClient : hashOfMsgsRecvdAtClients.values()) {
        assertEquals(hashOfMsgsSentByServer, hashOfMsgsRecvdAtClient);
      }
      LOG.info("\tDone");
    }
  }
/**
 * Starts a {@code TalkPastServer} that exchanges {@code nMsgs} messages with each client while
 * recording a per-client digest of everything it receives into {@code digestMsgsRecvdAtServer}.
 */
private MockServer startTalkPastServer(final int nMsgs, final Map<String, MessageDigest> digestMsgsRecvdAtServer) throws IOException {
  return new TalkPastServer(nMsgs, digestMsgsRecvdAtServer).start();
}
// Baseline tests to ensure simultaneous data transfer protocol is fine (disabled for now)
@Test(enabled = false, timeOut = 15000)
public void testSimultaneousDataExchangeWithDirectConnection()
    throws IOException, InterruptedException, NoSuchAlgorithmException {
  // Single client talking directly to the server (no tunnel): validates the test harness itself.
  runSimultaneousDataExchange(false, 1);
}
@Test(enabled = false, timeOut = 15000)
public void testSimultaneousDataExchangeWithDirectConnectionAndMultipleClients()
    throws IOException, InterruptedException, NoSuchAlgorithmException {
  // Three concurrent clients, still no tunnel: baseline for the multi-client scenario.
  runSimultaneousDataExchange(false, 3);
}
/*
I wrote this test because I saw this symptom once randomly while testing with Gobblin. Test passes, but occasionally
we see the following warning in the logs:
15/10/29 21:11:17 WARN tunnel.Tunnel: exception handling event on java.nio.channels.SocketChannel[connected local=/127.0.0.1:34669 remote=/127.0.0.1:38578]
java.nio.channels.CancelledKeyException
at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:73)
at sun.nio.ch.SelectionKeyImpl.readyOps(SelectionKeyImpl.java:87)
at java.nio.channels.SelectionKey.isWritable(SelectionKey.java:312)
at gobblin.tunnel.Tunnel$ReadWriteHandler.write(Tunnel.java:423)
at gobblin.tunnel.Tunnel$ReadWriteHandler.call(Tunnel.java:403)
at gobblin.tunnel.Tunnel$ReadWriteHandler.call(Tunnel.java:365)
at gobblin.tunnel.Tunnel$Dispatcher.dispatch(Tunnel.java:142)
at gobblin.tunnel.Tunnel$Dispatcher.run(Tunnel.java:127)
at java.lang.Thread.run(Thread.java:745)
*/
@Test(enabled=false, timeOut = 20000)
public void testSimultaneousDataExchangeWithTunnel()
    throws IOException, InterruptedException, NoSuchAlgorithmException {
  // Single client exchanging data with the server through the proxy tunnel.
  runSimultaneousDataExchange(true, 1);
}
@Test(enabled=false, timeOut = 20000)
public void testSimultaneousDataExchangeWithTunnelAndMultipleClients()
    throws IOException, InterruptedException, NoSuchAlgorithmException {
  // Three concurrent clients through the same tunnel: exercises multiplexed relaying.
  runSimultaneousDataExchange(true, 3);
}
/**
 * Verifies that when the proxy cannot reach the remote server, a client that writes first
 * eventually observes an {@link IOException} propagated back through the tunnel.
 */
@Test(enabled=false, expectedExceptions = IOException.class)
public void testTunnelWhereProxyConnectionToServerFailsWithWriteFirstClient() throws IOException, InterruptedException {
  MockServer proxyServer = startConnectProxyServer();
  final int nonExistentPort = 54321; // hope this doesn't exist!
  Tunnel tunnel = Tunnel.build("localhost", nonExistentPort, "localhost", proxyServer.getServerSocketPort());
  try {
    int tunnelPort = tunnel.getPort();
    // try-with-resources: the channel is closed even when the expected IOException fires mid-test
    // (previously the explicit close() was skipped on the exception path, leaking the socket)
    try (SocketChannel client = SocketChannel.open()) {
      client.configureBlocking(true);
      client.connect(new InetSocketAddress("localhost", tunnelPort));
      // Might have to write multiple times before connection error propagates back from proxy through tunnel to client
      for (int i = 0; i < 5; i++) {
        client.write(ByteBuffer.wrap("Knock\n".getBytes()));
        Thread.sleep(100);
      }
      String response1 = readFromSocket(client);
      LOG.info(response1);
    }
  } finally {
    proxyServer.stopServer();
    tunnel.close();
    assertFalse(tunnel.isTunnelThreadAlive());
    assertEquals(proxyServer.getNumConnects(), 1);
  }
}
/**
 * Verifies that closing the tunnel stops its dispatcher thread, even when a client wrote a
 * large amount of data and disconnected without waiting for responses.
 */
@Test(enabled=false, timeOut = 15000)
public void testTunnelThreadDeadAfterClose() throws IOException, InterruptedException {
  MockServer proxyServer = startConnectProxyServer();
  Tunnel tunnel = Tunnel.build("localhost", talkFirstEchoServer.getServerSocketPort(),
      "localhost", proxyServer.getServerSocketPort());
  try {
    int tunnelPort = tunnel.getPort();
    SocketChannel client = SocketChannel.open();
    client.connect(new InetSocketAddress("localhost", tunnelPort));
    // the talk-first server speaks first; its greeting proves the tunnel is relaying
    String response0 = readFromSocket(client);
    LOG.info(response0);
    // write a lot of data to increase chance of response after close
    for (int i = 0; i < 1000; i++) {
      client.write(ByteBuffer.wrap("Knock\n".getBytes()));
    }
    // don't wait for response
    client.close();
    assertEquals(response0, "Hello\n");
    assertEquals(proxyServer.getNumConnects(), 1);
  } finally {
    proxyServer.stopServer();
    tunnel.close();
    // the dispatcher thread must have terminated once the tunnel is closed
    assertFalse(tunnel.isTunnelThreadAlive());
  }
}
/**
 * Verifies that an unexpected failure inside the tunnel kills the tunnel thread and surfaces
 * as an {@link IOException} to the client rather than hanging.
 */
@Test(enabled=false, timeOut = 15000, expectedExceptions = IOException.class)
public void testTunnelThreadDeadAfterUnexpectedException() throws IOException, InterruptedException {
  // NOTE(review): proxy is configured via the (false, false, 8) overload to misbehave and trigger
  // the failure — confirm the argument semantics against startConnectProxyServer's definition.
  MockServer proxyServer = startConnectProxyServer(false, false, 8);
  Tunnel tunnel = Tunnel.build("localhost", doubleEchoServer.getServerSocketPort(),
      "localhost", proxyServer.getServerSocketPort());
  String response = "";
  try {
    int tunnelPort = tunnel.getPort();
    SocketChannel client = SocketChannel.open();
    client.connect(new InetSocketAddress("localhost", tunnelPort));
    client.write(ByteBuffer.wrap("Knock\n".getBytes()));
    response = readFromSocket(client);
    LOG.info(response);
    // keep writing so the tunnel's failure propagates back to this client
    for (int i = 0; i < 5; i++) {
      client.write(ByteBuffer.wrap("Hello\n".getBytes()));
      Thread.sleep(100);
    }
    client.close();
  } finally {
    proxyServer.stopServer();
    tunnel.close();
    // the double-echo reply must never have arrived intact
    assertNotEquals(response, "Knock Knock\n");
    assertEquals(proxyServer.getNumConnects(), 1);
    assertFalse(tunnel.isTunnelThreadAlive());
  }
}
/**
 * This test demonstrates connecting to a mysql DB through
 * an http proxy tunnel to a public data set of genetic data
 * http://www.ensembl.org/info/data/mysql.html
 *
 * Disabled for now because it requires the inclusion of a mysql jdbc driver jar
 *
 * @throws Exception if the proxy, tunnel or database connection fails
 */
@Test(enabled = false, timeOut = 40000)
public void accessEnsembleDB() throws Exception{
  MockServer proxyServer = startConnectProxyServer();
  Tunnel tunnel = Tunnel.build("useastdb.ensembl.org", 5306,
      "localhost", proxyServer.getServerSocketPort());
  try {
    int port = tunnel.getPort();
    String query2 = "SELECT DISTINCT gene_id, biotype, source, description from gene LIMIT 1000";
    int row = 0;
    // try-with-resources: closing the Connection also releases its statements and result sets
    // (the original leaked the connection; it was never closed)
    try (Connection connection =
        DriverManager.getConnection("jdbc:mysql://localhost:" + port + "/homo_sapiens_core_82_38?user=anonymous")) {
      ResultSet resultSet = connection.createStatement().executeQuery(query2);
      while (resultSet.next()) {
        row++;
        LOG.info(String.format("%s|%s|%s|%s|%s%n", row, resultSet.getString(1), resultSet.getString(2),
            resultSet.getString(3), resultSet.getString(4)));
      }
    }
    assertEquals(row, 1000);
    assertTrue(proxyServer.getNumConnects() > 0);
  }
  finally {
    proxyServer.stopServer();
    tunnel.close();
    assertFalse(tunnel.isTunnelThreadAlive());
  }
}
}
| 2,785 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/ProxySetupHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gobblin.configuration.ConfigurationKeys;
/**
 * Handler for setting up the connection from the Tunnel to the intermediate proxy via HTTP CONNECT.
 *
 * <p>A small non-blocking state machine driven by the tunnel's selector:
 * CONNECTING (finish the TCP connect to the proxy) → WRITING (send the HTTP CONNECT request)
 * → READING (parse the proxy's response). Once a 200 status line and the end of the HTTP headers
 * have been seen, any remaining buffered bytes are handed to a new {@link ReadWriteHandler},
 * which takes over relaying data between client and proxy.</p>
 */
class ProxySetupHandler implements Callable<HandlerState> {
  private static final Logger LOG = LoggerFactory.getLogger(Tunnel.class);

  public static final String HTTP_1_1_OK = "HTTP/1.1 200";
  private static final ByteBuffer OK_REPLY =
      ByteBuffer.wrap(HTTP_1_1_OK.getBytes(ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
  public static final String HTTP_1_0_OK = "HTTP/1.0 200";
  // Some proxies answer with an HTTP/1.0 status line; accept either form.
  private static final Set<ByteBuffer> OK_REPLIES = new HashSet<>(
      Arrays.asList(OK_REPLY, ByteBuffer.wrap(HTTP_1_0_OK.getBytes(ConfigurationKeys.DEFAULT_CHARSET_ENCODING))));

  private final SocketChannel client;
  private final Selector selector;
  private final SocketChannel proxy;
  private HandlerState state = HandlerState.CONNECTING;
  // Holds the CONNECT request while WRITING; re-allocated to hold the proxy's response for READING.
  private ByteBuffer buffer;
  private final long connectStartTime;
  private int totalBytesRead = 0;
  private final Config config;

  /**
   * Initiates a non-blocking connect to the proxy and registers with the selector; if the
   * connect completes immediately, skips straight to the WRITING state.
   */
  ProxySetupHandler(SocketChannel client, Selector selector, Config config) throws IOException {
    this.config = config;
    this.client = client;
    this.selector = selector;
    this.buffer =
        ByteBuffer
            .wrap(String
                .format(
                    "CONNECT %s:%d HTTP/1.1\r%nUser-Agent: GobblinTunnel\r%nservice-name: gobblin\r%n"
                        + "Connection: keep-alive\r%nHost: %s:%d\r%n\r%n",
                    config.getRemoteHost(), config.getRemotePort(), config.getRemoteHost(), config.getRemotePort())
                .getBytes(ConfigurationKeys.DEFAULT_CHARSET_ENCODING));

    //Blocking call
    this.proxy = SocketChannel.open();
    this.proxy.configureBlocking(false);
    this.connectStartTime = System.currentTimeMillis();
    boolean connected =
        this.proxy.connect(new InetSocketAddress(this.config.getProxyHost(), this.config.getProxyPort()));

    if (!connected) {
      this.client.configureBlocking(false);
      this.client.register(this.selector, SelectionKey.OP_READ, this);
      this.proxy.register(this.selector, SelectionKey.OP_CONNECT, this);
    } else {
      this.state = HandlerState.WRITING;
      this.proxy.register(this.selector, SelectionKey.OP_WRITE, this);
    }
  }

  @Override
  public HandlerState call() throws Exception {
    try {
      switch (this.state) {
        case CONNECTING:
          connect();
          break;
        case WRITING:
          write();
          break;
        case READING:
          read();
          break;
        default:
          throw new IllegalStateException("ProxySetupHandler should not be in state " + this.state);
      }
    } catch (IOException ioe) {
      LOG.warn("Failed to establish a proxy connection for {}", this.client.getRemoteAddress(), ioe);
      closeChannels();
    }
    return this.state;
  }

  /**
   * Completes the pending connect to the proxy, or tears everything down if the
   * {@link Config#PROXY_CONNECT_TIMEOUT_MS} deadline has passed.
   */
  private void connect() throws IOException {
    if (this.proxy.isOpen()) {
      if (this.proxy.finishConnect()) {
        this.proxy.register(this.selector, SelectionKey.OP_WRITE, this);
        // the client should not be selected again until the CONNECT handshake is done
        SelectionKey clientKey = this.client.keyFor(this.selector);
        if (clientKey != null) {
          clientKey.cancel();
        }
        this.state = HandlerState.WRITING;
      } else if (this.connectStartTime + Config.PROXY_CONNECT_TIMEOUT_MS < System.currentTimeMillis()) {
        LOG.warn("Proxy connect timed out for client {}", this.client);
        closeChannels();
      }
    }
  }

  /** Writes the CONNECT request; once it is fully sent, switches to READING the response. */
  private void write() throws IOException {
    while (this.proxy.write(this.buffer) > 0) {}
    if (this.buffer.remaining() == 0) {
      this.proxy.register(this.selector, SelectionKey.OP_READ, this);
      this.state = HandlerState.READING;
      this.buffer = ByteBuffer.allocate(1024);
    }
  }

  /**
   * Accumulates the proxy's HTTP response. On a 200 status line, scans for the blank line that
   * terminates the headers; anything after it belongs to the server's data stream and is passed
   * on (via the buffer) to the {@link ReadWriteHandler} that takes over.
   */
  private void read() throws IOException {
    int lastBytes = 0;
    while ((lastBytes = this.proxy.read(this.buffer)) > 0) {
      // totalBytesRead has to be stateful because read() might return at arbitrary points
      this.totalBytesRead += lastBytes;
    }
    if (this.totalBytesRead >= OK_REPLY.limit()) {
      byte[] temp = this.buffer.array();
      this.buffer.flip();
      if (OK_REPLIES.contains(ByteBuffer.wrap(temp, 0, OK_REPLY.limit()))) {
        // drain the rest of the HTTP response. 2 consecutive CRLFs signify the end of an HTTP
        // message (some proxies return newlines instead of CRLFs)
        for (int i = OK_REPLY.limit(); i <= (this.buffer.limit() - 4); i++) {
          if (((temp[i] == '\n') && (temp[i + 1] == '\n')) || ((temp[i + 1] == '\n') && (temp[i + 2] == '\n'))
              || ((temp[i + 2] == '\n') && (temp[i + 3] == '\n'))
              || ((temp[i] == '\r') && (temp[i + 1] == '\n') && (temp[i + 2] == '\r') && (temp[i + 3] == '\n'))) {
            this.state = null;
            this.buffer.position(i + 4);
            new ReadWriteHandler(this.proxy, this.buffer, this.client, this.selector);
            return;
          }
        }
        // BUGFIX: end of headers not seen yet — un-flip the buffer so the next read() appends to
        // the bytes already received. Previously the buffer was left flipped (position 0, limit =
        // bytes read so far), so a follow-up read would overwrite the accumulated response.
        this.buffer.position(this.buffer.limit());
        this.buffer.limit(this.buffer.capacity());
      } else {
        LOG.error("Got non-200 response from proxy: ["
            + new String(temp, 0, OK_REPLY.limit(), ConfigurationKeys.DEFAULT_CHARSET_ENCODING)
            + "], closing connection.");
        closeChannels();
      }
    }
  }

  /** Best-effort close of both channels, logging (not propagating) any failures. */
  private void closeChannels() {
    if (this.proxy.isOpen()) {
      try {
        this.proxy.close();
      } catch (IOException log) {
        LOG.warn("Failed to close proxy channel {}", this.proxy, log);
      }
    }
    if (this.client.isOpen()) {
      try {
        this.client.close();
      } catch (IOException log) {
        LOG.warn("Failed to close client channel {}", this.client, log);
      }
    }
  }
}
| 2,786 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/Tunnel.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.util.Set;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.gobblin.util.ExecutorsUtils;
/**
 * This class implements a tunnel through a proxy to a resource on the internet. Frequently data stores to be accessed
 * by Gobblin reside outside data centers. In these cases, outbound access from a data center typically needs to go
 * through a gateway proxy for security purposes. In some cases this is an HTTP proxy. However, some protocols like
 * JDBC don't support the concept of "proxies", let alone HTTP proxies, and hence a solution is needed to enable this.
 *
 * This class provides a method of tunneling arbitrary protocols like JDBC connections over an HTTP proxy. Note that
 * while it is currently only implemented for JDBC (see {@link org.apache.gobblin.source.jdbc.JdbcExtractor}), it can
 * be extended to work with any other TCP-based protocol.
 *
 * The way the Tunnel works is as follows:
 * 1. When a Gobblin data source or Extractor or related class (such as JdbcProvider) is invoked to fetch data from an
 *    externally hosted resource, it should check if the WorkUnit has a proxy host and port defined.
 * 2. If a proxy is defined, it should extract the remote host and port from the target URL hosting the resource (e.g.
 *    the JdbcProvider gets this from the connectionUrl.)
 * 3. The extractor then creates a Tunnel instance configured with the remote host and port and the proxy host and port.
 * 4. The Tunnel starts a thread that listens on an arbitrary port on localhost.
 * 5. The extractor then points the target URL to the localhost and port the Tunnel is listening on. (E.g. in the case
 *    of JDBC, the JdbcProvider changes the connectionUrl to replace the remote host and port with the localhost and
 *    port before passing it on to the driver.)
 * 6. Hence when the extractor client (e.g. JDBC driver) creates a connection, it connects to the Tunnel socket instead
 *    of the actual target host.
 * 7. The Tunnel then connects to the remote host through the proxy via a HTTP CONNECT request.
 * 8. If established successfully, the Tunnel then simply relays bytes back and forth between the Gobblin extractor and
 *    the target host via the intermediate proxy.
 * 9. When the Gobblin extractor (e.g. JDBC data source) is closed down, the Tunnel must be shut down as well.
 *
 * The Tunnel can accept as many connections as the JdbcExtractor opens. It uses NIO to minimize resource usage.
 *
 * @author navteniev@linkedin.com
 * @author kkandekar@linkedin.com
 */
public class Tunnel {
  /** Sentinel returned by {@link #getPort()} when the tunnel's server socket is not open. */
  public static final int NON_EXISTENT_PORT = -1;
  private static final Logger LOG = LoggerFactory.getLogger(Tunnel.class);

  private ServerSocketChannel server;
  private Thread thread;
  private final Config config;

  private Tunnel(String remoteHost, int remotePort, String proxyHost, int proxyPort) {
    this.config = new Config(remoteHost, remotePort, proxyHost, proxyPort);
  }

  /**
   * Binds a server socket on an ephemeral localhost port and starts the dispatcher thread.
   */
  private Tunnel open() throws IOException {
    try {
      this.server = ServerSocketChannel.open().bind(null); // bind(null) picks an ephemeral port
      this.server.configureBlocking(false);
      Selector selector = Selector.open();
      startTunnelThread(selector);
      return this;
    } catch (IOException ioe) {
      LOG.error("Failed to open the tunnel", ioe);
      throw ioe;
    }
  }

  /**
   * @return the local port the tunnel is listening on, or {@link #NON_EXISTENT_PORT} if the
   *         server socket is not open
   * @throws IOException if the local address cannot be determined
   */
  public int getPort() throws IOException {
    SocketAddress localAddress = null;
    try {
      if (this.server != null && this.server.isOpen()) {
        localAddress = this.server.getLocalAddress();
      }
      if (localAddress instanceof InetSocketAddress) {
        return ((InetSocketAddress) localAddress).getPort();
      }
    } catch (IOException e) {
      LOG.error("Failed to get tunnel port", e);
      throw e;
    }
    return NON_EXISTENT_PORT;
  }

  private void startTunnelThread(Selector selector) {
    this.thread = new Thread(new Dispatcher(selector), "Tunnel Listener");
    this.thread.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
      @Override
      public void uncaughtException(Thread t, Throwable e) {
        LOG.error("Uncaught exception in thread " + t.getName(), e);
      }
    });
    //so we don't prevent the JVM from shutting down, just in case
    this.thread.setDaemon(true);
    this.thread.start();
  }

  public boolean isTunnelThreadAlive() {
    return (this.thread != null && this.thread.isAlive());
  }

  /**
   * Event loop: selects ready keys on the tunnel's selector and invokes the {@link Callable}
   * handler attached to each key, until the thread is interrupted by {@link Tunnel#close()}.
   */
  private class Dispatcher implements Runnable {
    private final Selector selector;

    public Dispatcher(Selector selector) {
      this.selector = selector;
    }

    @Override
    public void run() {
      try {
        Tunnel.this.server.register(this.selector, SelectionKey.OP_ACCEPT,
            ExecutorsUtils.loggingDecorator(new AcceptHandler(Tunnel.this.server, this.selector, Tunnel.this.config)));

        while (!Thread.interrupted()) {
          this.selector.select();
          Set<SelectionKey> selectionKeys = this.selector.selectedKeys();
          for (SelectionKey selectionKey : selectionKeys) {
            dispatch(selectionKey);
          }
          selectionKeys.clear();
        }
      } catch (IOException ioe) {
        LOG.error("Unhandled IOException. Tunnel will close", ioe);
      } finally {
        // BUGFIX: release the selector once dispatching stops; previously it was leaked.
        try {
          this.selector.close();
        } catch (IOException ioe) {
          LOG.warn("Failed to close selector", ioe);
        }
      }
      LOG.info("Closing tunnel");
    }

    private void dispatch(SelectionKey selectionKey) {
      Callable<?> attachment = (Callable<?>) selectionKey.attachment();
      try {
        attachment.call();
      } catch (Exception e) {
        // a failing handler must not kill the dispatcher loop
        LOG.error("exception handling event on {}", selectionKey.channel(), e);
      }
    }
  }

  /**
   * Closes the tunnel's server socket and stops the dispatcher thread, blocking until it exits.
   */
  public void close() {
    try {
      this.server.close();
      LOG.info("Closed tunnel.");
    } catch (IOException ioe) {
      LOG.warn("Exception during shutdown of tunnel", ioe);
    } finally {
      try {
        this.thread.interrupt();
        this.thread.join();
      } catch (InterruptedException e) {
        // BUGFIX: restore the interrupt status so callers can still observe the interruption
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
      }
    }
  }

  /** Creates a tunnel to {@code remoteHost:remotePort} via the given HTTP proxy and opens it. */
  public static Tunnel build(String remoteHost, int remotePort, String proxyHost, int proxyPort) throws IOException {
    return new Tunnel(remoteHost, remotePort, proxyHost, proxyPort).open();
  }
}
| 2,787 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/AcceptHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.IOException;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.nio.channels.SocketChannel;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Handler for accepting connections from the client on the socket the Tunnel listens on.
 * Each accepted client is handed to a new {@link ProxySetupHandler}, which performs the
 * HTTP CONNECT handshake with the proxy.
 */
class AcceptHandler implements Callable<HandlerState> {
  private static final Logger LOG = LoggerFactory.getLogger(Tunnel.class);

  private final ServerSocketChannel server;
  private final Selector selector;
  private final Config config;

  AcceptHandler(ServerSocketChannel server, Selector selector, Config config) {
    this.config = config;
    this.server = server;
    this.selector = selector;
  }

  @Override
  public HandlerState call()
      throws Exception {
    SocketChannel client = this.server.accept();
    if (client == null) {
      // BUGFIX: accept() on a non-blocking channel may return null on a spurious wakeup;
      // previously this caused a NullPointerException below.
      return HandlerState.ACCEPTING;
    }
    LOG.info("Accepted connection from {}", client.getRemoteAddress());
    try {
      new ProxySetupHandler(client, this.selector, this.config);
    } catch (IOException ioe) {
      // could not start the proxy handshake for this client; drop the connection
      client.close();
    }
    return HandlerState.ACCEPTING;
  }
}
| 2,788 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/ReadWriteHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.CancelledKeyException;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.concurrent.Callable;
import static java.nio.channels.SelectionKey.OP_READ;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This class handles the relaying of data back and forth between the Client-to-Tunnel and Tunnel-to-Proxy
 * socket connections. This class is not thread safe.
 *
 * <p>The handler alternates between READING (fill the shared buffer from whichever channel is
 * readable) and WRITING (drain the buffer into the opposite channel), re-registering selector
 * interest sets as it goes.</p>
 */
class ReadWriteHandler implements Callable<HandlerState> {
  static final Logger LOG = LoggerFactory.getLogger(Tunnel.class);

  private final SocketChannel proxy;
  private final SocketChannel client;
  private final Selector selector;
  // single shared relay buffer: bytes read from one channel are written to the other
  private final ByteBuffer buffer = ByteBuffer.allocate(1000000);
  private HandlerState state = HandlerState.READING;

  /**
   * @param proxy channel to the proxy (CONNECT handshake already completed)
   * @param mixedServerResponseBuffer leftover bytes that followed the proxy's HTTP headers; they
   *        belong to the server's data stream and are forwarded to the client immediately
   * @param client channel to the local client of the tunnel
   * @param selector the tunnel's selector; both channels are registered for OP_READ
   */
  ReadWriteHandler(SocketChannel proxy, ByteBuffer mixedServerResponseBuffer, SocketChannel client, Selector selector)
      throws IOException {
    this.proxy = proxy;
    this.client = client;
    this.selector = selector;

    // drain response that is not part of proxy's 200 OK and is part of data pushed from server, and push to client
    if (mixedServerResponseBuffer.limit() > mixedServerResponseBuffer.position()) {
      // temporarily blocking so these bytes are fully flushed before non-blocking relaying starts
      this.client.configureBlocking(true);
      OutputStream clientOut = this.client.socket().getOutputStream();
      clientOut.write(mixedServerResponseBuffer.array(), mixedServerResponseBuffer.position(),
          mixedServerResponseBuffer.limit() - mixedServerResponseBuffer.position());
      clientOut.flush();
    }

    this.proxy.configureBlocking(false);
    this.client.configureBlocking(false);

    this.client.register(this.selector, OP_READ, this);
    this.proxy.register(this.selector, OP_READ, this);
  }

  @Override
  public HandlerState call()
      throws Exception {
    try {
      switch (this.state) {
        case READING:
          read();
          break;
        case WRITING:
          write();
          break;
        default:
          throw new IllegalStateException("ReadWriteHandler should never be in state " + this.state);
      }
    } catch (CancelledKeyException e) {
      // a key can be canceled between selection and dispatch; log and carry on
      LOG.warn("Encountered canceled key while " + this.state, e);
    } catch (IOException ioe) {
      closeChannels();
      throw new IOException(String.format("Could not read/write between %s and %s", this.proxy, this.client), ioe);
    } catch (Exception e) {
      LOG.error("Unexpected exception", e);
      try {
        closeChannels();
      } finally {
        throw e;
      }
    }
    return this.state;
  }

  /**
   * Drains the relay buffer into whichever channel selected as writable. Once everything is
   * flushed, both channels go back to OP_READ (or the writer is closed if its peer is gone);
   * on a partial write the remainder is kept and the handler stays in WRITING.
   */
  private void write()
      throws IOException {
    SelectionKey proxyKey = this.proxy.keyFor(this.selector);
    SelectionKey clientKey = this.client.keyFor(this.selector);

    SocketChannel writeChannel = null;
    SocketChannel readChannel = null;
    SelectionKey writeKey = null;
    // figure out which side is ready to be written to; the other side is where the data came from
    if (this.selector.selectedKeys().contains(proxyKey) && proxyKey.isValid() && proxyKey.isWritable()) {
      writeChannel = this.proxy;
      readChannel = this.client;
      writeKey = proxyKey;
    } else if (this.selector.selectedKeys().contains(clientKey) && clientKey.isValid() && clientKey.isWritable()) {
      writeChannel = this.client;
      readChannel = this.proxy;
      writeKey = clientKey;
    }

    if (writeKey != null) {
      int lastWrite, totalWrite = 0;

      this.buffer.flip();

      int available = this.buffer.remaining();

      while ((lastWrite = writeChannel.write(this.buffer)) > 0) {
        totalWrite += lastWrite;
      }

      LOG.debug("{} bytes written to {}", totalWrite, writeChannel == this.proxy ? "proxy" : "client");

      if (totalWrite == available) {
        // fully flushed: resume reading from both sides
        this.buffer.clear();
        if (readChannel.isOpen()) {
          readChannel.register(this.selector, SelectionKey.OP_READ, this);
          writeChannel.register(this.selector, SelectionKey.OP_READ, this);
        } else {
          // peer already hit EOF and its pending data is now flushed; close this side too
          writeChannel.close();
        }
        this.state = HandlerState.READING;
      } else {
        // partial write: keep the unwritten remainder at the front of the buffer
        this.buffer.compact();
      }
      if (lastWrite == -1) {
        closeChannels();
      }
    }
  }

  /**
   * Fills the relay buffer from whichever channel selected as readable; if anything was read,
   * cancels that side's read interest and registers the opposite channel for OP_WRITE.
   */
  private void read()
      throws IOException {
    SelectionKey proxyKey = this.proxy.keyFor(this.selector);
    SelectionKey clientKey = this.client.keyFor(this.selector);

    SocketChannel readChannel = null;
    SocketChannel writeChannel = null;
    SelectionKey readKey = null;
    if (this.selector.selectedKeys().contains(proxyKey) && proxyKey.isReadable()) {
      readChannel = this.proxy;
      writeChannel = this.client;
      readKey = proxyKey;
    } else if (this.selector.selectedKeys().contains(clientKey) && clientKey.isReadable()) {
      readChannel = this.client;
      writeChannel = this.proxy;
      readKey = clientKey;
    }

    if (readKey != null) {
      int lastRead, totalRead = 0;

      while ((lastRead = readChannel.read(this.buffer)) > 0) {
        totalRead += lastRead;
      }

      LOG.debug("{} bytes read from {}", totalRead, readChannel == this.proxy ? "proxy" : "client");

      if (totalRead > 0) {
        // stop reading from this side until the buffered data has been relayed onward
        readKey.cancel();
        writeChannel.register(this.selector, SelectionKey.OP_WRITE, this);
        this.state = HandlerState.WRITING;
      }
      if (lastRead == -1) {
        // EOF on this side; the other side is closed by write() once pending data is flushed
        readChannel.close();
      }
    }
  }

  /** Best-effort close of both channels, logging (not propagating) any failures. */
  private void closeChannels() {
    if (this.proxy.isOpen()) {
      try {
        this.proxy.close();
      } catch (IOException log) {
        LOG.warn("Failed to close proxy channel {}", this.proxy, log);
      }
    }
    if (this.client.isOpen()) {
      try {
        this.client.close();
      } catch (IOException log) {
        LOG.warn("Failed to close client channel {}", this.client, log);
      }
    }
  }
}
| 2,789 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/Config.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
/**
 * Immutable value object holding the endpoints a tunnel needs: the remote host/port that is the
 * real destination, and the HTTP proxy host/port the tunnel must CONNECT through.
 */
class Config {
  /** Milliseconds to wait for the TCP connect to the proxy before giving up. */
  public static final int PROXY_CONNECT_TIMEOUT_MS = 5000;

  // The ultimate destination the tunnel relays traffic to.
  private final String targetHost;
  private final int targetPort;
  // The intermediate HTTP proxy the tunnel connects through.
  private final String gatewayHost;
  private final int gatewayPort;

  public Config(String remoteHost, int remotePort, String proxyHost, int proxyPort) {
    this.targetHost = remoteHost;
    this.targetPort = remotePort;
    this.gatewayHost = proxyHost;
    this.gatewayPort = proxyPort;
  }

  /** @return the destination host behind the proxy */
  public String getRemoteHost() {
    return this.targetHost;
  }

  /** @return the destination port behind the proxy */
  public int getRemotePort() {
    return this.targetPort;
  }

  /** @return the HTTP proxy host */
  public String getProxyHost() {
    return this.gatewayHost;
  }

  /** @return the HTTP proxy port */
  public int getProxyPort() {
    return this.gatewayPort;
  }
}
| 2,790 |
0 | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin | Create_ds/gobblin/gobblin-tunnel/src/main/java/org/apache/gobblin/tunnel/HandlerState.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.tunnel;
/**
 * Represents the different states a given handler can be in.
 *
 * <p>{@code AcceptHandler} always reports ACCEPTING; {@code ProxySetupHandler} progresses
 * CONNECTING → WRITING → READING; {@code ReadWriteHandler} alternates READING and WRITING.</p>
 */
enum HandlerState {
  /** Waiting to accept a new client connection on the tunnel's listening socket. */
  ACCEPTING,
  /** Waiting for the non-blocking TCP connect to the proxy to complete. */
  CONNECTING,
  /** Waiting to read data from a channel. */
  READING,
  /** Waiting to write buffered data to a channel. */
  WRITING
}
| 2,791 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/commit/FsRenameCommitStepTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.commit;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.State;
/**
 * Test for {@link FsRenameCommitStep}.
 *
 * @author Ziyang Liu
 */
@Test(groups = { "gobblin.commit" })
public class FsRenameCommitStepTest {

  private static final String ROOT_DIR = "fs-rename-commit-sequence-test";

  private FileSystem fs;
  private FsRenameCommitStep step;

  @BeforeClass
  public void setUp() throws IOException {
    this.fs = FileSystem.getLocal(new Configuration());
    // Start from a clean slate in case an earlier run left files behind.
    this.fs.delete(new Path(ROOT_DIR), true);

    // Layout: dir1/file exists and dir2 is empty; the step under test renames
    // dir1/file -> dir2/file.
    Path sourceDir = new Path(ROOT_DIR, "dir1");
    Path targetDir = new Path(ROOT_DIR, "dir2");
    this.fs.mkdirs(sourceDir);
    this.fs.mkdirs(targetDir);

    Path source = new Path(sourceDir, "file");
    Path target = new Path(targetDir, "file");
    this.fs.createNewFile(source);

    this.step =
        (FsRenameCommitStep) new FsRenameCommitStep.Builder<>().from(source).to(target).withProps(new State()).build();
  }

  @AfterClass
  public void tearDown() throws IOException {
    this.fs.delete(new Path(ROOT_DIR), true);
  }

  @Test
  public void testExecute() throws IOException {
    this.step.execute();
    // After execution the file must show up under the destination directory.
    Assert.assertTrue(this.fs.exists(new Path(ROOT_DIR, "dir2/file")));
  }
}
| 2,792 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/instrumented/InstrumentedTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.instrumented;
import java.util.HashMap;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.Constructs;
import org.apache.gobblin.configuration.ConfigurationKeys;
import org.apache.gobblin.metrics.GobblinMetrics;
import org.apache.gobblin.configuration.State;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.instrumented.extractor.InstrumentedExtractor;
@Test(groups = { "gobblin.core" })
public class InstrumentedTest {

  /**
   * Builds an {@link Instrumented} for {@link InstrumentedExtractor} and checks
   * that its metric context is parented to the named {@link GobblinMetrics}
   * context and carries exactly the expected tags.
   */
  @Test
  public void testInstrumented() {
    GobblinMetrics gobblinMetrics = GobblinMetrics.get("parent.context");

    State state = new State();
    state.setProp(ConfigurationKeys.METRICS_ENABLED_KEY, Boolean.toString(true));
    state.setProp(Instrumented.METRIC_CONTEXT_NAME_KEY, gobblinMetrics.getName());

    Instrumented instrumented = new Instrumented(state, InstrumentedExtractor.class);

    // The instrumented object's metric context must hang off the parent
    // context registered under the GobblinMetrics name set above.
    MetricContext context = instrumented.getMetricContext();
    Assert.assertNotNull(context);
    Assert.assertTrue(context.getParent().isPresent());
    Assert.assertEquals(context.getParent().get(), gobblinMetrics.getMetricContext());

    // Expected tags: construct + class, plus the context's own id/name tags
    // (whose values are generated, so they are taken from the actual map).
    Map<String, ?> actualTags = context.getTagMap();
    Map<String, String> expectedTags = new HashMap<>();
    expectedTags.put("construct", Constructs.EXTRACTOR.toString());
    expectedTags.put("class", InstrumentedExtractor.class.getCanonicalName());
    expectedTags.put(MetricContext.METRIC_CONTEXT_ID_TAG_NAME,
        actualTags.get(MetricContext.METRIC_CONTEXT_ID_TAG_NAME).toString());
    expectedTags.put(MetricContext.METRIC_CONTEXT_NAME_TAG_NAME,
        actualTags.get(MetricContext.METRIC_CONTEXT_NAME_TAG_NAME).toString());

    Assert.assertEquals(actualTags.size(), expectedTags.size());
    for (Map.Entry<String, ?> tag : actualTags.entrySet()) {
      Assert.assertTrue(expectedTags.containsKey(tag.getKey()));
      Assert.assertEquals(expectedTags.get(tag.getKey()), tag.getValue().toString());
    }
  }
}
| 2,793 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/fork/CopyableGenericRecordTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import java.util.Arrays;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link CopyableGenericRecord}.
 *
 * @author Yinan Li
 */
@Test(groups = "gobblin.fork")
public class CopyableGenericRecordTest {

  // Schema of the test record: a user with a name, a favorite number and a
  // list of favorite colors.
  private static final String AVRO_SCHEMA = "{\"namespace\": \"example.avro\",\n" +
      " \"type\": \"record\",\n" +
      " \"name\": \"User\",\n" +
      " \"fields\": [\n" +
      " {\"name\": \"name\", \"type\": \"string\"},\n" +
      " {\"name\": \"favorite_number\", \"type\": \"int\"},\n" +
      " {\"name\": \"favorite_colors\", \"type\": {\"type\": \"array\", \"items\": \"string\"}}\n" +
      " ]\n" +
      "}";

  @Test
  public void testCopy() throws CopyNotSupportedException {
    GenericRecord original = new GenericData.Record(new Schema.Parser().parse(AVRO_SCHEMA));
    original.put("name", "foo");
    original.put("favorite_number", 68);
    original.put("favorite_colors", Arrays.asList("blue", "black", "red"));

    // The copy must start out equal to the source record...
    GenericRecord duplicate = new CopyableGenericRecord(original).copy();
    Assert.assertEquals(original, duplicate);

    // ...and mutating the copy must not bleed back into the original.
    duplicate.put("name", "bar");
    Assert.assertNotEquals(original, duplicate);
  }
}
| 2,794 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/fork/IdentityForkOperatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import org.apache.gobblin.configuration.ConfigurationKeys;
import java.util.Arrays;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.configuration.WorkUnitState;
/**
 * Unit tests for {@link IdentityForkOperator}.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.fork"})
public class IdentityForkOperatorTest {

  @Test
  public void testForkMethods() {
    ForkOperator<String, String> operator = new IdentityForkOperator<String, String>();
    WorkUnitState state = new WorkUnitState();

    // With two branches configured, both the schema and the data record are
    // forwarded to every branch.
    state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, 2);
    Assert.assertEquals(operator.forkSchema(state, ""), Arrays.asList(true, true));
    Assert.assertEquals(operator.forkDataRecord(state, ""), Arrays.asList(true, true));
    Assert.assertEquals(operator.getBranches(state), 2);

    // With zero branches, nothing is forked at all.
    state.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, 0);
    Assert.assertTrue(operator.forkSchema(state, "").isEmpty());
    Assert.assertTrue(operator.forkDataRecord(state, "").isEmpty());
    Assert.assertEquals(operator.getBranches(state), 0);
  }
}
| 2,795 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/fork/CopyableSchemaTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.fork;
import org.apache.avro.Schema;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link CopyableSchema}.
 *
 * @author Yinan Li
 */
@Test(groups = {"gobblin.fork"})
public class CopyableSchemaTest {

  // Schema of the test record: a user with a name, a favorite number and a
  // favorite color.
  private static final String AVRO_SCHEMA = "{\"namespace\": \"example.avro\",\n" +
      " \"type\": \"record\",\n" +
      " \"name\": \"User\",\n" +
      " \"fields\": [\n" +
      " {\"name\": \"name\", \"type\": \"string\"},\n" +
      " {\"name\": \"favorite_number\", \"type\": \"int\"},\n" +
      " {\"name\": \"favorite_color\", \"type\": \"string\"}\n" +
      " ]\n" +
      "}";

  @Test
  public void testCopy() throws CopyNotSupportedException {
    Schema original = new Schema.Parser().parse(AVRO_SCHEMA);

    // The copy must start out equal to the source schema...
    Schema duplicate = new CopyableSchema(original).copy();
    Assert.assertEquals(original, duplicate);

    // ...and mutating the copy must leave the original untouched.
    duplicate.addProp("foo", "bar");
    Assert.assertNotEquals(original, duplicate);
  }
}
| 2,796 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/configuration/WorkUnitStateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.configuration;
import org.apache.gobblin.source.workunit.Extract;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * Unit tests for {@link WorkUnitState}: default values, visibility of the
 * wrapped {@link WorkUnit}'s properties, overriding, Writable serialization
 * round-tripping, bulk copy via addAll, and equality.
 */
public class WorkUnitStateTest {

  @Test
  public void WorkUnitState()
      throws IOException {
    SourceState sourceState = new SourceState();
    Extract extract = sourceState.createExtract(Extract.TableType.APPEND_ONLY, "test", "test");
    WorkUnit wu = sourceState.createWorkUnit(extract);
    WorkUnitState state = new WorkUnitState(wu);

    // Nothing set anywhere yet: every typed getter returns the supplied default.
    Assert.assertEquals(state.getProp("string", "some string"), "some string");
    Assert.assertEquals(state.getPropAsList("list", "item1,item2").get(0), "item1");
    Assert.assertEquals(state.getPropAsList("list", "item1,item2").get(1), "item2");
    Assert.assertEquals(state.getPropAsLong("long", Long.MAX_VALUE), Long.MAX_VALUE);
    Assert.assertEquals(state.getPropAsInt("int", Integer.MAX_VALUE), Integer.MAX_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double", Double.MAX_VALUE), Double.MAX_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean", true), true);

    // Properties set on the underlying WorkUnit are visible through a
    // WorkUnitState constructed on top of it.
    wu.setProp("string", "some string");
    wu.setProp("list", "item1,item2");
    wu.setProp("long", Long.MAX_VALUE);
    wu.setProp("int", Integer.MAX_VALUE);
    wu.setProp("double", Double.MAX_VALUE);
    wu.setProp("boolean", true);
    state = new WorkUnitState(wu);
    Assert.assertEquals(state.getProp("string"), "some string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item1");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item2");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MAX_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MAX_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MAX_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), true);

    // Setting the same keys directly on the WorkUnitState shadows the values
    // coming from the WorkUnit.
    state.setProp("string", "some other string");
    state.setProp("list", "item3,item4");
    state.setProp("long", Long.MIN_VALUE);
    state.setProp("int", Integer.MIN_VALUE);
    state.setProp("double", Double.MIN_VALUE);
    state.setProp("boolean", false);

    // The old WorkUnit-level values are no longer returned, whether or not a
    // default is supplied...
    Assert.assertNotEquals(state.getProp("string", "some string"), "some string");
    Assert.assertNotEquals(state.getPropAsList("list", "item1,item2").get(0), "item1");
    Assert.assertNotEquals(state.getPropAsList("list", "item1,item2").get(1), "item2");
    Assert.assertNotEquals(state.getPropAsLong("long", Long.MAX_VALUE), Long.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsInt("int", Integer.MAX_VALUE), Integer.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsDouble("double", Double.MAX_VALUE), Double.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsBoolean("boolean", true), true);
    Assert.assertNotEquals(state.getProp("string"), "some string");
    Assert.assertNotEquals(state.getPropAsList("list").get(0), "item1");
    Assert.assertNotEquals(state.getPropAsList("list").get(1), "item2");
    Assert.assertNotEquals(state.getPropAsLong("long"), Long.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsInt("int"), Integer.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsDouble("double"), Double.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsBoolean("boolean"), true);

    // ...and the overriding values are returned instead.
    Assert.assertEquals(state.getProp("string"), "some other string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), false);

    // Serialize the state via the Writable interface...
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream(1024);
    DataOutputStream out = new DataOutputStream(byteStream);
    state.write(out);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));

    // ...a fresh instance starts out empty...
    state = new WorkUnitState();
    Assert.assertEquals(state.getProp("string"), null);
    Assert.assertEquals(state.getProp("list"), null);
    Assert.assertEquals(state.getProp("long"), null);
    Assert.assertEquals(state.getProp("int"), null);
    Assert.assertEquals(state.getProp("double"), null);
    Assert.assertEquals(state.getProp("boolean"), null);

    // ...and reading the serialized bytes back restores every property.
    state.readFields(in);
    Assert.assertEquals(state.getProp("string"), "some other string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), false);

    // addAll copies every property into another WorkUnitState instance.
    WorkUnitState state2 = new WorkUnitState();
    state2.addAll(state);
    Assert.assertEquals(state2.getProp("string"), "some other string");
    Assert.assertEquals(state2.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state2.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state2.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsBoolean("boolean"), false);
  }

  // A WorkUnitState wrapping a WorkUnit (here with a null extract) must at
  // minimum be equal to itself.
  @Test
  public void testEquals() {
    SourceState sourceState = new SourceState();
    sourceState.setProp("testKey", "true");
    WorkUnitState workUnitState = new WorkUnitState(new WorkUnit(sourceState, null));
    Assert.assertEquals(workUnitState, workUnitState);
  }
}
| 2,797 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/configuration/StateTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.configuration;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Map;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@link State}: typed getters with defaults, overriding,
 * Writable serialization round-tripping, bulk copy via addAll, and interning
 * of String keys/values on deserialization.
 */
public class StateTest {

  @Test
  public void testState()
      throws IOException {
    State state = new State();

    // Empty state: every typed getter falls back to the supplied default.
    Assert.assertEquals(state.getProp("string", "some string"), "some string");
    Assert.assertEquals(state.getPropAsList("list", "item1,item2").get(0), "item1");
    Assert.assertEquals(state.getPropAsList("list", "item1,item2").get(1), "item2");
    Assert.assertEquals(state.getPropAsLong("long", Long.MAX_VALUE), Long.MAX_VALUE);
    Assert.assertEquals(state.getPropAsInt("int", Integer.MAX_VALUE), Integer.MAX_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double", Double.MAX_VALUE), Double.MAX_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean", true), true);

    // Set one property of each supported type; the getters now return the
    // stored values.
    state.setProp("string", "some string");
    state.setProp("list", "item1,item2");
    state.setProp("long", Long.MAX_VALUE);
    state.setProp("int", Integer.MAX_VALUE);
    state.setProp("double", Double.MAX_VALUE);
    state.setProp("boolean", true);
    Assert.assertEquals(state.getProp("string"), "some string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item1");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item2");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MAX_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MAX_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MAX_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), true);

    // Overwrite every key with a different value.
    state.setProp("string", "some other string");
    state.setProp("list", "item3,item4");
    state.setProp("long", Long.MIN_VALUE);
    state.setProp("int", Integer.MIN_VALUE);
    state.setProp("double", Double.MIN_VALUE);
    state.setProp("boolean", false);

    // The old values are no longer returned, whether or not a default is
    // supplied...
    Assert.assertNotEquals(state.getProp("string", "some string"), "some string");
    Assert.assertNotEquals(state.getPropAsList("list", "item1,item2").get(0), "item1");
    Assert.assertNotEquals(state.getPropAsList("list", "item1,item2").get(1), "item2");
    Assert.assertNotEquals(state.getPropAsLong("long", Long.MAX_VALUE), Long.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsInt("int", Integer.MAX_VALUE), Integer.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsDouble("double", Double.MAX_VALUE), Double.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsBoolean("boolean", true), true);
    Assert.assertNotEquals(state.getProp("string"), "some string");
    Assert.assertNotEquals(state.getPropAsList("list").get(0), "item1");
    Assert.assertNotEquals(state.getPropAsList("list").get(1), "item2");
    Assert.assertNotEquals(state.getPropAsLong("long"), Long.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsInt("int"), Integer.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsDouble("double"), Double.MAX_VALUE);
    Assert.assertNotEquals(state.getPropAsBoolean("boolean"), true);

    // ...and the overwritten values are returned instead.
    Assert.assertEquals(state.getProp("string"), "some other string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), false);

    // Serialize the state via the Writable interface...
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream(1024);
    DataOutputStream out = new DataOutputStream(byteStream);
    state.write(out);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));

    // ...a fresh instance starts out empty...
    state = new State();
    Assert.assertEquals(state.getProp("string"), null);
    Assert.assertEquals(state.getProp("list"), null);
    Assert.assertEquals(state.getProp("long"), null);
    Assert.assertEquals(state.getProp("int"), null);
    Assert.assertEquals(state.getProp("double"), null);
    Assert.assertEquals(state.getProp("boolean"), null);

    // ...and reading the serialized bytes back restores every property.
    state.readFields(in);
    Assert.assertEquals(state.getProp("string"), "some other string");
    Assert.assertEquals(state.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state.getPropAsBoolean("boolean"), false);

    // addAll copies every property into another State instance.
    State state2 = new State();
    state2.addAll(state);
    Assert.assertEquals(state2.getProp("string"), "some other string");
    Assert.assertEquals(state2.getPropAsList("list").get(0), "item3");
    Assert.assertEquals(state2.getPropAsList("list").get(1), "item4");
    Assert.assertEquals(state2.getPropAsLong("long"), Long.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsInt("int"), Integer.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsDouble("double"), Double.MIN_VALUE);
    Assert.assertEquals(state2.getPropAsBoolean("boolean"), false);
  }

  /**
   * Verifies that String keys/values of a State read back via readFields are
   * interned, while the strings in the originally-written State are not.
   */
  @Test
  public void testInterningOfKeyValues() throws Exception {
    // Prove we can identify interned keys
    String nonInterned = new String("myKey"); // not interned
    String interned = new String("myInternedKey").intern(); // interned
    Assert.assertFalse(isInterned(nonInterned));
    Assert.assertTrue(isInterned(interned));

    // Write a State whose key and value are freshly allocated (not interned).
    State state = new State();
    state.setProp(new String("someKey"), new String("someValue"));
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    DataOutput dataOutput = new DataOutputStream(outputStream);
    state.write(dataOutput);
    outputStream.flush();

    // Read it back into a second State from the serialized bytes.
    ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
    DataInput dataInput = new DataInputStream(inputStream);
    State readState = new State();
    readState.readFields(dataInput);
    inputStream.close();

    // Same contents, but only the deserialized copy holds interned strings.
    Assert.assertEquals(state, readState);
    assertInterned(state.getProperties(), false);
    assertInterned(readState.getProperties(), true);
  }

  /** Asserts that every String key and String value in {@code map} is (or is not) interned. */
  public static void assertInterned(Map<Object, Object> map, boolean interned) {
    for (Map.Entry<Object, Object> entry : map.entrySet()) {
      if (entry.getKey() instanceof String) {
        Assert.assertEquals(isInterned((String) entry.getKey()), interned);
      }
      if (entry.getValue() instanceof String) {
        Assert.assertEquals(isInterned((String) entry.getValue()), interned);
      }
    }
  }

  /** Returns true iff {@code str} is the canonical (interned) instance of its value. */
  public static boolean isInterned(String str) {
    return str == str.intern();
  }
}
| 2,798 |
0 | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/configuration | Create_ds/gobblin/gobblin-core/src/test/java/org/apache/gobblin/configuration/workunit/MultiWorkUnitTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.configuration.workunit;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import org.apache.gobblin.source.workunit.MultiWorkUnit;
import org.apache.gobblin.source.workunit.WorkUnit;
/**
 * Unit tests for {@link MultiWorkUnit}.
 */
@Test(groups = {"gobblin.configuration.workunit"})
public class MultiWorkUnitTest {

  private MultiWorkUnit multiWorkUnit;

  @BeforeClass
  public void setUp() {
    // Two child work units with distinct watermarks and properties so the
    // round-trip assertions in testSerDe can tell them apart.
    this.multiWorkUnit = new MultiWorkUnit();

    WorkUnit workUnit1 = WorkUnit.createEmpty();
    workUnit1.setHighWaterMark(1000);
    workUnit1.setLowWaterMark(0);
    workUnit1.setProp("k1", "v1");
    this.multiWorkUnit.addWorkUnit(workUnit1);

    WorkUnit workUnit2 = WorkUnit.createEmpty();
    workUnit2.setHighWaterMark(2000);
    workUnit2.setLowWaterMark(1001);
    workUnit2.setProp("k2", "v2");
    this.multiWorkUnit.addWorkUnit(workUnit2);
  }

  /**
   * Serializes the {@link MultiWorkUnit} with write() and reads it back with
   * readFields(), verifying that the watermarks and properties of every
   * nested {@link WorkUnit} survive the round trip.
   */
  @Test
  public void testSerDe()
      throws IOException {
    // Idiomatic try-with-resources replaces the manual Guava Closer
    // register/rethrow/close bookkeeping; the streams here are all in-memory,
    // so close() is a no-op either way and behavior is unchanged.
    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos)) {
      this.multiWorkUnit.write(dos);
      dos.flush();

      try (ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
          DataInputStream dis = new DataInputStream(bais)) {
        MultiWorkUnit copy = new MultiWorkUnit();
        copy.readFields(dis);

        List<WorkUnit> workUnitList = copy.getWorkUnits();
        Assert.assertEquals(workUnitList.size(), 2);

        Assert.assertEquals(workUnitList.get(0).getHighWaterMark(), 1000);
        Assert.assertEquals(workUnitList.get(0).getLowWaterMark(), 0);
        Assert.assertEquals(workUnitList.get(0).getProp("k1"), "v1");

        Assert.assertEquals(workUnitList.get(1).getHighWaterMark(), 2000);
        Assert.assertEquals(workUnitList.get(1).getLowWaterMark(), 1001);
        Assert.assertEquals(workUnitList.get(1).getProp("k2"), "v2");
      }
    }
  }
}
| 2,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.